commonware-utils 2026.3.0

Leverage common functionality across multiple primitives.
Documentation
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
1001
1002
1003
1004
1005
1006
1007
1008
1009
1010
1011
1012
1013
1014
1015
1016
1017
1018
1019
1020
1021
1022
1023
1024
1025
1026
1027
1028
1029
1030
1031
1032
1033
1034
1035
1036
1037
1038
1039
1040
1041
use super::*;
use crate::{bitmap::Prunable, hex};

/// Test basic dirty lifecycle: creation, operations, commit, and abort.
#[test]
fn test_dirty_lifecycle_and_operations() {
    // A freshly created bitmap is empty with no commit history.
    let bm: BitMap<4> = BitMap::new();
    assert!(bm.is_empty());
    assert_eq!(bm.len(), 0);
    assert_eq!(bm.commits().count(), 0);

    // Basic push and commit.
    let bm = bm
        .apply_batch(1, |d| {
            d.push(true).push(false).push(true);
        })
        .unwrap();
    assert_eq!(bm.len(), 3);
    assert!(bm.get_bit(0));
    assert!(!bm.get_bit(1));
    assert!(bm.get_bit(2));
    assert_eq!(bm.commits().count(), 1);

    // Abort discards staged pushes.
    let mut staged = bm.into_dirty();
    staged.push(true).push(true);
    let bm = staged.abort();
    assert_eq!(bm.len(), 3); // Unchanged

    // Read-through semantics: the dirty state sees its own edits.
    let mut staged = bm.into_dirty();
    assert!(staged.get_bit(0)); // Read unmodified
    staged.set_bit(1, true); // Modify
    assert!(staged.get_bit(1)); // See modification in dirty
    staged.push(false); // Append
    assert!(!staged.get_bit(3)); // See appended bit
    let bm = staged.commit(2).unwrap();

    // After commit, all staged changes are persisted.
    assert_eq!(bm.len(), 4);
    assert!(bm.get_bit(1));
    assert!(!bm.get_bit(3));

    // An empty batch still records its commit number.
    let bm = bm.apply_batch(3, |_| {}).unwrap();
    assert_eq!(bm.len(), 4);
    assert!(bm.commit_exists(3));

    // set_bit() supports method chaining on the dirty state.
    let bm = bm
        .apply_batch(4, |d| {
            d.set_bit(0, false).push_byte(0xAA);
        })
        .unwrap();
    assert_eq!(bm.len(), 12); // 4 + 8 bits
    assert!(!bm.get_bit(0)); // Modified
}

/// Test dirty operations: push, pop, prune, push_byte, push_chunk, and get_chunk.
#[test]
fn test_dirty_operations_push_pop_prune() {
    // Seed with three false bits.
    let bm = BitMap::<4>::new()
        .apply_batch(1, |d| {
            d.push(false).push(false).push(false);
        })
        .unwrap();

    // Mix in-place modifications with appends inside one batch.
    let bm = bm
        .apply_batch(2, |d| {
            d.set_bit(0, true); // Modify
            d.set_bit(1, true); // Modify
            d.push(true); // Append
            d.push(true); // Append
        })
        .unwrap();

    assert_eq!(bm.len(), 5);
    assert!(bm.get_bit(0));
    assert!(bm.get_bit(1));
    assert!(!bm.get_bit(2));

    // A pop cancels a push within the same batch.
    let _bm = bm
        .apply_batch(3, |d| {
            d.push(false); // Add bit 5
            assert!(!d.pop()); // Remove it; value is the pushed false
            assert_eq!(d.len(), 5); // Back to original
        })
        .unwrap();

    // Bulk push operations (push_chunk, push_byte);
    // start fresh with 32 bits so chunks align cleanly.
    let bm = BitMap::<4>::new()
        .apply_batch(1, |d| {
            d.push_chunk(&hex!("0x00000000"));
        })
        .unwrap();

    let bm = bm
        .apply_batch(2, |d| {
            d.push_chunk(&hex!("0xAABBCCDD")); // 32 bits at offset 32
            d.push_byte(0xFF); // 8 bits at offset 64
        })
        .unwrap();

    assert_eq!(bm.len(), 72); // 32 + 32 + 8
    assert_eq!(bm.get_chunk_containing(32), &hex!("0xAABBCCDD")); // Second chunk
    for i in 64..72 {
        assert!(bm.get_bit(i)); // Verify pushed byte
    }

    // get_chunk reflects modifications staged in the dirty state.
    let mut staged = bm.into_dirty();
    staged.set_bit(32, true); // First bit of second chunk
    staged.set_bit(39, true); // 8th bit of second chunk
    let chunk = staged.get_chunk(32);
    assert_eq!(chunk[0] & 0x01, 0x01); // bit 32 (0 in chunk) set
    assert_eq!(chunk[0] & 0x80, 0x80); // bit 39 (7 in chunk) set
    let bm = staged.commit(3).unwrap();

    // Pruning drops chunk storage but leaves the logical length intact.
    let bm = bm
        .apply_batch(4, |d| {
            d.prune_to_bit(32);
        })
        .unwrap();

    assert_eq!(bm.len(), 72); // Length unchanged
    assert_eq!(bm.pruned_chunks(), 1); // First chunk pruned
}

/// Test commit history management.
#[test]
fn test_commit_history_management() {
    // Commit numbers must be strictly increasing.
    let bm = BitMap::<4>::new()
        .apply_batch(5, |d| {
            d.push(true);
        })
        .unwrap();

    // Re-using the same commit number fails.
    match bm
        .clone()
        .apply_batch(5, |d| {
            d.push(false);
        })
        .unwrap_err()
    {
        Error::NonMonotonicCommit {
            previous,
            attempted,
        } => {
            assert_eq!(previous, 5);
            assert_eq!(attempted, 5);
        }
        _ => panic!("Expected NonMonotonicCommit error"),
    }

    // A lower commit number also fails.
    match bm
        .clone()
        .apply_batch(3, |d| {
            d.push(false);
        })
        .unwrap_err()
    {
        Error::NonMonotonicCommit {
            previous,
            attempted,
        } => {
            assert_eq!(previous, 5);
            assert_eq!(attempted, 3);
        }
        _ => panic!("Expected NonMonotonicCommit error"),
    }

    // A higher commit number succeeds.
    let _bm = bm
        .apply_batch(10, |d| {
            d.push(false);
        })
        .unwrap();

    // Commit queries (need fresh instance).
    let mut bm: BitMap<4> = BitMap::new();
    assert!(bm.earliest_commit().is_none());
    assert!(bm.latest_commit().is_none());
    for i in 1..=5 {
        bm = bm
            .apply_batch(i * 10, |d| {
                d.push(true);
            })
            .unwrap();
    }

    assert_eq!(bm.earliest_commit(), Some(10));
    assert_eq!(bm.latest_commit(), Some(50));
    assert!(bm.commit_exists(30));
    assert!(!bm.commit_exists(25));

    let recorded: Vec<u64> = bm.commits().collect();
    assert_eq!(recorded, vec![10, 20, 30, 40, 50]);

    // Drop commits older than 30.
    assert_eq!(bm.prune_commits_before(30), 2);
    assert_eq!(bm.commits().count(), 3);

    // Clearing history removes all commits but keeps current state.
    bm.clear_history();
    assert_eq!(bm.commits().count(), 0);
    assert!(bm.earliest_commit().is_none());
    assert!(bm.latest_commit().is_none());
    assert_eq!(bm.len(), 5); // Current state preserved
}

/// Test historical reconstruction with bit modifications across multiple commits.
///
/// Covers three independent scenarios, each on a fresh bitmap:
/// 1. A single modify + append between two commits.
/// 2. Successive modifications of different bits across three commits.
/// 3. Modifications combined with appends, verifying both per commit.
#[test]
fn test_historical_reconstruction_with_modifications() {
    let bitmap: BitMap<4> = BitMap::new();

    // Simple modification scenario
    // Commit 1 seeds three bits; commit 2 flips bit 0 and appends a fourth.
    let bitmap = bitmap
        .apply_batch(1, |dirty| {
            dirty.push(true).push(false).push(true);
        })
        .unwrap();
    let bitmap = bitmap
        .apply_batch(2, |dirty| {
            dirty.set_bit(0, false);
            dirty.push(false);
        })
        .unwrap();

    // Reconstructed commit 1 must show pre-modification values and length.
    let state_at_1 = bitmap.get_at_commit(1).unwrap();
    assert_eq!(state_at_1.len(), 3);
    assert!(state_at_1.get_bit(0)); // Original true
    assert!(!state_at_1.get_bit(1));

    let state_at_2 = bitmap.get_at_commit(2).unwrap();
    assert_eq!(state_at_2.len(), 4);
    assert!(!state_at_2.get_bit(0)); // Modified to false
    assert!(!state_at_2.get_bit(3)); // Appended

    // Multiple successive modifications
    // Commit 1 pushes a whole 32-bit chunk; commits 2 and 3 each flip two bits.
    let bitmap: BitMap<4> = BitMap::new();
    let bitmap = bitmap
        .apply_batch(1, |dirty| {
            dirty.push_chunk(&hex!("0xFF00FF00"));
        })
        .unwrap();
    let bitmap = bitmap
        .apply_batch(2, |dirty| {
            dirty.set_bit(0, false);
            dirty.set_bit(8, true);
        })
        .unwrap();
    let bitmap = bitmap
        .apply_batch(3, |dirty| {
            dirty.set_bit(16, false);
            dirty.set_bit(24, true);
        })
        .unwrap();

    let state_at_1 = bitmap.get_at_commit(1).unwrap();
    assert!(state_at_1.get_bit(0));
    assert!(!state_at_1.get_bit(8));

    // Each intermediate state reflects exactly the modifications made so far.
    let state_at_2 = bitmap.get_at_commit(2).unwrap();
    assert!(!state_at_2.get_bit(0)); // Modified
    assert!(state_at_2.get_bit(8)); // Modified
    assert!(state_at_2.get_bit(16)); // Not yet modified

    let state_at_3 = bitmap.get_at_commit(3).unwrap();
    assert!(!state_at_3.get_bit(16)); // Modified
    assert!(state_at_3.get_bit(24)); // Modified

    // Modifications combined with appends
    // Each commit both flips existing bits and grows the bitmap by two.
    let bitmap: BitMap<4> = BitMap::new();
    let bitmap = bitmap
        .apply_batch(1, |dirty| {
            for _ in 0..4 {
                dirty.push(true);
            }
        })
        .unwrap();
    let bitmap = bitmap
        .apply_batch(2, |dirty| {
            dirty.set_bit(0, false).set_bit(2, false);
            dirty.push(false).push(false);
        })
        .unwrap();
    let bitmap = bitmap
        .apply_batch(3, |dirty| {
            dirty.set_bit(1, false).set_bit(3, false);
            dirty.push(true).push(true);
        })
        .unwrap();

    let state_at_1 = bitmap.get_at_commit(1).unwrap();
    assert_eq!(state_at_1.len(), 4);
    for i in 0..4 {
        assert!(state_at_1.get_bit(i)); // All true
    }

    let state_at_2 = bitmap.get_at_commit(2).unwrap();
    assert_eq!(state_at_2.len(), 6);
    assert!(!state_at_2.get_bit(0)); // Modified
    assert!(state_at_2.get_bit(1)); // Unchanged
    assert!(!state_at_2.get_bit(4)); // Appended false

    let state_at_3 = bitmap.get_at_commit(3).unwrap();
    assert_eq!(state_at_3.len(), 8);
    assert!(!state_at_3.get_bit(1)); // Modified in commit 3
    assert!(state_at_3.get_bit(6)); // Appended in commit 3
}

/// Test historical reconstruction with length-changing operations (appends and pops).
#[test]
fn test_historical_reconstruction_with_length_changes() {
    // Pure append operations: each commit adds two bits.
    let bm = BitMap::<4>::new()
        .apply_batch(1, |d| {
            d.push(true).push(false);
        })
        .unwrap();
    let bm = bm
        .apply_batch(2, |d| {
            d.push(true).push(true);
        })
        .unwrap();
    let bm = bm
        .apply_batch(3, |d| {
            d.push(false).push(false);
        })
        .unwrap();

    // Each historical state reports the length it had at that commit.
    for (commit, expected_len) in [(1u64, 2), (2, 4), (3, 6)] {
        assert_eq!(bm.get_at_commit(commit).unwrap().len(), expected_len);
    }

    // Pops followed by appends across separate commits.
    let bm = BitMap::<4>::new()
        .apply_batch(1, |d| {
            for i in 0..5 {
                d.push(i % 2 == 0);
            }
        })
        .unwrap();
    let bm = bm
        .apply_batch(2, |d| {
            d.pop();
            d.pop();
        })
        .unwrap();
    let bm = bm
        .apply_batch(3, |d| {
            d.push(true).push(true).push(true);
        })
        .unwrap();

    let s1 = bm.get_at_commit(1).unwrap();
    assert_eq!(s1.len(), 5);
    assert!(s1.get_bit(0)); // true
    assert!(!s1.get_bit(1)); // false
    assert!(s1.get_bit(4)); // true

    // Commit 2 shrank the bitmap by two.
    let s2 = bm.get_at_commit(2).unwrap();
    assert_eq!(s2.len(), 3);

    // Commit 3 grew it back past the original length.
    let s3 = bm.get_at_commit(3).unwrap();
    assert_eq!(s3.len(), 6);
    assert!(s3.get_bit(3));
    assert!(s3.get_bit(5));
}

/// Test historical reconstruction with bitmap chunk pruning.
#[test]
fn test_historical_reconstruction_with_pruning() {
    // Commit 1: create 64 bits (2 chunks), no pruning.
    let bm = BitMap::<4>::new()
        .apply_batch(1, |d| {
            d.push_chunk(&hex!("0xAABBCCDD"));
            d.push_chunk(&hex!("0x11223344"));
        })
        .unwrap();

    // Commit 2: prune the first chunk.
    let bm = bm
        .apply_batch(2, |d| {
            d.prune_to_bit(32);
        })
        .unwrap();
    assert_eq!(bm.pruned_chunks(), 1);

    // Reconstructing commit 1 must restore the since-pruned chunk's data.
    let at1 = bm.get_at_commit(1).unwrap();
    assert_eq!(at1.len(), 64);
    assert_eq!(at1.pruned_chunks(), 0); // No pruning
    assert_eq!(at1.get_chunk_containing(0), &hex!("0xAABBCCDD")); // Restored
    assert_eq!(at1.get_chunk_containing(32), &hex!("0x11223344"));

    // Reconstructing commit 2 keeps the pruning metadata.
    let at2 = bm.get_at_commit(2).unwrap();
    assert_eq!(at2.len(), 64);
    assert_eq!(at2.pruned_chunks(), 1); // Pruning preserved
    assert_eq!(at2.get_chunk_containing(32), &hex!("0x11223344"));
}

/// Test edge cases in historical reconstruction.
#[test]
fn test_historical_reconstruction_edge_cases() {
    let bm = BitMap::<4>::new()
        .apply_batch(10, |d| {
            d.push(true);
        })
        .unwrap();

    // Only exact commit numbers resolve; neighbors do not.
    assert!(bm.get_at_commit(5).is_none());
    assert!(bm.get_at_commit(15).is_none());
    assert!(bm.get_at_commit(10).is_some());

    // After pruning commit history: build 5 commits, each pushing `i` bits.
    let mut bm: BitMap<4> = BitMap::new();
    for i in 1..=5 {
        bm = bm
            .apply_batch(i, |d| {
                for _ in 0..i {
                    d.push(true);
                }
            })
            .unwrap();
    }

    bm.prune_commits_before(3);

    // Pruned commits are no longer reconstructable.
    assert!(bm.get_at_commit(1).is_none());
    assert!(bm.get_at_commit(2).is_none());

    // Remaining commits still reconstruct.
    assert!(bm.get_at_commit(3).is_some());
    assert!(bm.get_at_commit(4).is_some());
    assert_eq!(bm.get_at_commit(3).unwrap().len(), 6); // 1+2+3 bits
}

/// Test dirty modifications on appended bits (regression tests).
#[test]
fn test_dirty_modifications_on_appended_bits() {
    // Modify a bit appended within the same dirty state.
    let bm = BitMap::<4>::new()
        .apply_batch(1, |d| {
            d.push(true); // Append bit 0
            d.set_bit(0, false); // Modify that appended bit
        })
        .unwrap();
    assert_eq!(bm.len(), 1);
    assert!(!bm.get_bit(0)); // Should be false after modification

    // Push, modify, then pop (should cancel out cleanly).
    let bm = bm
        .apply_batch(2, |d| {
            d.push(true); // Append bit 1
            d.set_bit(1, false); // Modify that appended bit
            d.pop(); // Remove bit 1
        })
        .unwrap();
    assert_eq!(bm.len(), 1); // Only bit 0 remains
}

/// Test pop() behavior with dirty modifications (regression tests).
#[test]
fn test_pop_behavior_with_modifications() {
    // Seed with 10 true bits.
    let bm = BitMap::<4>::new()
        .apply_batch(1, |d| {
            for _ in 0..10 {
                d.push(true);
            }
        })
        .unwrap();

    // pop() must observe the staged modification, not the committed value.
    let mut popped = true;
    let _bm = bm
        .apply_batch(2, |d| {
            d.set_bit(9, false); // Modify bit 9 in dirty
            popped = d.pop(); // Should return false (modified)
        })
        .unwrap();
    assert!(
        !popped,
        "pop() should return modified value, not original"
    );
}

/// Test reading popped bits should fail.
#[test]
#[should_panic(expected = "out of bounds")]
fn test_read_popped_bit_panics() {
    // Seed with 10 bits so indices 0..10 are initially valid.
    let bm = BitMap::<4>::new()
        .apply_batch(1, |d| {
            for _ in 0..10 {
                d.push(true);
            }
        })
        .unwrap();

    // Two pops shrink the projected length to 8.
    let mut staged = bm.into_dirty();
    staged.pop();
    staged.pop();
    staged.get_bit(8); // Should panic - bit 8 is now out of bounds
}

/// Test pruning beyond bitmap length should fail.
#[test]
#[should_panic(expected = "beyond projected length")]
fn test_prune_beyond_length_panics() {
    // Seed with 10 bits.
    let bm = BitMap::<4>::new()
        .apply_batch(1, |d| {
            for _ in 0..10 {
                d.push(true);
            }
        })
        .unwrap();

    let mut staged = bm.into_dirty();
    staged.pop(); // projected_len = 9
    staged.prune_to_bit(100); // Should panic - bit 100 is beyond projected length
}

/// Test that get_chunk can read entirely appended chunks.
///
/// Scenario: starting from an empty bitmap, append enough bits to fill a
/// chunk that exists only in the appended region, then read it back with
/// get_chunk.
///
/// Regression context: when a chunk is entirely appended
/// (chunk_start >= base_len), the range_end calculation
/// (chunk_end.min(base_len)) produced an empty range, so all checks returned
/// false and the code fell back to current.get_chunk(), which panicked
/// because that chunk doesn't exist in current.
#[test]
fn test_get_chunk_on_appended_only_chunk() {
    // Begin from an empty bitmap and fill chunk 0 purely via appends.
    let mut staged = BitMap::<4>::new().into_dirty();
    for i in 0..32 {
        staged.push(i % 2 == 0); // Alternating pattern: true, false, true, false...
    }

    // Chunk 0 exists only in the appended region; reading it must not panic.
    let chunk = staged.get_chunk(0);

    // Spot-check the first four bits of the alternating pattern.
    assert_ne!(chunk[0] & 0x01, 0, "bit 0 should be true");
    assert_eq!(chunk[0] & 0x02, 0, "bit 1 should be false");
    assert_ne!(chunk[0] & 0x04, 0, "bit 2 should be true");
    assert_eq!(chunk[0] & 0x08, 0, "bit 3 should be false");

    // Overall pattern should be 0x55 (binary 01010101)
    assert_eq!(chunk[0], 0x55, "byte 0 should be 0x55");
}

/// Test that get_chunk zeros out bits beyond projected_len after pops.
///
/// Scenario: a chunk has all bits set, some bits are popped off its end, and
/// get_chunk is called on that chunk.
///
/// Regression context: get_chunk used to return the full chunk from current
/// without zeroing the popped bits, so readers saw stale data that would be
/// zeroed only after commit.
#[test]
fn test_pop_zeros_chunk_tail() {
    // Setup: 33 true bits (chunk 0 holds bits 0-31, chunk 1 holds bit 32).
    let bm = BitMap::<4>::new()
        .apply_batch(1, |d| {
            for _ in 0..33 {
                d.push(true);
            }
        })
        .unwrap();

    // Start a new dirty and pop 2 bits.
    let mut staged = bm.into_dirty();
    staged.pop(); // projected_len = 32
    staged.pop(); // projected_len = 31

    // Bit 31 is now out of bounds; chunk 0 still covers bits 0-31.
    let chunk = staged.get_chunk(0);

    // Bit 31 lives in byte 3, bit position 7; it must read as zero
    // because it is >= projected_len.
    assert_eq!((chunk[3] >> 7) & 1, 0);
}

/// Test pruning a chunk that was just appended in the same dirty state.
///
/// Scenario: a bitmap with a partial first chunk gains a brand-new chunk via
/// appends inside a dirty state, and that new chunk is immediately pruned.
///
/// Regression context: prune_to_bit tried to capture chunk data from current,
/// but the new chunk only existed in appended_bits, causing a panic.
#[test]
fn test_prune_freshly_appended_chunk() {
    // Start with 10 bits (chunk 0 is partial, no chunk 1).
    let bm = BitMap::<4>::new()
        .apply_batch(1, |d| {
            for _ in 0..10 {
                d.push(true);
            }
        })
        .unwrap();
    assert_eq!(bm.current().chunks_len(), 1); // Only chunk 0 exists

    // Append 54 more bits in a new dirty state so chunk 1 (bits 32-63)
    // exists ONLY in appended_bits, not in current.
    let mut staged = bm.into_dirty();
    for _ in 0..54 {
        staged.push(true);
    }
    assert_eq!(staged.len(), 64); // projected_len = 64: chunks 0 and 1

    // Prune both chunks: chunk 0 is captured from current (OK), while
    // chunk 1 must be captured from the appended state.
    staged.prune_to_bit(64);

    // Should commit successfully
    let _bm = staged.commit(2).unwrap();
}

/// Test that dirty reads correctly see appended bits after pops.
///
/// This tests the scenario where:
/// 1. We start with a bitmap of length N
/// 2. Pop some bits (reducing length to M < N)
/// 3. Push new bits (growing length back toward N)
/// 4. Read those newly pushed bits within the same dirty state
///
/// The bug was that `get_bit` and `get_chunk_containing` checked `bit >= base_len`
/// to identify appended bits, but after net pops, the appended region actually
/// starts at `projected_len - appended_bits.len()`, which is less than `base_len`.
/// This caused reads to fall through to the stale underlying bitmap instead of
/// reading from the dirty's `appended_bits` vector.
#[test]
fn test_read_appended_bits_after_pops() {
    let bitmap: BitMap<4> = BitMap::new();

    // Setup: Create bitmap with 10 bits, all set to true
    let bitmap = bitmap
        .apply_batch(1, |dirty| {
            for _ in 0..10 {
                dirty.push(true);
            }
        })
        .unwrap();

    // Start dirty: pop 3 bits, then push 2 bits with value false
    // (net effect: length 9, with bits 7-8 in the appended region)
    let mut dirty = bitmap.into_dirty();
    dirty.pop(); // projected_len = 9
    dirty.pop(); // projected_len = 8
    dirty.pop(); // projected_len = 7
    dirty.push(false); // projected_len = 8, appended_bits = [false]
    dirty.push(false); // projected_len = 9, appended_bits = [false, false]

    // The appended region is now [7, 9), not [10, 12)
    // Verify get_bit sees the new false values, not the old true values
    assert!(!dirty.get_bit(7));
    assert!(!dirty.get_bit(8));

    // Verify get_chunk also reconstructs correctly
    // (bit 7 = byte 0 bit 7, mask 0x80; bit 8 = byte 1 bit 0, mask 0x01)
    let chunk = dirty.get_chunk(0); // Chunk containing bits 0..31
    assert_eq!(chunk[0] & 0x80, 0, "bit 7 should be false in chunk");
    assert_eq!(chunk[1] & 0x01, 0, "bit 8 should be false in chunk");

    // Also verify we can modify appended bits
    dirty.set_bit(7, true);
    assert!(dirty.get_bit(7));

    // Commit and verify the final state
    let bitmap = dirty.commit(2).unwrap();
    assert_eq!(bitmap.len(), 9);
    assert!(bitmap.get_bit(7));
    assert!(!bitmap.get_bit(8));
}

/// Test historical reconstruction when current state has MORE pruning than target.
///
/// Scenario: commit a state with unpruned data, prune that data in a later
/// commit, then reconstruct the earlier state (which needs the now-pruned
/// data). The diff system should have captured the pruned chunk data as
/// `ChunkDiff::Pruned`, allowing reconstruction even though that chunk no
/// longer exists in current state.
#[test]
fn test_reconstruct_less_pruned_from_more_pruned() {
    // Commit 1: 64 bits (2 chunks) — chunk 0 all true, chunk 1 all false.
    let bm = BitMap::<4>::new()
        .apply_batch(1, |d| {
            for i in 0..64 {
                d.push(i < 32);
            }
        })
        .unwrap();
    assert_eq!(bm.len(), 64);
    assert_eq!(bm.pruned_chunks(), 0);

    // Commit 2: prune chunk 0.
    let bm = bm
        .apply_batch(2, |d| {
            d.prune_to_bit(32);
        })
        .unwrap();
    assert_eq!(bm.len(), 64);
    assert_eq!(bm.pruned_chunks(), 1);

    // Reconstructing commit 1 requires chunk 0 from the diff, not from
    // current state (where it has been pruned away).
    let reconstructed = bm
        .get_at_commit(1)
        .expect("should be able to reconstruct less-pruned state");

    assert_eq!(reconstructed.len(), 64);
    assert_eq!(reconstructed.pruned_chunks(), 0, "commit 1 had no pruning");

    // Verify every bit round-tripped: first chunk true, second chunk false.
    for i in 0..64 {
        assert_eq!(reconstructed.get_bit(i), i < 32);
    }
}

/// Test historical reconstruction when all non-pruned bits are in pruned chunks.
///
/// Scenario: a bitmap with exactly one full chunk has that chunk pruned, the
/// fully-pruned state is committed, and `get_at_commit` is asked to
/// reconstruct it.
///
/// Regression context: `apply_reverse_diff` always computed `raw_last_chunk`
/// from `target_len - 1`, but when `target_len <= target_pruned *
/// CHUNK_SIZE_BITS` this gives a chunk index less than `raw_first_chunk` (the
/// first unpruned chunk). The code then tried to access this pruned chunk
/// from `newer_state`, causing a panic. The fix detects this case early and
/// returns an empty bitmap with the correct pruning metadata.
#[test]
fn test_reconstruct_fully_pruned_commit() {
    // Commit 1: 32 bits (one complete chunk) with an alternating pattern.
    let bm = BitMap::<4>::new()
        .apply_batch(1, |d| {
            for i in 0..32 {
                d.push(i % 2 == 0);
            }
        })
        .unwrap();
    assert_eq!(bm.len(), 32);
    assert_eq!(bm.pruned_chunks(), 0);

    // Commit 2: prune the entire chunk (bits 0..32).
    let bm = bm
        .apply_batch(2, |d| {
            d.prune_to_bit(32);
        })
        .unwrap();
    assert_eq!(bm.len(), 32);
    assert_eq!(bm.pruned_chunks(), 1);

    // All 32 bits now live in pruned chunks; reconstruction must not panic.
    let reconstructed = bm
        .get_at_commit(2)
        .expect("should be able to reconstruct fully pruned commit");

    assert_eq!(reconstructed.len(), 32, "length should match");
    assert_eq!(
        reconstructed.pruned_chunks(),
        1,
        "should have 1 pruned chunk"
    );

    // Commit 1 (before pruning) must also reconstruct with data intact.
    let before_prune = bm
        .get_at_commit(1)
        .expect("should be able to reconstruct pre-prune state");

    assert_eq!(before_prune.len(), 32);
    assert_eq!(before_prune.pruned_chunks(), 0);
    // Verify the alternating pattern survived reconstruction.
    for i in 0..32 {
        assert_eq!(before_prune.get_bit(i), i % 2 == 0);
    }
}

/// Verify historical bitmap reconstruction correctness by comparing to another bitmap.
///
/// This test creates a "ground truth" (`Prunable`) bitmap alongside the `Historical` bitmap.
/// Both bitmaps receive the same random operations. After each commit, we save the ground
/// truth state. At the end, we reconstruct each commit from the `Historical` bitmap and
/// verify it matches the saved ground truth state bit-for-bit.
fn test_randomized_helper<R: rand::Rng>(rng: &mut R) {
    // Test configuration
    const NUM_COMMITS: u64 = 20;
    const OPERATIONS_PER_COMMIT: usize = 32;
    const CHUNK_SIZE_BITS: u64 = Prunable::<4>::CHUNK_SIZE_BITS;

    // Operation probability thresholds (out of 100)
    // These define a probability distribution over different operations
    const PROB_PUSH: u64 = 55; // 0-54: 55% chance to push a new bit
    const PROB_MODIFY: u64 = 75; // 55-74: 20% chance to modify existing bit
    const PROB_POP: u64 = 90; // 75-89: 15% chance to pop last bit
    const PROB_PRUNE: u64 = 100; // 90-99: 10% chance to prune (if possible)

    let mut bitmap: BitMap<4> = BitMap::new();
    let mut ground_truth = Prunable::<4>::new();
    let mut checkpoints: Vec<(u64, Prunable<4>)> = Vec::new();

    // Perform random operations across multiple commits
    for commit_num in 0..NUM_COMMITS {
        let initial_len = ground_truth.len();
        let initial_pruned = ground_truth.pruned_chunks();

        // Track current state within this dirty state (changes as we apply operations)
        let mut current_len = initial_len;
        let mut current_pruned = initial_pruned;

        let mut dirty = bitmap.into_dirty();

        for _ in 0..OPERATIONS_PER_COMMIT {
            // Pick a random operation based on probability distribution
            let op_choice = rng.gen_range(0..100);

            // Special case: if bitmap is empty, we can only push
            if current_len == 0 {
                let bit_value = rng.gen_bool(0.5);
                dirty.push(bit_value);
                ground_truth.push(bit_value);
                current_len += 1;
                continue;
            }

            // Operation: PUSH (55% probability)
            if op_choice < PROB_PUSH {
                let bit_value = rng.gen_bool(0.5);
                dirty.push(bit_value);
                ground_truth.push(bit_value);
                current_len += 1;
            }
            // Operation: MODIFY existing bit (20% probability)
            else if op_choice < PROB_MODIFY {
                let bit = rng.gen_range(0..current_len);
                let new_value = rng.gen_bool(0.5);

                // Safety: Only modify bits that aren't pruned
                let chunk_idx = Prunable::<4>::to_chunk_index(bit);
                if chunk_idx >= current_pruned {
                    dirty.set_bit(bit, new_value);
                    ground_truth.set_bit(bit, new_value);
                }
            }
            // Operation: POP last bit (15% probability)
            else if op_choice < PROB_POP {
                // Safety: only pop while the last bit is outside the pruned
                // region -- popping a pruned bit panics (see
                // test_pop_into_pruned_region_panics). Without this guard, a
                // run of pops following a prune could walk current_len down to
                // the prune boundary and abort the test spuriously. Skipping
                // consumes no RNG values, so the operation stream for all
                // previously-passing seeds is unchanged.
                if current_len > current_pruned as u64 * CHUNK_SIZE_BITS {
                    dirty.pop();
                    ground_truth.pop();
                    current_len -= 1;
                }
            }
            // Operation: PRUNE to random chunk boundary (10% probability)
            else if op_choice < PROB_PRUNE {
                // Calculate the maximum chunk we can prune to (keep at least 1 chunk of data)
                let total_chunks = (current_len / CHUNK_SIZE_BITS) as usize;
                let max_prune_chunk = total_chunks.saturating_sub(1);

                // Only prune if there's at least one unpruned complete chunk we can prune
                if max_prune_chunk > current_pruned {
                    // Randomly pick a chunk boundary to prune to (between current_pruned+1 and max)
                    let prune_chunk = rng.gen_range((current_pruned + 1)..=max_prune_chunk);
                    let prune_to = (prune_chunk as u64) * CHUNK_SIZE_BITS;

                    dirty.prune_to_bit(prune_to);
                    ground_truth.prune_to_bit(prune_to);
                    current_pruned = prune_chunk;
                }
            }
        }

        bitmap = dirty.commit(commit_num).unwrap();

        // Save checkpoint for verification
        checkpoints.push((commit_num, ground_truth.clone()));
    }

    // Verify all checkpoints match reconstructed states
    for (commit_num, checkpoint) in &checkpoints {
        let reconstructed = bitmap.get_at_commit(*commit_num).unwrap();

        assert_eq!(
            reconstructed.len(),
            checkpoint.len(),
            "Length mismatch at commit {commit_num}"
        );
        assert_eq!(
            reconstructed.pruned_chunks(),
            checkpoint.pruned_chunks(),
            "Pruned chunks mismatch at commit {commit_num}"
        );

        // Verify all accessible bits (bits in pruned chunks are gone from both)
        let start_bit = reconstructed.pruned_chunks() as u64 * Prunable::<4>::CHUNK_SIZE_BITS;
        for i in start_bit..checkpoint.len() {
            let expected = checkpoint.get_bit(i);
            let actual = reconstructed.get_bit(i);
            assert_eq!(
                actual, expected,
                "Bit {i} mismatch at commit {commit_num} (expected {expected}, got {actual})"
            );
        }
    }
}

/// Run property-based tests with multiple seeds to explore the state space.
///
/// Exercises 101 distinct random operation sequences (seeds 0 through 100) so
/// that historical reconstruction is checked across a wide variety of scenarios.
#[test]
fn test_randomized_with_multiple_seeds() {
    use rand::{rngs::StdRng, SeedableRng};
    (0u64..=100).for_each(|seed| {
        let mut rng = StdRng::seed_from_u64(seed);
        test_randomized_helper(&mut rng);
    });
}

#[test]
#[should_panic(expected = "bit pruned: 31")]
fn test_pop_into_pruned_region_panics() {
    // Build a 64-bit bitmap out of two full chunks...
    let two_chunks = {
        let fresh: BitMap<4> = BitMap::new();
        fresh
            .apply_batch(1, |dirty| {
                dirty.push_chunk(&[0xFF; 4]);
                dirty.push_chunk(&[0xFF; 4]);
            })
            .unwrap()
    };

    // ...then drop the first chunk via pruning.
    let pruned = two_chunks
        .apply_batch(2, |dirty| {
            dirty.prune_to_bit(32);
        })
        .unwrap();

    // State check: len=64 with 1 pruned chunk (32 pruned bits + 32 live bits).
    assert_eq!(pruned.len(), 64);
    assert_eq!(pruned.pruned_chunks(), 1);

    // Popping 33 times removes every live bit and then touches a pruned bit,
    // which must panic with the expected "bit pruned" message.
    let mut dirty = pruned.into_dirty();
    (0..33).for_each(|_| {
        dirty.pop();
    });
}

#[test]
fn test_commit_u64_max_is_reserved() {
    // u64::MAX is reserved and must be rejected as a commit number.
    let rejected = {
        let fresh: BitMap<4> = BitMap::new();
        fresh.apply_batch(u64::MAX, |dirty| {
            dirty.push(true);
        })
    };
    assert!(matches!(rejected, Err(Error::ReservedCommitNumber)));

    // u64::MAX - 1 is the largest commit number that is accepted.
    let accepted = {
        let fresh: BitMap<4> = BitMap::new();
        fresh.apply_batch(u64::MAX - 1, |dirty| {
            dirty.push(true);
        })
    };
    assert!(accepted.is_ok());
    let bitmap = accepted.unwrap();
    assert_eq!(bitmap.len(), 1);

    // Lookups at the reserved number must fail...
    assert!(bitmap.get_at_commit(u64::MAX).is_none());

    // ...while the largest real commit remains reachable.
    assert!(bitmap.get_at_commit(u64::MAX - 1).is_some());
}