neat 1.0.2

A crate for working with NEAT (NeuroEvolution of Augmenting Topologies) in Rust.
Documentation
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
use std::collections::HashMap;

use crate::{activation::builtin::linear_activation, *};
use genetic_rs::prelude::rand::{rngs::StdRng, SeedableRng};
use rayon::prelude::*;

/// Colors used by the invariant-checking DFS in [`dfs`].
/// A neuron absent from the visited map is "white" (not yet reached).
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
enum GraphCheckState {
    /// "Gray": on the current DFS stack; reaching it again means a cycle.
    CurrentCycle,
    /// "Black": fully explored and known to lead to no cycle.
    Checked,
}

/// Panics if `net` contains a cycle or a hidden neuron unreachable from any input.
fn assert_graph_invariants<const I: usize, const O: usize>(net: &NeuralNetwork<I, O>) {
    // Explore the whole graph starting from every input neuron.
    let mut visited = HashMap::new();
    for input in 0..I {
        dfs(net, NeuronLocation::Input(input), &mut visited);
    }

    // Any hidden neuron the traversal never reached is hanging (has no
    // path from the inputs) and violates the graph invariant.
    for idx in 0..net.hidden_layers.len() {
        let loc = NeuronLocation::Hidden(idx);
        assert!(visited.contains_key(&loc), "hanging neuron: {loc:?}");
    }
}

// simple colored dfs for checking graph invariants.
// Panics when it re-enters a neuron that is still on the current DFS stack.
fn dfs<const I: usize, const O: usize>(
    net: &NeuralNetwork<I, O>,
    loc: NeuronLocation,
    visited: &mut HashMap<NeuronLocation, GraphCheckState>,
) {
    match visited.get(&loc) {
        // still on the stack: we looped back onto our own path.
        Some(GraphCheckState::CurrentCycle) => panic!("cycle detected on {loc:?}"),
        // already fully explored; nothing new to learn.
        Some(GraphCheckState::Checked) => return,
        None => {}
    }

    // Mark gray while descendants are being explored...
    visited.insert(loc, GraphCheckState::CurrentCycle);
    for next in net[loc].outputs.keys() {
        dfs(net, *next, visited);
    }
    // ...and black once the entire subtree is known acyclic.
    visited.insert(loc, GraphCheckState::Checked);
}

/// Freshly recomputed incoming-connection counts, compared against the
/// network's cached `input_count` fields in `assert_cache_consistency`.
struct InputCountsCache<const O: usize> {
    // one tally per hidden neuron (indexed like `net.hidden_layers`)
    hidden_layers: Vec<usize>,
    // one tally per output neuron
    output: [usize; O],
}

impl<const O: usize> InputCountsCache<O> {
    /// Records one incoming connection terminating at `loc`.
    /// Input neurons can never be a connection target, so that case panics.
    fn tally(&mut self, loc: NeuronLocation) {
        match loc {
            NeuronLocation::Hidden(i) => self.hidden_layers[i] += 1,
            NeuronLocation::Output(i) => self.output[i] += 1,
            NeuronLocation::Input(_) => panic!("input neurons can't have inputs"),
        }
    }
}

// asserts that cached/tracked values are correct. mainly only used for
// input count and such
fn assert_cache_consistency<const I: usize, const O: usize>(net: &NeuralNetwork<I, O>) {
    let mut cache = InputCountsCache {
        hidden_layers: vec![0; net.hidden_layers.len()],
        output: [0; O],
    };

    // Recount every edge in the network: outgoing edges of the input layer...
    for input in 0..I {
        for target in net[NeuronLocation::Input(input)].outputs.keys() {
            cache.tally(*target);
        }
    }
    // ...and of every hidden neuron.
    for neuron in &net.hidden_layers {
        for target in neuron.outputs.keys() {
            cache.tally(*target);
        }
    }

    // The recomputed tallies must match the cached `input_count` on each neuron.
    for (i, count) in cache.hidden_layers.into_iter().enumerate() {
        if count == 0 {
            // redundant because of graph invariants, but better safe than sorry
            panic!("found hanging neuron");
        }
        assert_eq!(count, net.hidden_layers[i].input_count);
    }

    for (i, count) in cache.output.into_iter().enumerate() {
        assert_eq!(count, net.output_layer[i].input_count);
    }
}

/// Umbrella check run after every operation under test: the network must be
/// an acyclic graph with no hanging neurons, and its cached input counts
/// must agree with the actual topology.
fn assert_network_invariants<const I: usize, const O: usize>(net: &NeuralNetwork<I, O>) {
    assert_graph_invariants(net);
    assert_cache_consistency(net);
    // TODO other invariants
}

const TEST_COUNT: u64 = 1000;

/// Runs `test` once per deterministic seed in `0..TEST_COUNT`, in parallel.
/// Seeded rngs keep failures reproducible while still covering many cases.
fn rng_test(test: impl Fn(&mut StdRng) + Sync) {
    (0..TEST_COUNT)
        .into_par_iter()
        .for_each(|seed| test(&mut StdRng::seed_from_u64(seed)));
}

#[test]
fn create_network() {
    // A freshly constructed network must already satisfy every invariant.
    rng_test(|rng| {
        assert_network_invariants(&NeuralNetwork::<10, 10>::new(rng));
    });
}

#[test]
fn split_connection() {
    // rng doesn't matter here since it's just adding bias in eval
    let mut rng = StdRng::seed_from_u64(0xabcdef);

    let mut net = NeuralNetwork::<1, 1>::new(&mut rng);
    assert_network_invariants(&net);

    // Splitting Input(0) -> Output(0) should insert a new Hidden(0) in between.
    let conn = Connection {
        from: NeuronLocation::Input(0),
        to: NeuronLocation::Output(0),
    };
    net.split_connection(conn, &mut rng);
    assert_network_invariants(&net);

    // Input now feeds the new hidden neuron...
    let input_target = *net.input_layer[0].outputs.keys().next().unwrap();
    assert_eq!(input_target, NeuronLocation::Hidden(0));

    // ...which in turn feeds the output neuron.
    let hidden_target = *net.hidden_layers[0].outputs.keys().next().unwrap();
    assert_eq!(hidden_target, NeuronLocation::Output(0));
}

#[test]
fn add_connection() {
    let mut rng = StdRng::seed_from_u64(0xabcdef);
    // Hand-built 1-in/1-out network with no connections at all, so every
    // add_connection outcome below is fully determined by this test.
    let mut net = NeuralNetwork {
        input_layer: [Neuron::new_with_activation(
            HashMap::new(),
            activation_fn!(linear_activation),
            &mut rng,
        )],
        hidden_layers: vec![],
        output_layer: [Neuron::new_with_activation(
            HashMap::new(),
            activation_fn!(linear_activation),
            &mut rng,
        )],
    };
    assert_network_invariants(&net);

    let mut conn = Connection {
        from: NeuronLocation::Input(0),
        to: NeuronLocation::Output(0),
    };
    // First insertion of a new edge succeeds...
    assert!(net.add_connection(conn, 0.1));
    assert_network_invariants(&net);

    // ...but adding the exact same edge again is rejected.
    assert!(!net.add_connection(conn, 0.1));
    assert_network_invariants(&net);

    let mut outputs = HashMap::new();
    outputs.insert(NeuronLocation::Output(0), 0.1);
    let n = Neuron::new_with_activation(outputs, activation_fn!(linear_activation), &mut rng);

    net.add_neuron(n.clone());
    // temporarily broken invariants bc of hanging neuron

    // Wiring Input(0) -> Hidden(0) reconnects the hanging neuron.
    conn.to = NeuronLocation::Hidden(0);
    assert!(net.add_connection(conn, 0.1));
    assert_network_invariants(&net);

    net.add_neuron(n);

    conn.to = NeuronLocation::Hidden(1);
    assert!(net.add_connection(conn, 0.1));
    assert_network_invariants(&net);

    // Hidden -> hidden edges are allowed as long as they stay acyclic.
    conn.from = NeuronLocation::Hidden(0);
    assert!(net.add_connection(conn, 0.1));
    assert_network_invariants(&net);

    // Splitting Hidden(0) -> Hidden(1) creates Hidden(2) between them.
    net.split_connection(conn, &mut rng);
    assert_network_invariants(&net);

    conn.from = NeuronLocation::Hidden(2);
    conn.to = NeuronLocation::Hidden(0);

    // Hidden(0) already reaches Hidden(2), so this edge would close a cycle
    // and must be rejected.
    assert!(!net.add_connection(conn, 0.1));
    assert_network_invariants(&net);

    // random stress testing
    rng_test(|rng| {
        let mut net = NeuralNetwork::<10, 10>::new(rng);
        assert_network_invariants(&net);
        for _ in 0..50 {
            net.add_random_connection(10, rng);
            assert_network_invariants(&net);
        }
    });
}

#[test]
fn add_connection_converging_paths() {
    // Validates that is_connection_safe allows "diamond" DAG topologies where a single
    // node has two outgoing paths that both reach the same downstream neuron:
    //   Hidden(0) -> Output(0)               (direct)
    //   Hidden(0) -> Hidden(1) -> Output(0)  (indirect)
    // Adding Input(0) -> Hidden(0) is safe (no cycle); the original dfs falsely
    // rejected it by treating the second visit to Output(0) as a cycle.
    let mut rng = StdRng::seed_from_u64(0xabcdef);

    // All neurons start with no outgoing connections; edges are added explicitly
    // below so the diamond shape is exact.
    let mut net = NeuralNetwork {
        input_layer: [Neuron::new_with_activation(
            HashMap::new(),
            activation_fn!(linear_activation),
            &mut rng,
        )],
        hidden_layers: vec![
            Neuron::new_with_activation(
                HashMap::new(),
                activation_fn!(linear_activation),
                &mut rng,
            ),
            Neuron::new_with_activation(
                HashMap::new(),
                activation_fn!(linear_activation),
                &mut rng,
            ),
        ],
        output_layer: [Neuron::new_with_activation(
            HashMap::new(),
            activation_fn!(linear_activation),
            &mut rng,
        )],
    };

    // Build the diamond: Hidden(0) -> Output(0) and Hidden(0) -> Hidden(1) -> Output(0)
    assert!(net.add_connection(
        Connection {
            from: NeuronLocation::Hidden(0),
            to: NeuronLocation::Output(0)
        },
        1.0
    ));
    assert!(net.add_connection(
        Connection {
            from: NeuronLocation::Hidden(1),
            to: NeuronLocation::Output(0)
        },
        1.0
    ));
    assert!(net.add_connection(
        Connection {
            from: NeuronLocation::Hidden(0),
            to: NeuronLocation::Hidden(1)
        },
        1.0
    ));

    // Input(0) -> Hidden(0) is safe (no cycle), but the original dfs falsely rejected it
    // because traversing from Hidden(0) visits Output(0) via the direct path first, then
    // encounters Output(0) again via Hidden(1), treating the revisit as a cycle.
    assert!(net.add_connection(
        Connection {
            from: NeuronLocation::Input(0),
            to: NeuronLocation::Hidden(0)
        },
        1.0
    ));

    // Final shape must satisfy all invariants (acyclic, no hanging neurons).
    assert_network_invariants(&net);
}

#[test]
fn remove_connection() {
    let mut rng = StdRng::seed_from_u64(0xabcdef);
    // Hand-built network: Input(0) feeds both Output(0) and Hidden(0).
    // The `input_count: 1` fields below are set to match those two edges
    // so the cache-consistency check passes from the start.
    let mut net = NeuralNetwork {
        input_layer: [Neuron::new_with_activation(
            HashMap::from([
                (NeuronLocation::Output(0), 0.1),
                (NeuronLocation::Hidden(0), 1.0),
            ]),
            activation_fn!(linear_activation),
            &mut rng,
        )],
        hidden_layers: vec![Neuron {
            input_count: 1,
            outputs: HashMap::new(), // not sure whether i want neurons with no outputs to break the invariant/be removed
            bias: 0.0,
            activation_fn: activation_fn!(linear_activation),
        }],
        output_layer: [Neuron {
            input_count: 1,
            outputs: HashMap::new(),
            bias: 0.0,
            activation_fn: activation_fn!(linear_activation),
        }],
    };
    assert_network_invariants(&net);

    // Removing Input(0) -> Output(0) returns false here: per this assertion,
    // the removal is refused (presumably because it would orphan the output —
    // TODO confirm against remove_connection's contract).
    assert!(!net.remove_connection(Connection {
        from: NeuronLocation::Input(0),
        to: NeuronLocation::Output(0)
    }));
    assert_network_invariants(&net);

    // Removing Input(0) -> Hidden(0) succeeds, and invariants still hold
    // afterwards (the hanging hidden neuron must be handled internally).
    assert!(net.remove_connection(Connection {
        from: NeuronLocation::Input(0),
        to: NeuronLocation::Hidden(0)
    }));
    assert_network_invariants(&net);

    // Random stress testing: interleave additions and removals.
    rng_test(|rng| {
        let mut net = NeuralNetwork::<10, 10>::new(rng);
        assert_network_invariants(&net);

        for _ in 0..70 {
            net.add_random_connection(10, rng);
            assert_network_invariants(&net);

            if rng.random_bool(0.25) {
                // rng allows network to form more complex edge cases.
                net.remove_random_connection(5, rng);
                // don't need to remove neuron since this
                // method handles it automatically.
                assert_network_invariants(&net);
            }
        }
    });
}

// TODO remove_neuron test

#[test]
fn predict_basic() {
    // build a minimal 1-in / 1-out network with linear activations and zero bias
    // so the output is exactly: input * weight.
    let weight = 0.5_f32;
    let net = NeuralNetwork {
        input_layer: [Neuron {
            input_count: 0,
            outputs: HashMap::from([(NeuronLocation::Output(0), weight)]),
            bias: 0.0,
            activation_fn: activation_fn!(linear_activation),
        }],
        hidden_layers: vec![],
        output_layer: [Neuron {
            input_count: 1,
            outputs: HashMap::new(),
            bias: 0.0,
            activation_fn: activation_fn!(linear_activation),
        }],
    };

    // 2.0 in -> 2.0 * weight out, within float tolerance.
    let inputs = [2.0_f32];
    let expected = inputs[0] * weight;
    let outputs = net.predict(inputs);
    assert!(
        (outputs[0] - expected).abs() < 1e-5,
        "expected {expected}, got {}",
        outputs[0]
    );

    // zero input should yield zero output (no bias).
    let outputs_zero = net.predict([0.0]);
    assert!(
        outputs_zero[0].abs() < 1e-5,
        "expected 0.0, got {}",
        outputs_zero[0]
    );

    // stress-test with random networks using default (sigmoid) output activations.
    // use a sequential loop to avoid nested rayon parallelism (predict uses rayon internally).
    for seed in 0..TEST_COUNT {
        let net = NeuralNetwork::<5, 5>::new(&mut StdRng::seed_from_u64(seed));
        // sigmoid outputs are in the open interval (0, 1)
        for &v in &net.predict([0.1, 0.2, 0.3, 0.4, 0.5]) {
            assert!(v > 0.0 && v < 1.0, "sigmoid output {v} out of range (0, 1)");
        }
    }
}

#[test]
fn predict_consistency() {
    // repeated calls with the same inputs must return results within floating-point
    // tolerance. exact equality is not guaranteed because the parallel atomic
    // accumulation order may vary between runs.
    // use a sequential loop to avoid nested rayon parallelism (predict uses rayon internally).
    for seed in 0..TEST_COUNT {
        let mut rng = StdRng::seed_from_u64(seed);
        let net = NeuralNetwork::<5, 3>::new(&mut rng);
        let inputs = [1.0, -1.0, 0.5, 0.0, -0.5];

        // First prediction is the baseline; five reruns must agree with it.
        let baseline = net.predict(inputs);
        for _ in 0..5 {
            let rerun = net.predict(inputs);
            for (a, b) in baseline.iter().zip(rerun.iter()) {
                assert!(
                    (a - b).abs() < 1e-5,
                    "predict returned inconsistent results: {a} vs {b}"
                );
            }
        }
    }
}

#[test]
fn predict_parallel_no_deadlock() {
    // build a network with a more complex topology via mutation, then run many
    // parallel predictions to verify that the internal parallel evaluation path
    // completes without deadlocks or race conditions.
    let mut rng = StdRng::seed_from_u64(0xdeadbeef);
    let settings = MutationSettings::default();
    let mut net = NeuralNetwork::<4, 2>::new(&mut rng);
    for _ in 0..20 {
        net.mutate(&settings, 0.5, &mut rng);
    }

    // 100 predictions in parallel against the same shared network.
    let results: Vec<[f32; 2]> = (0..100_u32)
        .into_par_iter()
        .map(|i| net.predict([i as f32 * 0.01, 0.5, -0.3, 1.0]))
        .collect();

    // all outputs should be finite (no NaN / inf from race conditions).
    for row in &results {
        for &v in row {
            assert!(v.is_finite(), "non-finite output {v} detected");
        }
    }
}

const NUM_MUTATIONS: usize = 50;
const MUTATION_RATE: f32 = 0.25;

#[test]
fn mutate() {
    // Invariants must survive many consecutive mutations of a random network.
    rng_test(|rng| {
        let settings = MutationSettings::default();

        let mut net = NeuralNetwork::<10, 10>::new(rng);
        assert_network_invariants(&net);

        for _ in 0..NUM_MUTATIONS {
            net.mutate(&settings, MUTATION_RATE, rng);
            assert_network_invariants(&net);
        }
    });
}

#[test]
fn crossover() {
    // Cross two random networks back and forth; every child must be a valid
    // network, and children become the parents of the next generation.
    rng_test(|rng| {
        let mut net1 = NeuralNetwork::<10, 10>::new(rng);
        assert_network_invariants(&net1);

        let mut net2 = NeuralNetwork::<10, 10>::new(rng);
        assert_network_invariants(&net2);

        let settings = ReproductionSettings::default();

        for _ in 0..NUM_MUTATIONS {
            let child_a = net1.crossover(&net2, &settings, MUTATION_RATE, rng);
            assert_network_invariants(&child_a);

            let child_b = net2.crossover(&net1, &settings, MUTATION_RATE, rng);
            assert_network_invariants(&child_b);

            net1 = child_a;
            net2 = child_b;
        }
    });
}

#[cfg(feature = "serde")]
mod serde {
    use super::rng_test;
    use crate::*;

    /// Round-trips random networks through JSON and requires exact equality.
    #[test]
    fn full_serde() {
        rng_test(|rng| {
            let net1 = NeuralNetwork::<10, 10>::new(rng);

            // Serialize through serde_path_to_error so a failure pinpoints the
            // exact field path instead of a bare serde_json error.
            let mut buf = Vec::new();
            let writer = std::io::Cursor::new(&mut buf);
            let mut serializer = serde_json::Serializer::new(writer);
            serde_path_to_error::serialize(&net1, &mut serializer).unwrap();

            // Reuse the bytes just written instead of serializing a second time
            // with serde_json::to_string (the original did both, redundantly).
            let serialized = String::from_utf8(buf).unwrap();
            let net2: NeuralNetwork<10, 10> = serde_json::from_str(&serialized).unwrap();
            assert_eq!(net1, net2);
        });
    }
}

mod debug {
    use crate::*;

    #[allow(dead_code)]
    /// Debug helper: returns the neuron locations forming a cycle in `net`,
    /// or `None` if the graph is acyclic. Unlike the panicking invariant
    /// checks above, this reports the actual cycle path for diagnosis.
    fn find_cycle_helper<const I: usize, const O: usize>(
        net: &NeuralNetwork<I, O>,
    ) -> Option<Vec<NeuronLocation>> {
        use std::collections::HashMap as HM;
        // DFS with an explicit path stack. `visited` maps a location to
        // `true` while it is on the current path and `false` once cleared.
        fn dfs<const I: usize, const O: usize>(
            net: &NeuralNetwork<I, O>,
            loc: NeuronLocation,
            visited: &mut HM<NeuronLocation, bool>,
            path: &mut Vec<NeuronLocation>,
        ) -> Option<Vec<NeuronLocation>> {
            if let Some(&in_progress) = visited.get(&loc) {
                if in_progress {
                    // Back-edge: slice the current path from the repeated
                    // node onward to report the cycle itself.
                    let s = path.iter().position(|&x| x == loc).unwrap();
                    return Some(path[s..].to_vec());
                }
                return None;
            }
            visited.insert(loc, true);
            path.push(loc);
            for loc2 in net[loc].outputs.keys() {
                if let Some(c) = dfs(net, *loc2, visited, path) {
                    return Some(c);
                }
            }
            path.pop();
            visited.insert(loc, false);
            None
        }
        let mut visited = HM::new();
        // Search from every input neuron first...
        for i in 0..I {
            if let Some(c) = dfs(net, NeuronLocation::Input(i), &mut visited, &mut vec![]) {
                return Some(c);
            }
        }
        // ...then from any hidden neuron the input sweep never reached, so
        // cycles disconnected from the inputs are still found.
        for i in 0..net.hidden_layers.len() {
            let loc = NeuronLocation::Hidden(i);
            if !visited.contains_key(&loc) {
                if let Some(c) = dfs(net, loc, &mut visited, &mut vec![]) {
                    return Some(c);
                }
            }
        }
        None
    }

    // Ignored diagnostic: narrows down whether cycles come from the crossover
    // merge itself (mutation_passes = 0) or from the mutation passes that
    // follow it. Prints findings and asserts that neither path makes cycles.
    #[ignore]
    #[test]
    fn debug_locate_cycle_source() {
        // Run with no mutations to see if remove_cycles itself fails
        let mut settings_no_mut = ReproductionSettings::default();
        settings_no_mut.mutation_passes = 0;

        let mut found_no_mut = false;
        let mut found_with_mut = false;

        // Phase 1: crossover only, no mutation passes.
        'outer: for seed in 0..300u64 {
            let mut rng = StdRng::seed_from_u64(seed);
            let mut net1 = NeuralNetwork::<10, 10>::new(&mut rng);
            let mut net2 = NeuralNetwork::<10, 10>::new(&mut rng);
            for iter in 0..100usize {
                let a = net1.crossover(&net2, &settings_no_mut, 0.25, &mut rng);
                let b = net2.crossover(&net1, &settings_no_mut, 0.25, &mut rng);
                if let Some(cycle) = find_cycle_helper(&a) {
                    println!(
                        "remove_cycles FAILED: seed={} iter={} (a): {:?}",
                        seed, iter, cycle
                    );
                    found_no_mut = true;
                    break 'outer;
                }
                if let Some(cycle) = find_cycle_helper(&b) {
                    println!(
                        "remove_cycles FAILED: seed={} iter={} (b): {:?}",
                        seed, iter, cycle
                    );
                    found_no_mut = true;
                    break 'outer;
                }
                net1 = a;
                net2 = b;
            }
        }
        if !found_no_mut {
            println!(
                "remove_cycles seems correct (no cycles in 300 seeds x 100 iters without mutation)"
            );
        }

        // Run with mutations to see if mutation introduces cycles
        let settings_with_mut = ReproductionSettings::default(); // mutation_passes = 3

        // Phase 2: same search, but with the default mutation passes enabled.
        'outer2: for seed in 0..300u64 {
            let mut rng = StdRng::seed_from_u64(seed);
            let mut net1 = NeuralNetwork::<10, 10>::new(&mut rng);
            let mut net2 = NeuralNetwork::<10, 10>::new(&mut rng);
            for iter in 0..50usize {
                let a = net1.crossover(&net2, &settings_with_mut, 0.25, &mut rng);
                let b = net2.crossover(&net1, &settings_with_mut, 0.25, &mut rng);
                if let Some(cycle) = find_cycle_helper(&a) {
                    println!(
                        "Mutation introduced cycle: seed={} iter={} (a): {:?}",
                        seed, iter, cycle
                    );
                    found_with_mut = true;
                    break 'outer2;
                }
                if let Some(cycle) = find_cycle_helper(&b) {
                    println!(
                        "Mutation introduced cycle: seed={} iter={} (b): {:?}",
                        seed, iter, cycle
                    );
                    found_with_mut = true;
                    break 'outer2;
                }
                net1 = a;
                net2 = b;
            }
        }
        if !found_with_mut {
            println!("Mutations don't introduce cycles either (no cycles found)");
        }

        assert!(!found_no_mut, "remove_cycles is broken");
        assert!(
            !found_with_mut,
            "mutation is adding cycles (is_connection_safe is broken)"
        );
    }

    // Ignored diagnostic: replays a previously-observed failure (seed=0,
    // iteration 47) and re-runs that one crossover with 0..=3 mutation passes
    // to see which pass count first produces a cycle. NOTE(review): the fresh
    // seeded rngs used below do not replay the original failing rng stream,
    // so this is an approximation — see debug_find_bad_connection2.
    #[ignore]
    #[test]
    fn debug_find_bad_connection() {
        // Reproduce: seed=0, iter=47 introduces a cycle
        let mut rng = StdRng::seed_from_u64(0);
        let mut net1 = NeuralNetwork::<10, 10>::new(&mut rng);
        let mut net2 = NeuralNetwork::<10, 10>::new(&mut rng);
        let settings = ReproductionSettings::default();

        // Fast-forward 47 crossover generations to reach the failing state.
        for _iter in 0..47usize {
            let a = net1.crossover(&net2, &settings, 0.25, &mut rng);
            let b = net2.crossover(&net1, &settings, 0.25, &mut rng);
            net1 = a;
            net2 = b;
        }

        // Now do crossover 47 (iter=47) step by step
        // net1.crossover(&net2...) produces 'a'
        // Try with mutation_passes=0 to see if problem is in merge or mutations
        let mut settings_0 = settings.clone();
        settings_0.mutation_passes = 0;
        let a0 = net1.crossover(&net2, &settings_0, 0.25, &mut StdRng::seed_from_u64(47_000));
        let cyc0 = find_cycle_helper(&a0);
        println!("iter=47, mutation_passes=0 cycle: {:?}", cyc0);

        let mut settings_1 = settings.clone();
        settings_1.mutation_passes = 1;
        let a1 = net1.crossover(&net2, &settings_1, 0.25, &mut StdRng::seed_from_u64(47_000));
        let cyc1 = find_cycle_helper(&a1);
        println!("iter=47, mutation_passes=1 cycle: {:?}", cyc1);

        let mut settings_2 = settings.clone();
        settings_2.mutation_passes = 2;
        let a2 = net1.crossover(&net2, &settings_2, 0.25, &mut StdRng::seed_from_u64(47_000));
        let cyc2 = find_cycle_helper(&a2);
        println!("iter=47, mutation_passes=2 cycle: {:?}", cyc2);

        let a3 = net1.crossover(&net2, &settings, 0.25, &mut StdRng::seed_from_u64(47_000));
        let cyc3 = find_cycle_helper(&a3);
        println!("iter=47, mutation_passes=3 cycle: {:?}", cyc3);

        // Also check net2.crossover(net1)
        let b0 = net2.crossover(&net1, &settings_0, 0.25, &mut StdRng::seed_from_u64(47_001));
        println!(
            "iter=47 b, mutation_passes=0 cycle: {:?}",
            find_cycle_helper(&b0)
        );
        let b3 = net2.crossover(&net1, &settings, 0.25, &mut StdRng::seed_from_u64(47_001));
        println!(
            "iter=47 b, mutation_passes=3 cycle: {:?}",
            find_cycle_helper(&b3)
        );
    }

    // Ignored diagnostic: like debug_find_bad_connection, but keeps using the
    // ONE rng stream throughout so iteration 47 is replayed exactly as in the
    // original failure (StdRng cannot be cloned to snapshot its state).
    #[ignore]
    #[test]
    fn debug_find_bad_connection2() {
        // Reproduce: seed=0, iter=47 introduces a cycle
        // Must use the SAME rng throughout
        let mut rng = StdRng::seed_from_u64(0);
        let mut net1 = NeuralNetwork::<10, 10>::new(&mut rng);
        let mut net2 = NeuralNetwork::<10, 10>::new(&mut rng);
        let settings = ReproductionSettings::default();

        // Fast-forward 47 generations with the shared rng stream.
        for _iter in 0..47usize {
            let a = net1.crossover(&net2, &settings, 0.25, &mut rng);
            let b = net2.crossover(&net1, &settings, 0.25, &mut rng);
            net1 = a;
            net2 = b;
        }

        println!("net1 hidden len: {}", net1.hidden_layers.len());
        println!("net2 hidden len: {}", net2.hidden_layers.len());

        // Now at iteration 47, the actual test does:
        // a = net1.crossover(&net2, ...)
        // b = net2.crossover(&net1, ...)
        // And the cycle shows up in 'a'

        // Test 'a' with 0, 1, 2, 3 mutation passes, using the current rng state
        let settings_3 = settings.clone();

        // We need separate snapshots of rng state for each test
        // But since we can't clone StdRng, let's just do it sequentially

        // Can't replay rng here... let's just do the actual crossover and check

        // Let's just do the full mutation=3 crossover and check step-by-step
        // by doing the crossover merge first (mutation_passes=0)
        // and checking after each mutation pass
        let a = net1.crossover(&net2, &settings_3, 0.25, &mut rng);
        let cycle = find_cycle_helper(&a);
        println!("a (full crossover) cycle: {:?}", cycle);

        let b = net2.crossover(&net1, &settings_3, 0.25, &mut rng);
        let cycle_b = find_cycle_helper(&b);
        println!("b (full crossover) cycle: {:?}", cycle_b);
    }

    // Ignored diagnostic: hunts for a crossover-produced cycle, then probes
    // is_connection_safe on every edge of the found cycle to see whether the
    // safety check would have caught it.
    #[ignore]
    #[test]
    fn debug_add_connection_cycle() {
        // Try to find a case where add_connection adds a cyclic connection
        let settings_with_mut = ReproductionSettings::default();

        for seed in 0..100u64 {
            let mut rng = StdRng::seed_from_u64(seed);
            let mut net1 = NeuralNetwork::<10, 10>::new(&mut rng);
            let mut net2 = NeuralNetwork::<10, 10>::new(&mut rng);

            for iter in 0..50usize {
                let a = net1.crossover(&net2, &settings_with_mut, 0.25, &mut rng);
                let b = net2.crossover(&net1, &settings_with_mut, 0.25, &mut rng);

                // (An unfinished "double-check" loop over the input layer's
                // outputs used to live here; its body was empty, so it has
                // been removed as dead code.)

                let cycle_a = find_cycle_helper(&a);
                let cycle_b = find_cycle_helper(&b);

                if cycle_a.is_some() || cycle_b.is_some() {
                    println!(
                        "seed={} iter={} cycle_a={:?} cycle_b={:?}",
                        seed, iter, cycle_a, cycle_b
                    );
                    // Print the first cycle node's connections
                    if let Some(ref cycle) = cycle_a {
                        for &node in cycle {
                            println!(
                                "  {:?} -> {:?}",
                                node,
                                a[node].outputs.keys().collect::<Vec<_>>()
                            );
                        }
                    }
                    // Check if is_connection_safe would detect the cycle
                    if let Some(ref cycle) = cycle_a {
                        let n = cycle.len();
                        for i in 0..n {
                            let from = cycle[i];
                            let to = cycle[(i + 1) % n];
                            // The edge from -> to creates a cycle, so is_connection_safe should return false
                            // But does it?
                            let safe = a.is_connection_safe(Connection { from, to });
                            println!("  is_connection_safe({:?} -> {:?}) = {}", from, to, safe);
                            // If safe returns true, that means the check is broken
                            // (this connection already exists, but it should also detect the EXISTING cycle)
                        }
                    }
                    return;
                }

                net1 = a;
                net2 = b;
            }
        }
        println!("No cycles found!");
    }

    #[ignore]
    #[test]
    fn debug_split_creates_cycle() {
        // Diagnostic search (run with `cargo test -- --ignored`): repeatedly
        // crosses two random 10x10 networks and verifies that, for every
        // existing edge Hidden(i) -> to, `is_connection_safe` rejects the
        // reverse edge to -> Hidden(i) (it would form a 2-cycle). Prints the
        // adjacency lists and stops at the first counterexample found.
        let settings_with_mut = ReproductionSettings::default();

        for seed in 0..50u64 {
            let mut rng = StdRng::seed_from_u64(seed);
            let mut net1 = NeuralNetwork::<10, 10>::new(&mut rng);
            let mut net2 = NeuralNetwork::<10, 10>::new(&mut rng);

            for iter in 0..50usize {
                let a = net1.crossover(&net2, &settings_with_mut, 0.25, &mut rng);
                let b = net2.crossover(&net1, &settings_with_mut, 0.25, &mut rng);

                // Edges leaving input neurons are deliberately not checked:
                // the reverse of Input(i) -> to is to -> Input(i), which
                // `is_connection_safe` always rejects via its "to is an input"
                // guard, so those edges can never expose the inconsistency
                // this test hunts for. (The original code had a no-op loop
                // over the inputs explaining exactly this; it did no work and
                // has been removed.)

                let mut found_issue = false;
                for i in 0..a.hidden_layers.len() {
                    let from = NeuronLocation::Hidden(i);
                    let outputs = a[from].outputs.keys().cloned().collect::<Vec<_>>();
                    for to in outputs {
                        // Since `from` is Hidden(i) and the guard below already
                        // requires `to` to be hidden or input, the original
                        // `to.is_output()` / `from.is_input()` branches were
                        // unreachable (NeuronLocation variants are mutually
                        // exclusive); only the substantive check remains.
                        if to.is_hidden() || to.is_input() {
                            let rev_conn = Connection { from: to, to: from };
                            // Edge from -> to already exists, so the reverse
                            // edge must be reported as unsafe.
                            if a.is_connection_safe(rev_conn) {
                                println!("BUG: seed={} iter={}: is_connection_safe says {:?}->{:?} is SAFE but {:?}->{:?} exists!",
                                seed, iter, to, from, from, to);
                                // Dump both adjacency lists to make the
                                // 2-cycle visible in the output.
                                println!(
                                    "  {:?} outputs: {:?}",
                                    from,
                                    a[from].outputs.keys().collect::<Vec<_>>()
                                );
                                println!(
                                    "  {:?} outputs: {:?}",
                                    to,
                                    a[to].outputs.keys().collect::<Vec<_>>()
                                );
                                found_issue = true;
                            }
                        }
                    }
                }

                if found_issue {
                    return;
                }

                net1 = a;
                net2 = b;
            }
        }
        println!("No issues found!");
    }

    #[ignore]
    #[test]
    fn debug_find_mutation_pass_cycle() {
        // Diagnostic search (run with `cargo test -- --ignored`): crosses two
        // random networks repeatedly; when a cycle appears in the offspring,
        // prints each edge of the cycle together with what
        // `is_connection_safe` says about its reverse, plus the full
        // hidden-layer adjacency lists, then stops.
        //
        // NOTE(review): the original code cloned the default settings into a
        // `settings3` binding annotated "mutation_passes = 3", but never
        // modified any field — the clone was a no-op (clippy:
        // redundant_clone), so the default settings are used directly here.
        let settings = ReproductionSettings::default();

        'outer: for seed in 0..100u64 {
            let mut rng = StdRng::seed_from_u64(seed);
            let mut net1 = NeuralNetwork::<10, 10>::new(&mut rng);
            let mut net2 = NeuralNetwork::<10, 10>::new(&mut rng);

            for iter in 0..50usize {
                // We can't easily replay the RNG, so just check the result of
                // each crossover pass.
                let a = net1.crossover(&net2, &settings, 0.25, &mut rng);
                let b = net2.crossover(&net1, &settings, 0.25, &mut rng);

                // For a cycle C->D->E->C, is_connection_safe should report
                // each reversed edge as unsafe; printing its answer shows
                // whether the check agrees with the actual topology.
                if let Some(cycle) = find_cycle_helper(&a) {
                    println!("seed={} iter={} CYCLE in a: {:?}", seed, iter, cycle);
                    for i in 0..cycle.len() {
                        let from = cycle[i];
                        let to = cycle[(i + 1) % cycle.len()];
                        // This edge exists in the network.
                        println!("  Edge {:?} -> {:?} exists. Checking if is_connection_safe would allow adding it again:", from, to);
                        // NOTE(review): this guard looks like it tests the
                        // FORWARD edge's trivial-rejection conditions; for the
                        // reverse edge (to -> from) the trivial rejections
                        // would be `to.is_output()` / `from.is_input()` —
                        // confirm intent before relying on this filter.
                        if !to.is_input() && !from.is_output() {
                            let safe = a.is_connection_safe(Connection { from: to, to: from });
                            println!(
                                "    is_connection_safe({:?} -> {:?}) = {} (should be false for cycle)",
                                to, from, safe
                            );
                        }
                    }
                    println!("  Full network connections:");
                    for i in 0..a.hidden_layers.len() {
                        let n = &a[NeuronLocation::Hidden(i)];
                        println!(
                            "    Hidden({}) -> {:?}",
                            i,
                            n.outputs.keys().collect::<Vec<_>>()
                        );
                    }
                    break 'outer;
                }

                net1 = a;
                net2 = b;
            }
        }
        println!("Done searching");
    }
}