1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
// CONTRACT: silu-kernel-v1.yaml
// HASH: sha256:e1f2a3b4c5d67890
// Generated by: pv probar --binding
// DO NOT EDIT — regenerate with `pv probar --binding`
use aprender::nn::functional::silu_scalar;
use proptest::prelude::*;
proptest! {
    #![proptest_config(ProptestConfig::with_cases(256))]

    /// FALSIFY-SILU-001: SiLU(0) = 0 (invariant)
    /// Formal: SiLU(0) = 0 * sigmoid(0) = 0
    ///
    /// The leading `x` factor makes SiLU(0) exactly zero; the 1e-7 absolute
    /// tolerance only allows for a non-literal f32 evaluation of the product.
    #[test]
    fn prop_at_zero(
        // proptest requires at least one strategy argument; the value is unused.
        _dummy in 0..1i32
    ) {
        let val = silu_scalar(0.0);
        prop_assert!(
            val.abs() < 1e-7,
            "SiLU(0) = {val}, expected 0.0"
        );
    }

    /// FALSIFY-SILU-002: SiLU monotonic for x >= 0 (monotonicity)
    /// Formal: x >= y >= 0 → SiLU(x) >= SiLU(y)
    #[test]
    fn prop_monotonic_positive(
        x in 0.0f32..100.0,
        y in 0.0f32..100.0
    ) {
        // Order the two independent draws so `big >= small`.
        let (big, small) = if x >= y { (x, y) } else { (y, x) };
        let silu_big = silu_scalar(big);
        let silu_small = silu_scalar(small);
        // 1e-6 slack absorbs f32 rounding when big ≈ small.
        prop_assert!(
            silu_big >= silu_small - 1e-6,
            "SiLU({big})={silu_big} < SiLU({small})={silu_small} — monotonicity violated"
        );
    }

    /// FALSIFY-SILU-003: SiLU bounded below (bound)
    /// Formal: SiLU(x) >= -0.279 (global minimum ≈ -0.2784 at x ≈ -1.278)
    #[test]
    fn prop_bounded_below(
        x in -100.0f32..100.0
    ) {
        let val = silu_scalar(x);
        // -0.28 leaves a small safety margin under the analytic minimum (≈ -0.2784).
        prop_assert!(
            val >= -0.28,
            "SiLU({x}) = {val} < -0.28 — lower bound violated"
        );
    }

    /// FALSIFY-SILU-004: SiLU approaches x for large positive x (asymptotic)
    /// Formal: SiLU(x) / x → 1 as x → +∞
    #[test]
    fn prop_asymptotic(
        // x >= 10 keeps sigmoid(x) within ~5e-5 of 1, well inside the 1% tolerance.
        x in 10.0f32..100.0
    ) {
        let val = silu_scalar(x);
        let ratio = val / x;
        prop_assert!(
            (ratio - 1.0).abs() < 0.01,
            "SiLU({x})/x = {ratio}, expected ~1.0"
        );
    }

    /// FALSIFY-SILU-005: SIMD matches scalar — both paths produce same result
    /// (tests Tensor-based silu vs scalar silu_scalar)
    #[test]
    fn prop_simd_equivalence(
        // Lengths 1..32 cover remainder-only and (presumably) full-lane inputs.
        // NOTE(review): confirm lane width against the aprender kernel dispatch.
        data in proptest::collection::vec(-10.0f32..10.0, 1..32usize)
    ) {
        let tensor = aprender::autograd::Tensor::from_slice(&data);
        let result = aprender::nn::functional::silu(&tensor);
        // FIX: `zip` silently truncates to the shorter iterator, so a silu()
        // that returned too few elements would previously pass vacuously.
        // Pin the element count before the element-wise comparison.
        // NOTE(review): this file is generated — upstream this fix to the
        // silu-kernel-v1.yaml contract / generator before regenerating.
        prop_assert_eq!(
            data.len(),
            result.data().len(),
            "silu() changed element count"
        );
        for (i, (&input, &output)) in data.iter().zip(result.data().iter()).enumerate() {
            let expected = silu_scalar(input);
            let diff = (output - expected).abs();
            prop_assert!(
                diff < 1e-6,
                "Tensor vs scalar mismatch at [{i}]: tensor={output}, scalar={expected}"
            );
        }
    }
}