use super::*;
use crate::autograd::Tensor;

/// LORA-001: applying the adapter must preserve the base weight's shape.
#[test]
fn falsify_lora_001_output_shape() {
    let config = LoRAConfig::new(4, 1.0);
    let adapter = LoRAAdapter::new(8, 16, config);
    let base_weight = Tensor::new(&vec![0.1; 16 * 8], &[16, 8]);
    let result = adapter.apply(&base_weight);
    assert_eq!(
        result.shape(),
        &[16, 8],
        "FALSIFIED LORA-001: output shape {:?} != base shape [16, 8]",
        result.shape()
    );
}

/// LORA-002: a freshly constructed adapter must act as the identity on the
/// base weights, since B is zero-initialized (see the note below this test).
#[test]
fn falsify_lora_002_zero_init_identity() {
    let config = LoRAConfig::new(4, 1.0);
    let adapter = LoRAAdapter::new(8, 16, config);
    let base_data: Vec<f32> = (0..128).map(|i| i as f32 * 0.01).collect();
    let base_weight = Tensor::new(&base_data, &[16, 8]);
    let result = adapter.apply(&base_weight);
    for (i, (&r, &b)) in result.data().iter().zip(base_data.iter()).enumerate() {
        assert!(
            (r - b).abs() < 0.1,
            "FALSIFIED LORA-002: result[{i}]={r} far from base[{i}]={b} (zero B should preserve)"
        );
    }
}
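
// Background for LORA-002, assuming the standard LoRA construction
// W' = W + (alpha / r) * B * A with B zero-initialized: the update term is
// exactly zero at initialization, so `apply` should return the base weights
// unchanged. The 0.1 tolerance above is deliberately loose so the test only
// falsifies gross violations of this identity, not floating-point noise.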

/// LORA-003: the adapted weights must contain only finite values
/// (no NaN or infinity introduced by the low-rank update).
#[test]
fn falsify_lora_003_finite_output() {
    let config = LoRAConfig::new(4, 1.0);
    let adapter = LoRAAdapter::new(8, 16, config);
    let base_weight = Tensor::new(&vec![1.0; 128], &[16, 8]);
    let result = adapter.apply(&base_weight);
    for (i, &v) in result.data().iter().enumerate() {
        assert!(
            v.is_finite(),
            "FALSIFIED LORA-003: output[{i}] = {v} is not finite"
        );
    }
}

/// LORA-004: different alpha values must produce different scaling factors.
/// This checks the scaling at the config level as a proxy for alpha
/// affecting the adapter output.
#[test]
fn falsify_lora_004_scaling_affects_output() {
    let config1 = LoRAConfig::new(4, 1.0);
    let config2 = LoRAConfig::new(4, 10.0);
    assert!(
        (config1.scaling() - config2.scaling()).abs() > 1e-6,
        "FALSIFIED LORA-004: different alpha produces same scaling"
    );
}
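
// A hedged follow-up sketch, not part of the original suite: it assumes the
// standard LoRA convention scaling = alpha / rank. The test is `#[ignore]`d
// so it never gates CI on that assumption; enable it only if this crate's
// `scaling()` is confirmed to follow the convention.
#[test]
#[ignore = "assumes scaling() = alpha / rank; confirm the convention before enabling"]
fn lora_004_sketch_scaling_convention() {
    // Under the alpha / rank convention: 8.0 / 4 = 2.0.
    let config = LoRAConfig::new(4, 8.0);
    assert!(
        (config.scaling() - 2.0).abs() < 1e-6,
        "scaling() = {} does not match alpha / rank = 2.0",
        config.scaling()
    );
}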

/// Property-test counterparts to the deterministic checks above.
mod lora_proptest_falsify {
    use super::*;
    use proptest::prelude::*;

    proptest! {
        #![proptest_config(ProptestConfig::with_cases(10))]
        // LORA-001 across ranks: shape preservation must hold for every rank.
        #[test]
        fn falsify_lora_001_prop_output_shape(
            rank in 1..=8usize,
        ) {
            let in_dim = 8;
            let out_dim = 16;
            let config = LoRAConfig::new(rank, 1.0);
            let adapter = LoRAAdapter::new(in_dim, out_dim, config);
            let base_weight = Tensor::new(&vec![0.1; out_dim * in_dim], &[out_dim, in_dim]);
            let result = adapter.apply(&base_weight);
            prop_assert_eq!(
                result.shape(),
                &[out_dim, in_dim],
                "FALSIFIED LORA-001-prop: output shape {:?} != [{}, {}]",
                result.shape(), out_dim, in_dim
            );
        }
    }

    proptest! {
        #![proptest_config(ProptestConfig::with_cases(10))]
        #[test]
        fn falsify_lora_003_prop_finite_output(
            rank in 1..=8usize,
        ) {
            let config = LoRAConfig::new(rank, 1.0);
            let adapter = LoRAAdapter::new(8, 16, config);
            let base_weight = Tensor::new(&vec![1.0; 128], &[16, 8]);
            let result = adapter.apply(&base_weight);
            for (i, &v) in result.data().iter().enumerate() {
                prop_assert!(
                    v.is_finite(),
                    "FALSIFIED LORA-003-prop: output[{}]={} not finite (rank={})",
                    i, v, rank
                );
            }
        }
    }
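
    // A hedged property-test sketch extending LORA-002 across ranks. It rests
    // on the same premise as the deterministic test above: a freshly created
    // adapter has B zero-initialized, so `apply` should return the base
    // weights up to the same loose tolerance. The test name and tolerance are
    // chosen here, not taken from the original suite.
    proptest! {
        #![proptest_config(ProptestConfig::with_cases(10))]
        #[test]
        fn falsify_lora_002_prop_zero_init_identity(
            rank in 1..=8usize,
        ) {
            let config = LoRAConfig::new(rank, 1.0);
            let adapter = LoRAAdapter::new(8, 16, config);
            let base_data: Vec<f32> = (0..128).map(|i| i as f32 * 0.01).collect();
            let base_weight = Tensor::new(&base_data, &[16, 8]);
            let result = adapter.apply(&base_weight);
            for (i, (&r, &b)) in result.data().iter().zip(base_data.iter()).enumerate() {
                prop_assert!(
                    (r - b).abs() < 0.1,
                    "FALSIFIED LORA-002-prop: result[{}]={} far from base[{}]={} (rank={})",
                    i, r, i, b, rank
                );
            }
        }
    }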
}