#[cfg(test)]
mod flow_print_cross_tests {
use super::*;
#[test]
fn test_compute_stats_empty() {
    // An empty slice must yield all-zero statistics (no NaN, no panic).
    let stats = compute_stats(&[]);
    assert_eq!(stats, (0.0, 0.0, 0.0, 0.0));
}
#[test]
fn test_compute_stats_single_element() {
    // One value: min == max == mean == the value, with zero spread.
    let (lo, hi, avg, sd) = compute_stats(&[42.0]);
    assert_eq!((lo, hi, avg), (42.0, 42.0, 42.0));
    assert_eq!(sd, 0.0);
}
#[test]
fn test_compute_stats_uniform() {
    // Identical values collapse min/max/mean and give zero deviation.
    let samples = [5.0; 4];
    let (lo, hi, avg, sd) = compute_stats(&samples);
    assert_eq!((lo, hi, avg), (5.0, 5.0, 5.0));
    assert_eq!(sd, 0.0);
}
#[test]
fn test_compute_stats_known_values() {
    // 1..=5: mean is 3, population variance is 2, so std = sqrt(2).
    let samples: Vec<f32> = (1..=5).map(|n| n as f32).collect();
    let (lo, hi, avg, sd) = compute_stats(&samples);
    assert_eq!(lo, 1.0);
    assert_eq!(hi, 5.0);
    assert!((avg - 3.0).abs() < 1e-5);
    assert!((sd - 2.0_f32.sqrt()).abs() < 1e-4);
}
#[test]
fn test_compute_stats_negative_values() {
    // Values symmetric around zero: mean 0, population std exactly 2.
    let (lo, hi, avg, sd) = compute_stats(&[-3.0, -1.0, 0.0, 1.0, 3.0]);
    assert_eq!(lo, -3.0);
    assert_eq!(hi, 3.0);
    assert!(avg.abs() < 1e-5);
    assert!((sd - 2.0).abs() < 1e-4);
}
#[test]
fn test_compute_stats_large_range() {
    // Widely spread values: extremes reported exactly, mean near zero.
    let (lo, hi, avg, _) = compute_stats(&[-1000.0, 0.0, 1000.0]);
    assert_eq!(lo, -1000.0);
    assert_eq!(hi, 1000.0);
    assert!(avg.abs() < 1e-3);
}
#[test]
fn test_print_cross_attention_flow_no_cross_attn_layers() {
    // Only self-attention tensors present: the printer must not panic.
    let names: Vec<String> = [
        "model.layers.0.self_attn.q_proj.weight",
        "model.layers.0.self_attn.k_proj.weight",
    ]
    .iter()
    .map(|s| s.to_string())
    .collect();
    print_cross_attention_flow(None, &names, None, false);
}
#[test]
fn test_print_cross_attention_flow_empty_names() {
    // Degenerate case: an empty tensor list is handled gracefully.
    let names: [String; 0] = [];
    print_cross_attention_flow(None, &names, None, false);
}
#[test]
fn test_print_cross_attention_flow_with_cross_attn() {
    // Full q/k/v/out projection set for layer 0, plus a partial layer 1.
    let mut names: Vec<String> = ["q_proj", "k_proj", "v_proj", "out_proj"]
        .iter()
        .map(|p| format!("decoder.layers.0.encoder_attn.{}.weight", p))
        .collect();
    names.push("decoder.layers.1.encoder_attn.q_proj.weight".to_string());
    print_cross_attention_flow(None, &names, None, false);
}
#[test]
fn test_print_cross_attention_flow_with_layer_filter() {
    // The "layers.1" filter should restrict output to that layer only.
    let names: Vec<String> = (0..3)
        .map(|i| format!("decoder.layers.{}.encoder_attn.q_proj.weight", i))
        .collect();
    print_cross_attention_flow(None, &names, Some("layers.1"), false);
}
#[test]
fn test_print_cross_attention_flow_verbose_no_reader() {
    // Verbose mode with no reader available must still not panic.
    let names = [String::from("decoder.layers.0.encoder_attn.q_proj.weight")];
    print_cross_attention_flow(None, &names, None, true);
}
#[test]
fn test_print_cross_attention_flow_cross_attn_variant() {
    // The "cross_attn" naming variant should be handled as well.
    let names: Vec<String> = ["q_proj", "k_proj"]
        .iter()
        .map(|p| format!("model.layers.0.cross_attn.{}.weight", p))
        .collect();
    print_cross_attention_flow(None, &names, None, false);
}
#[test]
fn test_print_self_attention_flow_no_panic() {
    // Empty tensor list: should be a no-op rather than a panic.
    let empty: [String; 0] = [];
    print_self_attention_flow(None, &empty, None, false);
}
#[test]
fn test_print_self_attention_flow_with_names() {
    // A single self-attention tensor, verbose mode, no reader.
    let names = [String::from("model.layers.0.self_attn.q_proj.weight")];
    print_self_attention_flow(None, &names, None, true);
}
#[test]
fn test_print_ffn_flow_no_panic() {
    // Empty tensor list: should be a no-op rather than a panic.
    let empty: [String; 0] = [];
    print_ffn_flow(None, &empty, None, false);
}
#[test]
fn test_print_ffn_flow_with_names() {
    // A single FFN tensor, verbose mode, no reader.
    let names = [String::from("model.layers.0.mlp.fc1.weight")];
    print_ffn_flow(None, &names, None, true);
}
}