use numrs::{Array, Tensor, Linear, Sequential, Module, ReLU};
use numrs::autograd::train::{Dataset, MSELoss, TrainerBuilder};
use anyhow::Result;
/// Validation harness for more complex `numrs` models.
///
/// Runs three regression scenarios of increasing structural complexity —
/// a deep 5→32→32→16→1 MLP on a smooth 5-D target, a 3→24→24→12→1 network
/// on a non-linear 3-D function, and a 10→16→8→1 network predicting the
/// sum of a 10-value window — each trained with Adam + MSE and scored by
/// mean absolute error on a handful of spot-checked examples.
fn main() -> Result<()> {
    println!("🧪 Validación de Modelo Complejo\n");
    println!("═══════════════════════════════════════════════════════════\n");

    // ── Test 1: deep network on f(x) = 2·x₁² + sin(x₂) + x₃·x₄ + cos(x₅) + 0.5 ──
    println!("📊 Test 1: Red profunda 5→32→32→16→1");
    println!("─────────────────────────────────────────────────────────");
    println!("Problema: Aproximar f(x) = x₁² + sin(x₂) + x₃*x₄ + cos(x₅)");
    println!();
    {
        // Synthesize 100 deterministic examples of the 5-input target.
        let mut x_data = Vec::new();
        let mut y_data = Vec::new();
        for i in 0..100 {
            let x: Vec<f32> = (0..5)
                .map(|j| (i as f32 / 50.0 - 1.0) + (j as f32 / 10.0 - 0.2))
                .collect();
            let y = x[0].powi(2) * 2.0 + x[1].sin() + x[2] * x[3] + x[4].cos() + 0.5;
            x_data.push(x);
            y_data.push(vec![y]);
        }
        let dataset = Dataset::new(x_data.clone(), y_data.clone(), 10);

        let model = Sequential::new(vec![
            Box::new(Linear::new(5, 32)?),
            Box::new(ReLU),
            Box::new(Linear::new(32, 32)?),
            Box::new(ReLU),
            Box::new(Linear::new(32, 16)?),
            Box::new(ReLU),
            Box::new(Linear::new(16, 1)?),
        ]);
        println!(" Arquitectura: 5→32→32→16→1 (4 capas)");
        println!(" Total parámetros: ~1,700");
        println!(" Dataset: 100 ejemplos");
        println!(" Batch size: 10");
        println!(" Optimizer: Adam(lr=0.01)");
        println!();

        let mut trainer = TrainerBuilder::new(model)
            .learning_rate(0.01)
            .build_adam(Box::new(MSELoss));

        // Train incrementally: at each checkpoint run only the epochs missing
        // since the previous one, so the cumulative epoch count matches the
        // printed label. (The previous index arithmetic re-ran epochs — the
        // "Epoch 10" checkpoint trained 0..10 on top of the 1 epoch already
        // done, and later checkpoints drifted further off their labels.)
        let epochs_checkpoints = [1, 10, 25, 50, 100, 150];
        let mut trained_epochs = 0;
        let mut prev_loss = f32::INFINITY;
        for &target_epoch in &epochs_checkpoints {
            for _ in trained_epochs..target_epoch {
                trainer.train_epoch(&dataset)?;
            }
            trained_epochs = target_epoch;

            let metrics = trainer.evaluate(&dataset)?;
            // Relative improvement vs. the previous checkpoint; 0 on the
            // first pass (prev_loss starts at +inf), clamped at 0 so a
            // regression prints as "0.0%" rather than a negative number.
            let improvement = if prev_loss.is_finite() {
                ((prev_loss - metrics.loss) / prev_loss * 100.0).max(0.0)
            } else {
                0.0
            };
            println!(" Epoch {:3}: loss={:.6} (mejora: {:.1}%)",
                target_epoch, metrics.loss, improvement);
            prev_loss = metrics.loss;
        }

        println!();
        println!(" Validación en ejemplos específicos:");
        let test_indices = [0, 25, 50, 75, 99];
        let mut total_error = 0.0;
        for &idx in &test_indices {
            // Single-example forward pass; no gradient tracking needed.
            let x = Tensor::new(
                Array::new(vec![1, 5], x_data[idx].clone()),
                false,
            );
            let y_pred = trainer.model.forward(&x)?;
            let y_true = y_data[idx][0];
            let error = (y_pred.data.data[0] - y_true).abs();
            // Percent error; near-zero targets fall back to a scaled
            // absolute error to avoid dividing by ~0.
            let rel_error = if y_true.abs() > 0.01 {
                (error / y_true.abs()) * 100.0
            } else {
                error * 100.0
            };
            println!(" Ejemplo {:2}: pred={:7.3}, true={:7.3}, error={:.4} ({:.1}%)",
                idx, y_pred.data.data[0], y_true, error, rel_error);
            total_error += error;
        }
        let avg_error = total_error / test_indices.len() as f32;
        println!();
        println!(" Error promedio: {:.4}", avg_error);
        // Tiered verdict on mean absolute error.
        if avg_error < 0.2 {
            println!(" ✅ Test 1 PASSED: Red profunda converge excelente (error < 0.2)\n");
        } else if avg_error < 0.5 {
            println!(" ✅ Test 1 PASSED: Red profunda converge bien (error < 0.5)\n");
        } else if avg_error < 1.0 {
            println!(" ⚠️ Test 1 WARNING: Converge razonablemente (error < 1.0)\n");
        } else {
            println!(" ❌ Test 1 FAILED: No converge adecuadamente\n");
        }
    }

    // ── Test 2: non-linear regression y = x₁·x₂ + sin(x₃·π) ──
    println!("📊 Test 2: Regresión no-lineal 3→24→24→12→1");
    println!("─────────────────────────────────────────────────────────");
    println!("Problema: y = x₁*x₂ + sin(x₃*π)");
    println!();
    {
        // 80 deterministic examples; the modular strides decorrelate the
        // three inputs across the index range.
        let mut x_data = Vec::new();
        let mut y_data = Vec::new();
        for i in 0..80 {
            let x1 = (i as f32 / 40.0) - 1.0;
            let x2 = ((i * 3) % 80) as f32 / 40.0 - 1.0;
            let x3 = ((i * 7) % 80) as f32 / 80.0;
            let y = x1 * x2 + (x3 * std::f32::consts::PI).sin();
            x_data.push(vec![x1, x2, x3]);
            y_data.push(vec![y]);
        }
        let dataset = Dataset::new(x_data.clone(), y_data.clone(), 16);

        let model = Sequential::new(vec![
            Box::new(Linear::new(3, 24)?),
            Box::new(ReLU),
            Box::new(Linear::new(24, 24)?),
            Box::new(ReLU),
            Box::new(Linear::new(24, 12)?),
            Box::new(ReLU),
            Box::new(Linear::new(12, 1)?),
        ]);
        println!(" Arquitectura: 3→24→24→12→1");
        println!(" Total parámetros: ~900");
        println!(" Dataset: 80 ejemplos");
        println!(" Función: y = x₁*x₂ + sin(x₃*π)");
        println!(" Optimizer: Adam(lr=0.02)");
        println!();

        let mut trainer = TrainerBuilder::new(model)
            .learning_rate(0.02)
            .build_adam(Box::new(MSELoss));

        // 150 epochs, logging the loss every 30.
        for epoch in 0..150 {
            trainer.train_epoch(&dataset)?;
            if epoch % 30 == 0 {
                let metrics = trainer.evaluate(&dataset)?;
                println!(" Epoch {:3}: loss={:.4}", epoch, metrics.loss);
            }
        }
        let final_metrics = trainer.evaluate(&dataset)?;
        println!(" Epoch 150: loss={:.4}", final_metrics.loss);

        println!();
        println!(" Predicciones en ejemplos de test:");
        let test_examples = [0, 20, 40, 60, 79];
        let mut total_error = 0.0;
        for &idx in &test_examples {
            let x = Tensor::new(
                Array::new(vec![1, 3], x_data[idx].clone()),
                false,
            );
            let y_pred = trainer.model.forward(&x)?;
            let y_true = y_data[idx][0];
            let error = (y_pred.data.data[0] - y_true).abs();
            println!(" Input: [{:5.2}, {:5.2}, {:5.2}] → Pred: {:6.3}, True: {:6.3}, Error: {:.4}",
                x_data[idx][0], x_data[idx][1], x_data[idx][2],
                y_pred.data.data[0], y_true, error);
            total_error += error;
        }
        let avg_error = total_error / test_examples.len() as f32;
        println!();
        println!(" Error promedio: {:.4}", avg_error);
        if avg_error < 0.1 {
            println!(" ✅ Test 2 PASSED: Regresión excelente (error < 0.1)\n");
        } else if avg_error < 0.3 {
            println!(" ✅ Test 2 PASSED: Regresión buena (error < 0.3)\n");
        } else {
            println!(" ⚠️ Test 2 WARNING: Error alto (>{:.4})\n", avg_error);
        }
    }

    // ── Test 3: predict the sum of a 10-value sliding window ──
    println!("📊 Test 3: Predicción de suma acumulada 10→16→8→1");
    println!("─────────────────────────────────────────────────────────");
    println!("Problema: Predecir sum(x) dado ventana de 10 valores");
    println!();
    {
        // 60 windows: a sinusoidal base level plus a per-step linear ramp
        // whose slope varies slowly with the window index.
        let mut x_data = Vec::new();
        let mut y_data = Vec::new();
        for i in 0..60 {
            let base = (i as f32 / 10.0).sin();
            let window: Vec<f32> = (0..10)
                .map(|t| base + (t as f32 / 10.0) * (i as f32 / 30.0).cos())
                .collect();
            let y: f32 = window.iter().sum();
            x_data.push(window);
            y_data.push(vec![y]);
        }
        let dataset = Dataset::new(x_data.clone(), y_data.clone(), 10);

        let model = Sequential::new(vec![
            Box::new(Linear::new(10, 16)?),
            Box::new(ReLU),
            Box::new(Linear::new(16, 8)?),
            Box::new(ReLU),
            Box::new(Linear::new(8, 1)?),
        ]);
        println!(" Arquitectura: 10→16→8→1 (3 capas)");
        println!(" Total parámetros: ~300");
        println!(" Dataset: 60 ventanas");
        println!(" Tarea: Predecir sum(ventana)");
        println!(" Optimizer: Adam(lr=0.02)");
        println!();

        let mut trainer = TrainerBuilder::new(model)
            .learning_rate(0.02)
            .build_adam(Box::new(MSELoss));

        // 100 epochs, logging the loss every 20.
        for epoch in 0..100 {
            trainer.train_epoch(&dataset)?;
            if epoch % 20 == 0 {
                let metrics = trainer.evaluate(&dataset)?;
                println!(" Epoch {:3}: loss={:.4}", epoch, metrics.loss);
            }
        }
        let final_metrics = trainer.evaluate(&dataset)?;
        println!(" Epoch 100: loss={:.4}", final_metrics.loss);

        println!();
        println!(" Predicciones en ventanas de test:");
        let test_windows = [0, 15, 30, 45, 59];
        let mut total_error = 0.0;
        for &idx in &test_windows {
            let x = Tensor::new(
                Array::new(vec![1, 10], x_data[idx].clone()),
                false,
            );
            let y_pred = trainer.model.forward(&x)?;
            let y_true = y_data[idx][0];
            let error = (y_pred.data.data[0] - y_true).abs();
            println!(" Ventana {:2}: pred={:6.3}, true={:6.3}, error={:.4}",
                idx, y_pred.data.data[0], y_true, error);
            total_error += error;
        }
        let avg_error = total_error / test_windows.len() as f32;
        println!();
        println!(" Error promedio: {:.4}", avg_error);
        if avg_error < 0.5 {
            println!(" ✅ Test 3 PASSED: Predicción excelente (error < 0.5)\n");
        } else if avg_error < 1.5 {
            println!(" ✅ Test 3 PASSED: Predicción buena (error < 1.5)\n");
        } else {
            println!(" ⚠️ Test 3 WARNING: Error alto (>{:.4})\n", avg_error);
        }
    }

    println!("═══════════════════════════════════════════════════════════");
    println!("✅ Validación de modelos complejos completada");
    println!("═══════════════════════════════════════════════════════════\n");
    Ok(())
}