#![allow(clippy::missing_docs_in_private_items, clippy::print_stdout, clippy::unwrap_used)]
use esopt::*;
use ofnn::{Float, *};
/// Trains a tiny 2-3-2 network on the XOR problem with an evolution
/// strategy (ES) wrapping an Adam update rule, then prints the class-1
/// probability for each of the four inputs.
fn main() {
    // XOR truth table: inputs and one-hot targets
    // ([1, 0] = class 0 / "false", [0, 1] = class 1 / "true").
    let x = vec![vec![0.0, 0.0], vec![0.0, 1.0], vec![1.0, 0.0], vec![1.0, 1.0]];
    let y = vec![vec![1.0, 0.0], vec![0.0, 1.0], vec![0.0, 1.0], vec![1.0, 0.0]];

    // 2 inputs -> dense(3) -> learnable PReLU -> dense(2) -> softmax.
    let mut model = Sequential::new(2);
    model
        .add_layer_dense(3, Initializer::He)
        .add_layer_prelu(0.05)
        .add_layer_dense(2, Initializer::Glorot)
        .add_layer(Layer::SoftMax);

    let eval = NNEvaluator::new(model.clone(), x.clone(), y);

    // Adam applies the ES gradient estimate; lambda adds weight decay.
    let mut adam = Adam::new();
    adam.set_lr(0.25).set_lambda(0.01);
    let mut opt = ES::new(adam, eval);
    opt.set_params(model.get_params()).set_std(0.1).set_samples(50);

    // 5 reporting rounds of 5 ES iterations each (25 total).
    for i in 0..5 {
        let n = 5;
        let res = opt.optimize(n);
        // Fixed typo in the progress message ("iteratios" -> "iterations").
        println!("After {} iterations:", (i + 1) * n);
        // The evaluator returns the negated loss (higher is better),
        // so negate again to display the actual loss.
        println!("Loss: {}", -res.0);
        println!("Gradnorm: {}", res.1);
        println!();
    }

    // Copy the optimized parameters back into the model and inspect it.
    model.set_params(opt.get_params());
    println!("PReLU factor: {:?}", model.get_layers()[1]);

    // Print P(class 1) for each input; one line per sample, same output
    // as the previous four hand-written println! calls.
    let pred = model.predict(&x);
    for (input, p) in x.iter().zip(&pred) {
        println!("Prediction on {:?}: {}", input, p[1]);
    }
}
/// Fitness evaluator for the ES optimizer: scores candidate parameter
/// vectors by loading them into a clone of `model` and measuring the
/// loss on a fixed data set.
#[derive(Clone)]
struct NNEvaluator {
    // Template network; cloned per evaluation so `&self` stays immutable.
    model: Sequential,
    // Input samples (one inner Vec per sample), fed to `predict`.
    x: Vec<Vec<Float>>,
    // Target output vectors matching `x`, compared via cross-entropy.
    y: Vec<Vec<Float>>,
}
impl NNEvaluator {
pub fn new(model: Sequential, x: Vec<Vec<Float>>, y: Vec<Vec<Float>>) -> NNEvaluator {
NNEvaluator { model, x, y }
}
}
impl Evaluator for NNEvaluator {
    /// Fitness of a candidate parameter vector: the negated categorical
    /// cross-entropy of the model's predictions on `self.x` against
    /// `self.y` (higher is better). A NaN result is mapped to 0.0.
    fn eval_test(&self, params: &[Float]) -> Float {
        let mut candidate = self.model.clone();
        candidate.set_params(params);
        let predictions = candidate.predict(&self.x);
        // Negate the loss so that the ES maximizes fitness.
        let fitness = -losses::categorical_crossentropy(&predictions, &self.y);
        if fitness.is_nan() { 0.0 } else { fitness }
    }

    /// Training fitness is identical to test fitness; the sample/batch
    /// index argument is unused here.
    fn eval_train(&self, params: &[Float], _: usize) -> Float {
        self.eval_test(params)
    }
}