use crate::activate::{Activator, ActivatorGradient};
use concision_params::RawParams;
use concision_traits::{Backward, Forward};
/// Base interface for a neural-network layer: a set of parameters paired with
/// an activation function `F` operating over elements of type `A`.
pub trait RawLayer<F, A>
where
    F: Activator<A>,
    Self::Params<A>: RawParams<Elem = A>,
{
    /// The parameter container for this layer, generic over its element type.
    type Params<_T>;

    /// Borrow the layer's activation function.
    fn rho(&self) -> &F;

    /// Borrow the layer's parameters.
    fn params(&self) -> &Self::Params<A>;

    /// Run a forward pass: feed `input` through the parameters' transform,
    /// then apply the activation function to the result.
    fn forward<X, Y, Z>(&self, input: &X) -> Z
    where
        F: Activator<Y, Output = Z>,
        Self::Params<A>: Forward<X, Output = Y>,
    {
        // Pre-activation output produced by the parameter transform.
        let pre = self.params().forward(input);
        self.rho().activate(pre)
    }
}
/// Mutable extension of [`RawLayer`] adding parameter mutation and a default
/// backward pass for gradient-based updates.
pub trait RawLayerMut<F, A>: RawLayer<F, A>
where
    F: Activator<A>,
    Self::Params<A>: RawParams<Elem = A>,
{
    /// Mutably borrow the layer's parameters.
    fn params_mut(&mut self) -> &mut Self::Params<A>;

    /// Run a backward pass: compute the activation gradient for `error`, then
    /// apply it to the parameters with learning rate `gamma`.
    ///
    /// NOTE(review): `input` is taken by value but only used behind a shared
    /// reference below — a `&X` parameter may suffice; confirm with callers.
    fn backward<X, Y, Z, Dt>(&mut self, input: X, error: Y, gamma: A)
    where
        A: Clone,
        F: ActivatorGradient<Y, Rel = F, Delta = Dt>,
        Self::Params<A>: Backward<X, Dt, Elem = A>,
    {
        // The gradient must be computed before `params_mut()` is called:
        // `rho()` borrows `self` immutably, so the two borrows cannot overlap.
        let grad = self.rho().activate_gradient(error);
        self.params_mut().backward(&input, &grad, gamma)
    }

    /// Overwrite the layer's parameters, dropping the previous values.
    fn set_params(&mut self, params: Self::Params<A>) {
        *self.params_mut() = params;
    }

    /// Install `params` and hand back the values they replaced.
    fn replace_params(&mut self, params: Self::Params<A>) -> Self::Params<A> {
        core::mem::replace(self.params_mut(), params)
    }

    /// Exchange the layer's parameters with `other` in place.
    fn swap_params(&mut self, other: &mut Self::Params<A>) {
        core::mem::swap(self.params_mut(), other);
    }
}