pub trait BackwardAll<U>: PreTrain<U> + UpdateWeight<U>where
U: UnitValue<U>,{
type LossInput: Debug;
type LossOutput: Debug;
// Required method
fn backward_all<L: LossFunction<U>>(
&mut self,
input: Self::LossInput,
stack: Self::OutStack,
lossf: &L,
) -> Result<(<Self as BackwardAll<U>>::LossOutput, <Self as UpdateWeight<U>>::GradientStack), TrainingError>;
// Provided method
fn is_canonical_link<L: LossFunction<U>>(&self, _: &L) -> bool { ... }
}
Expand description
Trait defining the implementation of error back propagation (backpropagation) in neural networks.
Required Associated Types§
type LossInput: Debug

Type of the loss values passed as `input` to `backward_all`.

type LossOutput: Debug
Losses in the top layer during neural network training
Required Methods§
fn backward_all<L: LossFunction<U>>(
    &mut self,
    input: Self::LossInput,
    stack: Self::OutStack,
    lossf: &L,
) -> Result<(<Self as BackwardAll<U>>::LossOutput, <Self as UpdateWeight<U>>::GradientStack), TrainingError>
Provided Methods§
fn is_canonical_link<L: LossFunction<U>>(&self, _: &L) -> bool
Dyn Compatibility§
This trait is not dyn compatible.
In older versions of Rust, dyn compatibility was called "object safety", so this trait is not object safe.