Trait BackwardAll

pub trait BackwardAll<U>: PreTrain<U> + UpdateWeight<U>
where
    U: UnitValue<U>,
{
    type LossInput: Debug;
    type LossOutput: Debug;

    // Required method
    fn backward_all<L: LossFunction<U>>(
        &mut self,
        input: Self::LossInput,
        stack: Self::OutStack,
        lossf: &L,
    ) -> Result<
        (<Self as BackwardAll<U>>::LossOutput, <Self as UpdateWeight<U>>::GradientStack),
        TrainingError,
    >;

    // Provided method
    fn is_canonical_link<L: LossFunction<U>>(&self, _: &L) -> bool { ... }
}

Trait defining the implementation of error backpropagation in neural networks.

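A minimal sketch of how a single training step might drive this trait. The helper below is hypothetical (not part of the crate), the import paths are assumptions, and the idea that the `OutStack` is produced by the preceding forward pass and that the returned gradient stack is then applied via `UpdateWeight` is inferred from the supertrait bounds rather than documented here:

// Hypothetical helper, not part of the crate. Import paths are
// assumptions; adjust them to the crate's actual module layout.
use nncombinator::error::TrainingError;
use nncombinator::layer::BackwardAll;
use nncombinator::lossfunction::LossFunction;
use nncombinator::UnitValue;

fn backprop_step<U, N, L>(
    net: &mut N,
    loss: N::LossInput,
    stack: N::OutStack,
    lossf: &L,
) -> Result<(N::LossOutput, N::GradientStack), TrainingError>
where
    U: UnitValue<U>,
    N: BackwardAll<U>,
    L: LossFunction<U>,
{
    // `backward_all` consumes the stack built up by the forward pass,
    // propagates the loss back through every layer, and returns the
    // loss emitted by the top layer together with the accumulated
    // gradients (which would then be applied through `UpdateWeight`).
    net.backward_all(loss, stack, lossf)
}
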
Required Associated Types§

type LossInput: Debug

Type of the loss passed into this layer during neural network training

type LossOutput: Debug

Type of the loss emitted by the top layer during neural network training

Required Methods§

fn backward_all<L: LossFunction<U>>(
    &mut self,
    input: Self::LossInput,
    stack: Self::OutStack,
    lossf: &L,
) -> Result<
    (<Self as BackwardAll<U>>::LossOutput, <Self as UpdateWeight<U>>::GradientStack),
    TrainingError,
>

Performs backpropagation of errors through all layers

§Arguments
  • input - the loss to propagate backward
  • stack - stack holding the calculation results of the upper layers from the forward pass
  • lossf - the loss function used to compute the error derivative
§Errors

This function may return a `TrainingError` if the backward pass or the gradient computation fails.

Provided Methods§

fn is_canonical_link<L: LossFunction<U>>(&self, _: &L) -> bool

Returns whether the given loss function forms a canonical link with this layer's output, in which case the combined derivative of loss and activation simplifies.
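
When an implementation reports a canonical pairing (for example softmax with cross-entropy, where the derivative of the loss with respect to the pre-activation reduces to `output - target`), callers can take the simplified path. A hedged sketch; the helper name is illustrative:

// Hypothetical illustration of consulting the flag; imports as in
// the sketch above.
fn uses_canonical_shortcut<U, N, L>(net: &N, lossf: &L) -> bool
where
    U: UnitValue<U>,
    N: BackwardAll<U>,
    L: LossFunction<U>,
{
    // true for canonical pairings such as softmax + cross-entropy.
    net.is_canonical_link(lossf)
}
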
Dyn Compatibility§

This trait is not dyn compatible.

In older versions of Rust, dyn compatibility was called "object safety", so this trait is not object safe.
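
The generic `L: LossFunction<U>` parameter on `backward_all` (and on `is_canonical_link`) is what rules out trait objects; code that needs to work over several networks has to stay generic over the concrete type instead. A sketch (the function is illustrative):

// Does not compile: a trait with generic methods cannot be made
// into a trait object.
// let net: Box<dyn BackwardAll<f32, LossInput = (), LossOutput = ()>> = todo!();

// Stay generic over the concrete type instead:
fn train_with<U, N>(net: &mut N)
where
    U: UnitValue<U>,
    N: BackwardAll<U>,
{
    // ... call net.backward_all(...) with a concrete LossFunction ...
}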

Implementors§

impl<U, C, BC, P, D, I, PI, OP, const NI: usize, const NO: usize> BackwardAll<U> for LinearLayer<U, C, BC, P, D, I, PI, OP, NI, NO>
where
    P: BackwardAll<U, LossInput = PI> + ForwardAll<Input = I, Output = PI> + PreTrain<U, PreOutput = PI> + Loss<U>,
    U: Default + Clone + Copy + Send + UnitValue<U>,
    I: Debug + Send + Sync,
    C: Debug,
    BC: Debug,
    OP: Optimizer<U, D>,
    PI: Debug + BatchDataType + From<<D as DeviceLinear<U, C, BC, PI, NI, NO>>::LossOutput>,
    D: Device<U> + DeviceLinear<U, C, BC, PI, NI, NO>,
    for<'a> &'a <OP as Optimizer<U, D>>::InternalType: From<&'a C> + From<&'a BC>,
    for<'a> <OP as Optimizer<U, D>>::InternalUpdateType<'a>: From<&'a mut C> + From<&'a mut BC>,

type LossInput = <D as DeviceLinear<U, C, BC, PI, NI, NO>>::Output

type LossOutput = <P as BackwardAll<U>>::LossOutput

impl<U, C, BC, P, D, OP, I, const NI: usize, const NO: usize> BackwardAll<U> for DiffLinearLayer<U, C, BC, P, OP, D, I, NI, NO>
where
    P: BackwardAll<U, LossInput = ()> + ForwardAll<Input = I, Output = DiffInput<DiffArr<U, NI>, U, NI, NO>> + PreTrain<U, PreOutput = DiffInput<DiffArr<U, NI>, U, NI, NO>> + Loss<U>,
    U: Default + Clone + Copy + UnitValue<U>,
    I: Debug + Send + Sync,
    C: Debug,
    BC: Debug + From<<D as DeviceDiffLinear<U, C, BC, NI, NO>>::Output>,
    OP: Optimizer<U, D>,
    D: Device<U> + DeviceDiffLinear<U, C, BC, NI, NO>,
    <D as DeviceDiffLinear<U, C, BC, NI, NO>>::Output: Debug + 'static,
    for<'a> &'a <OP as Optimizer<U, D>>::InternalType: From<&'a C> + From<&'a BC>,
    for<'a> <OP as Optimizer<U, D>>::InternalUpdateType<'a>: From<&'a mut C> + From<&'a mut BC>,
    Self: ForwardAll + PreTrain<U, OutStack = Cons<<P as PreTrain<U>>::OutStack, BC>>,

type LossInput = <D as DeviceDiffLinear<U, C, BC, NI, NO>>::Output

type LossOutput = <P as BackwardAll<U>>::LossOutput

impl<U, C, P, OP, D, I, PI, S, const N: usize> BackwardAll<U> for BatchNormalizationLayer<U, C, P, OP, D, I, PI, S, N>
where
    P: ForwardAll<Input = I, Output = PI> + BackwardAll<U, LossInput = PI> + PreTrain<U, PreOutput = PI> + Loss<U>,
    U: Default + Clone + Copy + Send + UnitValue<U>,
    I: Debug + Send + Sync,
    PI: BatchDataType + Debug + 'static,
    S: Debug + Sized + 'static,
    C: Debug,
    OP: Optimizer<U, D>,
    D: Device<U> + DeviceBatchNorm<U, C, PI, N>,
    <PI as BatchDataType>::Type: Debug + 'static,
    for<'a> &'a <OP as Optimizer<U, D>>::InternalType: From<&'a C>,
    for<'a> <OP as Optimizer<U, D>>::InternalUpdateType<'a>: From<&'a mut C>,

impl<U, O, LI, D> BackwardAll<U> for InputLayer<U, O, LI, D>
where
    U: UnitValue<U>,
    O: Debug + BatchDataType + Send + Sync + 'static,
    LI: Debug,
    D: Device<U> + DeviceInput<U, O>,
    <O as BatchDataType>::Type: Debug + 'static,

impl<U, P, A, I, PI, D, const N: usize> BackwardAll<U> for ActivationLayer<U, P, A, I, PI, D, N>
where
    P: PreTrain<U, PreOutput = PI> + ForwardAll<Input = I, Output = PI> + BackwardAll<U, LossInput = PI> + Loss<U>,
    U: Default + Clone + Copy + UnitValue<U>,
    D: Device<U> + DeviceActivation<U, PI, A, N>,
    PI: Debug + BatchDataType,
    I: Debug + Send + Sync,

impl<U, P, D, I, PI, const N: usize> BackwardAll<U> for LinearOutputLayer<U, P, D, I, PI, N>
where
    P: BackwardAll<U, LossInput = PI> + ForwardAll<Input = I, Output = PI> + PreTrain<U, PreOutput = PI> + Loss<U>,
    U: Default + Clone + Copy + UnitValue<U>,
    PI: Debug + BatchDataType + ToHost<U, Output = Arr<U, N>> + 'static,
    I: Debug + Send + Sync,
    <PI as ToHost<U>>::Output: Debug + 'static,
    for<'a> D: Device<U> + DeviceLinearOutput<'a, U, N, IO = PI>,

impl<U, P, I, PI, CI, D> BackwardAll<U> for BridgeLayer<U, P, I, PI, CI, D>
where
    P: PreTrain<U, PreOutput = PI> + ForwardAll<Input = I, Output = PI> + BackwardAll<U, LossInput = PI> + Loss<U>,
    U: Default + Clone + Copy + UnitValue<U>,
    D: Device<U>,
    PI: Debug + From<CI>,
    CI: Debug + 'static,
    I: Debug + Send + Sync,

impl<U, P, OP, D, I, PI, const N: usize> BackwardAll<U> for BiasLayer<U, Arr<U, N>, P, OP, D, I, PI, N>
where
    P: BackwardAll<U, LossInput = PI> + ForwardAll<Input = I, Output = PI> + PreTrain<U, PreOutput = PI> + Loss<U>,
    U: Default + Clone + Copy + Send + UnitValue<U>,
    D: Device<U> + DeviceBias<U, Arr<U, N>, PI, N>,
    I: Debug + Send + Sync,
    PI: Debug + BatchDataType + 'static,
    OP: Optimizer<U, D>,
    <PI as BatchDataType>::Type: Debug + BatchSize + 'static,
    for<'a> &'a <OP as Optimizer<U, D>>::InternalType: From<&'a Arr<U, N>>,
    for<'a> <OP as Optimizer<U, D>>::InternalUpdateType<'a>: From<&'a mut Arr<U, N>>,