Trait PreTrain

Source
pub trait PreTrain<U>: ForwardAll
where U: UnitValue<U>,
{ type PreOutput: Debug + 'static; type OutStack: Stack<Head = Self::PreOutput> + Debug + Sized; // Required method fn pre_train( &self, input: Self::Input, ) -> Result<Self::OutStack, EvaluateError>; }
Expand description

Trait that defines the forward-propagation process performed prior to error back propagation.

Required Associated Types§

Source

type PreOutput: Debug + 'static

The type of output that is pushed onto the stack during the error back propagation process.

Source

type OutStack: Stack<Head = Self::PreOutput> + Debug + Sized

Type of object that holds the results of forward propagation needed to perform error back propagation.

Required Methods§

Source

fn pre_train(&self, input: Self::Input) -> Result<Self::OutStack, EvaluateError>

Performs the forward propagation required for error back propagation.

§Arguments
  • input - the input value to propagate forward through the layer
§Errors

This function may return an `EvaluateError` if the forward propagation computation fails.

Implementors§

Source§

impl<U, C, BC, P, D, I, PI, OP, const NI: usize, const NO: usize> PreTrain<U> for LinearLayer<U, C, BC, P, D, I, PI, OP, NI, NO>
where P: PreTrain<U, PreOutput = PI> + ForwardAll<Input = I, Output = PI> + BackwardAll<U, LossInput = PI> + Loss<U>, U: Default + Clone + Copy + Send + UnitValue<U>, I: Debug + Send + Sync, OP: Optimizer<U, D>, PI: Debug + BatchDataType + From<<D as DeviceLinear<U, C, BC, PI, NI, NO>>::LossOutput>, D: Device<U> + DeviceLinear<U, C, BC, PI, NI, NO>,

Source§

type PreOutput = <D as DeviceLinear<U, C, BC, PI, NI, NO>>::Output

Source§

type OutStack = Cons<<P as PreTrain<U>>::OutStack, <LinearLayer<U, C, BC, P, D, I, PI, OP, NI, NO> as PreTrain<U>>::PreOutput>

Source§

impl<U, C, BC, P, OP, D, I, const NI: usize, const NO: usize> PreTrain<U> for DiffLinearLayer<U, C, BC, P, OP, D, I, NI, NO>
where P: PreTrain<U, PreOutput = DiffInput<DiffArr<U, NI>, U, NI, NO>> + ForwardAll<Input = I, Output = DiffInput<DiffArr<U, NI>, U, NI, NO>> + BackwardAll<U, LossInput = ()> + Loss<U>, U: Default + Clone + Copy + UnitValue<U>, I: Debug + Send + Sync, OP: Optimizer<U, D>, D: Device<U> + DeviceDiffLinear<U, C, BC, NI, NO>, <D as DeviceDiffLinear<U, C, BC, NI, NO>>::Output: Debug + 'static,

Source§

type PreOutput = <D as DeviceDiffLinear<U, C, BC, NI, NO>>::Output

Source§

type OutStack = Cons<<P as PreTrain<U>>::OutStack, <DiffLinearLayer<U, C, BC, P, OP, D, I, NI, NO> as PreTrain<U>>::PreOutput>

Source§

impl<U, C, P, OP, D, I, PI, S, const N: usize> PreTrain<U> for BatchNormalizationLayer<U, C, P, OP, D, I, PI, S, N>
where P: ForwardAll<Input = I, Output = PI> + BackwardAll<U, LossInput = PI> + PreTrain<U, PreOutput = PI> + Loss<U>, U: Default + Clone + Copy + Send + UnitValue<U>, D: Device<U> + DeviceBatchNorm<U, C, PI, N>, I: Debug + Send + Sync, PI: BatchDataType + Debug + 'static, S: Debug + Sized + 'static, C: Debug, OP: Optimizer<U, D>, <PI as BatchDataType>::Type: Debug + 'static,

Source§

type PreOutput = PI

Source§

type OutStack = Cons<Cons<<P as PreTrain<U>>::OutStack, (C, C)>, <BatchNormalizationLayer<U, C, P, OP, D, I, PI, S, N> as PreTrain<U>>::PreOutput>

Source§

impl<U, C, P, OP, D, I, PI, const N: usize> PreTrain<U> for BiasLayer<U, C, P, OP, D, I, PI, N>
where P: PreTrain<U, PreOutput = PI> + ForwardAll<Input = I, Output = PI> + BackwardAll<U, LossInput = PI> + Loss<U>, D: Device<U> + DeviceBias<U, C, PI, N>, U: Default + Clone + Copy + Send + UnitValue<U>, I: Debug + Send + Sync, PI: Debug + BatchDataType + 'static, OP: Optimizer<U, D>, <PI as BatchDataType>::Type: Debug + BatchSize + 'static,

Source§

type PreOutput = PI

Source§

type OutStack = Cons<<P as PreTrain<U>>::OutStack, <BiasLayer<U, C, P, OP, D, I, PI, N> as PreTrain<U>>::PreOutput>

Source§

impl<U, O, LI, D> PreTrain<U> for InputLayer<U, O, LI, D>
where U: UnitValue<U>, O: Debug + BatchDataType + Send + Sync + 'static, LI: Debug, D: Device<U> + DeviceInput<U, O>, <O as BatchDataType>::Type: Debug + 'static,

Source§

type PreOutput = <D as DeviceInput<U, O>>::Output

Source§

type OutStack = Cons<Nil, <InputLayer<U, O, LI, D> as PreTrain<U>>::PreOutput>

Source§

impl<U, P, A, I, PI, D, const N: usize> PreTrain<U> for ActivationLayer<U, P, A, I, PI, D, N>
where P: ForwardAll<Input = I, Output = PI> + BackwardAll<U, LossInput = PI> + PreTrain<U, PreOutput = PI> + Loss<U>, U: Default + Clone + Copy + UnitValue<U>, D: Device<U> + DeviceActivation<U, PI, A, N>, PI: Debug + BatchDataType, I: Debug + Send + Sync,

Source§

type PreOutput = PI

Source§

type OutStack = Cons<<P as PreTrain<U>>::OutStack, <ActivationLayer<U, P, A, I, PI, D, N> as PreTrain<U>>::PreOutput>

Source§

impl<U, P, D, I, PI, const N: usize> PreTrain<U> for LinearOutputLayer<U, P, D, I, PI, N>
where P: ForwardAll<Input = I, Output = PI> + BackwardAll<U, LossInput = PI> + PreTrain<U, PreOutput = PI> + Loss<U>, U: Default + Clone + Copy + UnitValue<U>, PI: Debug + BatchDataType + ToHost<U, Output = Arr<U, N>> + 'static, I: Debug + Send + Sync, <PI as ToHost<U>>::Output: Debug + 'static, for<'a> D: Device<U> + DeviceLinearOutput<'a, U, N, IO = PI>,

Source§

impl<U, P, I, PI, CI, D> PreTrain<U> for BridgeLayer<U, P, I, PI, CI, D>
where P: PreTrain<U, PreOutput = PI> + ForwardAll<Input = I, Output = PI> + BackwardAll<U, LossInput = PI> + Loss<U>, U: Default + Clone + Copy + UnitValue<U>, D: Device<U>, PI: Debug + From<CI>, CI: Debug + 'static, I: Debug + Send + Sync,