1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
use std::any::Any;
use std::fmt::Debug;

use dyn_clone::{clone_trait_object, DynClone};

// NOTE(review): the original `use crate::...;` import was garbled during
// extraction. The doc comment below references NNResult, NNMode, Optimizer
// and MSGPackFormatting from this crate — restore the exact paths from
// version control before building; the prelude glob is a best-effort stand-in.
use crate::prelude::*;
/// Trait defining the training behavior of a layer in a neural network.
///
/// The `TrainLayer` trait provides methods required to compute the forward and backward passes
/// during training and inference. It focuses on the computational transformations
/// performed by a layer and the calculation of gradients for backpropagation.
///
/// ## Key Responsibilities
/// - **Forward Pass**: Processes the input data to produce an output.
/// - **Backward Pass**: Calculates gradients with respect to the input and updates parameters.
///
/// Core trait defining the behavior of a layer in a neural network.
///
/// The `Layer` trait establishes a common interface for all layers, ensuring they can
/// be integrated into a neural network and participate in both inference and training phases.
///
/// ## Key Features
/// - **Type Identification**: Provides the layer's type as a string for debugging or serialization.
/// - **Runtime Polymorphism**: Enables dynamic layer management through `Any` and `DynClone`.
/// - **Extensibility**: Custom layers can implement this trait to seamlessly integrate into the framework.
///
/// ## Required Traits
/// Layers implementing `Layer` must also implement:
/// - `TrainLayer`: For forward and backward computations.
/// - `MSGPackFormatting`: For serialization and deserialization in MessagePack format.
/// - `Any`: For runtime downcasting of the layer.
/// - `DynClone`: For cloning layer instances dynamically.
/// - `Debug`: For inspecting layer properties during debugging.
///
/// ## Example
/// ```rust
/// use mininn::prelude::*;
/// use mininn_derive::Layer;
/// use ndarray::{ArrayD, ArrayViewD};
/// use serde::{Deserialize, Serialize};
///
/// #[derive(Layer, Debug, Clone, Serialize, Deserialize)]
/// pub struct DenseLayer {
/// weights: ArrayD<f32>,
/// biases: ArrayD<f32>,
/// }
///
/// impl TrainLayer for DenseLayer {
/// fn forward(&mut self, input: ArrayViewD<f32>, mode: &NNMode) -> NNResult<ArrayD<f32>> {
/// // Perform forward pass computation
/// todo!()
/// }
///
/// fn backward(
/// &mut self,
/// output_gradient: ArrayViewD<f32>,
/// learning_rate: f32,
/// optimizer: &Optimizer,
/// mode: &NNMode,
/// ) -> NNResult<ArrayD<f32>> {
/// // Perform backward pass computation
/// todo!()
/// }
/// }
/// ```
///
clone_trait_object!;