scirs2_neural/layers/mod.rs

use crate::error::Result;
use ndarray::{Array, ScalarOperand};
use num_traits::Float;
use std::fmt::Debug;

/// Base trait for neural network layers.
pub trait Layer<F: Float + Debug + ScalarOperand>: Send + Sync {
    /// Computes the layer's output for the given input.
    fn forward(&self, input: &Array<F, ndarray::IxDyn>) -> Result<Array<F, ndarray::IxDyn>>;

    /// Computes the gradient with respect to the input, given the original
    /// input and the gradient flowing back from the following layer.
    fn backward(
        &self,
        input: &Array<F, ndarray::IxDyn>,
        grad_output: &Array<F, ndarray::IxDyn>,
    ) -> Result<Array<F, ndarray::IxDyn>>;

    /// Updates the layer's parameters using the given learning rate.
    fn update(&mut self, learning_rate: F) -> Result<()>;

    /// Returns the layer as `Any` to allow downcasting to a concrete type.
    fn as_any(&self) -> &dyn std::any::Any;

    /// Returns the layer as mutable `Any` to allow downcasting.
    fn as_any_mut(&mut self) -> &mut dyn std::any::Any;

    /// Returns the layer's parameters; empty for parameterless layers.
    fn params(&self) -> Vec<Array<F, ndarray::IxDyn>> {
        Vec::new()
    }

    /// Returns the gradients of the layer's parameters; empty by default.
    fn gradients(&self) -> Vec<Array<F, ndarray::IxDyn>> {
        Vec::new()
    }

    /// Overwrites the layer's gradients; a no-op by default.
    fn set_gradients(&mut self, _gradients: &[Array<F, ndarray::IxDyn>]) -> Result<()> {
        Ok(())
    }

    /// Overwrites the layer's parameters; a no-op by default.
    fn set_params(&mut self, _params: &[Array<F, ndarray::IxDyn>]) -> Result<()> {
        Ok(())
    }

    /// Switches the layer between training and inference behavior.
    fn set_training(&mut self, _training: bool) {}

    /// Reports whether the layer is currently in training mode.
    fn is_training(&self) -> bool {
        true
    }

    /// A short identifier for the kind of layer, e.g. "Dense".
    fn layer_type(&self) -> &str {
        "Unknown"
    }

    /// The number of trainable parameters in the layer.
    fn parameter_count(&self) -> usize {
        0
    }

    /// A human-readable description of the layer.
    fn layer_description(&self) -> String {
        format!("type:{}", self.layer_type())
    }

    /// The expected input shape, if known.
    fn inputshape(&self) -> Option<Vec<usize>> {
        None
    }

    /// The produced output shape, if known.
    fn outputshape(&self) -> Option<Vec<usize>> {
        None
    }

    /// An optional instance name for the layer.
    fn name(&self) -> Option<&str> {
        None
    }
}
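
// A minimal sketch of implementing `Layer`. `ScaleLayer` is a hypothetical
// example, not part of this crate's API: it multiplies every element by a
// fixed factor and has no trainable state. Gated behind `cfg(test)` so it
// only compiles for tests.
#[cfg(test)]
pub(crate) mod layer_example {
    use super::Layer;
    use crate::error::Result;
    use ndarray::{Array, IxDyn, ScalarOperand};
    use num_traits::Float;
    use std::fmt::Debug;

    /// Hypothetical example layer computing `y = x * factor`.
    pub struct ScaleLayer<F: Float> {
        pub factor: F,
    }

    impl<F: Float + Debug + ScalarOperand + Send + Sync + 'static> Layer<F> for ScaleLayer<F> {
        fn forward(&self, input: &Array<F, IxDyn>) -> Result<Array<F, IxDyn>> {
            // Elementwise scaling; the shape is preserved.
            Ok(input.mapv(|x| x * self.factor))
        }

        fn backward(
            &self,
            _input: &Array<F, IxDyn>,
            grad_output: &Array<F, IxDyn>,
        ) -> Result<Array<F, IxDyn>> {
            // d(x * c)/dx = c, so the upstream gradient is scaled by `factor`.
            Ok(grad_output.mapv(|g| g * self.factor))
        }

        fn update(&mut self, _learning_rate: F) -> Result<()> {
            // Nothing to train.
            Ok(())
        }

        fn as_any(&self) -> &dyn std::any::Any {
            self
        }

        fn as_any_mut(&mut self) -> &mut dyn std::any::Any {
            self
        }

        fn layer_type(&self) -> &str {
            "Scale"
        }
    }
}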

/// Extension trait for layers that expose trainable parameters directly.
pub trait ParamLayer<F: Float + Debug + ScalarOperand>: Layer<F> {
    /// Returns the layer's parameter tensors.
    fn get_parameters(&self) -> Vec<Array<F, ndarray::IxDyn>>;

    /// Returns the gradients of the layer's parameter tensors.
    fn get_gradients(&self) -> Vec<Array<F, ndarray::IxDyn>>;

    /// Replaces the layer's parameter tensors.
    fn set_parameters(&mut self, params: Vec<Array<F, ndarray::IxDyn>>) -> Result<()>;
}
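
// A minimal sketch of a layer with trainable state implementing both `Layer`
// and `ParamLayer`. `BiasLayer` is hypothetical, not part of this crate's
// API: it adds one learnable scalar offset to every element. Note that
// `backward` takes `&self`, so a real layer would need interior mutability
// (or an optimizer-driven design) to record gradients.
#[cfg(test)]
mod param_layer_example {
    use super::{Layer, ParamLayer};
    use crate::error::Result;
    use ndarray::{Array, IxDyn, ScalarOperand};
    use num_traits::Float;
    use std::fmt::Debug;

    /// Hypothetical example layer computing `y = x + bias`.
    struct BiasLayer<F: Float> {
        bias: F,
        grad: F,
    }

    impl<F: Float> BiasLayer<F> {
        fn new(bias: F) -> Self {
            Self {
                bias,
                grad: F::zero(),
            }
        }
    }

    impl<F: Float + Debug + ScalarOperand + Send + Sync + 'static> Layer<F> for BiasLayer<F> {
        fn forward(&self, input: &Array<F, IxDyn>) -> Result<Array<F, IxDyn>> {
            Ok(input.mapv(|x| x + self.bias))
        }

        fn backward(
            &self,
            _input: &Array<F, IxDyn>,
            grad_output: &Array<F, IxDyn>,
        ) -> Result<Array<F, IxDyn>> {
            // d(x + b)/dx = 1: the upstream gradient passes through unchanged.
            Ok(grad_output.clone())
        }

        fn update(&mut self, learning_rate: F) -> Result<()> {
            // Plain gradient-descent step on the scalar bias.
            self.bias = self.bias - learning_rate * self.grad;
            Ok(())
        }

        fn as_any(&self) -> &dyn std::any::Any {
            self
        }

        fn as_any_mut(&mut self) -> &mut dyn std::any::Any {
            self
        }

        fn parameter_count(&self) -> usize {
            1
        }
    }

    impl<F: Float + Debug + ScalarOperand + Send + Sync + 'static> ParamLayer<F> for BiasLayer<F> {
        fn get_parameters(&self) -> Vec<Array<F, IxDyn>> {
            vec![Array::from_elem(IxDyn(&[1]), self.bias)]
        }

        fn get_gradients(&self) -> Vec<Array<F, IxDyn>> {
            vec![Array::from_elem(IxDyn(&[1]), self.grad)]
        }

        fn set_parameters(&mut self, params: Vec<Array<F, IxDyn>>) -> Result<()> {
            if let Some(&b) = params.first().and_then(|p| p.iter().next()) {
                self.bias = b;
            }
            Ok(())
        }
    }

    #[test]
    fn update_applies_a_gradient_step() {
        let mut layer = BiasLayer::<f64>::new(1.0);
        layer.grad = 0.5;
        layer.update(0.1).unwrap();
        assert!((layer.bias - 0.95).abs() < 1e-12);
    }
}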

/// Metadata describing a single layer within a model.
#[derive(Debug, Clone)]
pub struct LayerInfo {
    /// Position of the layer within the model.
    pub index: usize,
    /// Instance name of the layer.
    pub name: String,
    /// Kind of layer, e.g. "Dense".
    pub layer_type: String,
    /// Number of trainable parameters.
    pub parameter_count: usize,
    /// Expected input shape, if known.
    pub inputshape: Option<Vec<usize>>,
    /// Produced output shape, if known.
    pub outputshape: Option<Vec<usize>>,
}

/// A model that chains layers, feeding each layer's output into the next.
pub struct Sequential<F: Float + Debug + ScalarOperand> {
    layers: Vec<Box<dyn Layer<F> + Send + Sync>>,
    training: bool,
}

impl<F: Float + Debug + ScalarOperand> std::fmt::Debug for Sequential<F> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("Sequential")
            .field("num_layers", &self.layers.len())
            .field("training", &self.training)
            .finish()
    }
}

impl<F: Float + Debug + ScalarOperand + 'static> Clone for Sequential<F> {
    fn clone(&self) -> Self {
        // Boxed `dyn Layer` trait objects cannot be cloned, so the clone is an
        // empty container that only preserves the training flag.
        Self {
            layers: Vec::new(),
            training: self.training,
        }
    }
}

impl<F: Float + Debug + ScalarOperand> Default for Sequential<F> {
    fn default() -> Self {
        Self::new()
    }
}

impl<F: Float + Debug + ScalarOperand> Sequential<F> {
    /// Creates an empty model in training mode.
    pub fn new() -> Self {
        Self {
            layers: Vec::new(),
            training: true,
        }
    }

    /// Appends a layer to the end of the model.
    pub fn add<L: Layer<F> + Send + Sync + 'static>(&mut self, layer: L) {
        self.layers.push(Box::new(layer));
    }

    /// Returns the number of layers in the model.
    pub fn len(&self) -> usize {
        self.layers.len()
    }

    /// Returns `true` if the model contains no layers.
    pub fn is_empty(&self) -> bool {
        self.layers.is_empty()
    }

    /// Total number of trainable parameters across all layers.
    pub fn total_parameters(&self) -> usize {
        self.layers
            .iter()
            .map(|layer| layer.parameter_count())
            .sum()
    }

    /// Collects per-layer metadata; unnamed layers fall back to "Layer_{index}".
    pub fn layer_info(&self) -> Vec<LayerInfo> {
        self.layers
            .iter()
            .enumerate()
            .map(|(i, layer)| LayerInfo {
                index: i,
                name: layer
                    .name()
                    .map(str::to_string)
                    .unwrap_or_else(|| format!("Layer_{i}")),
                layer_type: layer.layer_type().to_string(),
                parameter_count: layer.parameter_count(),
                inputshape: layer.inputshape(),
                outputshape: layer.outputshape(),
            })
            .collect()
    }
}

impl<F: Float + Debug + ScalarOperand + 'static> Layer<F> for Sequential<F> {
    fn forward(&self, input: &Array<F, ndarray::IxDyn>) -> Result<Array<F, ndarray::IxDyn>> {
        let mut output = input.clone();
        for layer in &self.layers {
            output = layer.forward(&output)?;
        }
        Ok(output)
    }

    fn backward(
        &self,
        _input: &Array<F, ndarray::IxDyn>,
        grad_output: &Array<F, ndarray::IxDyn>,
    ) -> Result<Array<F, ndarray::IxDyn>> {
        // The forward pass does not cache intermediate activations, so the
        // gradient cannot be chained back through the child layers here; the
        // upstream gradient is passed through unchanged.
        Ok(grad_output.clone())
    }

    fn update(&mut self, learning_rate: F) -> Result<()> {
        for layer in &mut self.layers {
            layer.update(learning_rate)?;
        }
        Ok(())
    }

    fn params(&self) -> Vec<Array<F, ndarray::IxDyn>> {
        let mut params = Vec::new();
        for layer in &self.layers {
            params.extend(layer.params());
        }
        params
    }

    fn set_training(&mut self, training: bool) {
        self.training = training;
        for layer in &mut self.layers {
            layer.set_training(training);
        }
    }

    fn is_training(&self) -> bool {
        self.training
    }

    fn as_any(&self) -> &dyn std::any::Any {
        self
    }

    fn as_any_mut(&mut self) -> &mut dyn std::any::Any {
        self
    }

    fn layer_type(&self) -> &str {
        "Sequential"
    }

    fn parameter_count(&self) -> usize {
        self.total_parameters()
    }
}
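
// Illustrative, test-only usage of `Sequential`, reusing the hypothetical
// `ScaleLayer` from `layer_example` above: layers are chained in insertion
// order, and training mode is propagated to every child.
#[cfg(test)]
mod sequential_example {
    use super::layer_example::ScaleLayer;
    use super::{Layer, Sequential};
    use ndarray::{Array, IxDyn};

    #[test]
    fn forward_chains_layers_in_order() {
        let mut model: Sequential<f64> = Sequential::new();
        // Two scale layers compose to multiplication by 6.0.
        model.add(ScaleLayer { factor: 2.0 });
        model.add(ScaleLayer { factor: 3.0 });
        assert_eq!(model.len(), 2);
        // `ScaleLayer` has no trainable parameters.
        assert_eq!(model.total_parameters(), 0);
        // Unnamed layers fall back to "Layer_{index}" in the metadata.
        assert_eq!(model.layer_info()[0].name, "Layer_0");

        let input = Array::from_elem(IxDyn(&[2, 2]), 1.0f64);
        let output = model.forward(&input).unwrap();
        assert!(output.iter().all(|&v| (v - 6.0).abs() < 1e-12));

        // `set_training` reaches every child layer and the container itself.
        model.set_training(false);
        assert!(!model.is_training());
    }
}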

/// Declarative configuration for constructing layers.
#[derive(Debug, Clone)]
pub enum LayerConfig {
    /// Fully connected layer.
    Dense {
        input_size: usize,
        output_size: usize,
        activation: Option<String>,
    },
    /// 2D convolutional layer.
    Conv2D {
        in_channels: usize,
        out_channels: usize,
        kernel_size: (usize, usize),
    },
    /// Dropout with the given drop probability.
    Dropout { rate: f64 },
}
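
// A minimal sketch of consuming `LayerConfig`. The helper below is
// hypothetical (not part of this crate's API); the parameter counts assume
// every Dense/Conv2D layer carries a bias term.
#[cfg(test)]
mod config_example {
    use super::LayerConfig;

    /// Hypothetical helper: how many trainable parameters a config implies.
    fn implied_parameter_count(config: &LayerConfig) -> usize {
        match config {
            // Weight matrix plus one bias per output unit.
            LayerConfig::Dense {
                input_size,
                output_size,
                ..
            } => input_size * output_size + output_size,
            // One kernel per (in, out) channel pair, plus one bias per
            // output channel.
            LayerConfig::Conv2D {
                in_channels,
                out_channels,
                kernel_size,
            } => in_channels * out_channels * kernel_size.0 * kernel_size.1 + out_channels,
            // Dropout has no trainable parameters.
            LayerConfig::Dropout { .. } => 0,
        }
    }

    #[test]
    fn dense_counts_weights_and_biases() {
        let cfg = LayerConfig::Dense {
            input_size: 4,
            output_size: 3,
            activation: None,
        };
        assert_eq!(implied_parameter_count(&cfg), 4 * 3 + 3);
    }
}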

pub mod conv;
pub mod dense;
pub mod dropout;
pub mod normalization;
pub mod recurrent;

pub use conv::Conv2D;
pub use dense::Dense;
pub use dropout::Dropout;
pub use normalization::{BatchNorm, LayerNorm};
pub use recurrent::LSTM;