// scirs2_neural/layers/mod.rs

use crate::error::Result;
use scirs2_core::ndarray::{Array, ScalarOperand};
use scirs2_core::numeric::Float;
use std::fmt::Debug;

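/// A layer in a neural network, generic over the floating-point type `F`.
///
/// Implementors provide a forward pass, a backward pass, and a parameter
/// update step; everything else has a sensible default. A minimal sketch
/// (a hypothetical parameterless `Identity` layer, not part of this crate):
///
/// ```ignore
/// use scirs2_core::ndarray::IxDyn;
///
/// struct Identity;
///
/// impl<F: Float + Debug + ScalarOperand> Layer<F> for Identity {
///     fn forward(&self, input: &Array<F, IxDyn>) -> Result<Array<F, IxDyn>> {
///         Ok(input.clone()) // identity: output equals input
///     }
///
///     fn backward(
///         &self,
///         _input: &Array<F, IxDyn>,
///         grad_output: &Array<F, IxDyn>,
///     ) -> Result<Array<F, IxDyn>> {
///         Ok(grad_output.clone()) // d(identity)/dx = 1, so gradients pass through
///     }
///
///     fn update(&mut self, _learning_rate: F) -> Result<()> {
///         Ok(()) // no parameters to update
///     }
///
///     fn as_any(&self) -> &dyn std::any::Any {
///         self
///     }
///
///     fn as_any_mut(&mut self) -> &mut dyn std::any::Any {
///         self
///     }
/// }
/// ```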
pub trait Layer<F: Float + Debug + ScalarOperand>: Send + Sync {
    /// Compute the layer's output for the given input.
    fn forward(
        &self,
        input: &Array<F, scirs2_core::ndarray::IxDyn>,
    ) -> Result<Array<F, scirs2_core::ndarray::IxDyn>>;

    /// Compute the gradient with respect to the input, given the original
    /// input and the gradient flowing back from the next layer.
    fn backward(
        &self,
        input: &Array<F, scirs2_core::ndarray::IxDyn>,
        grad_output: &Array<F, scirs2_core::ndarray::IxDyn>,
    ) -> Result<Array<F, scirs2_core::ndarray::IxDyn>>;

    /// Apply accumulated gradients to the layer's parameters.
    fn update(&mut self, learning_rate: F) -> Result<()>;

    /// Downcasting support for dynamic layer inspection.
    fn as_any(&self) -> &dyn std::any::Any;

    fn as_any_mut(&mut self) -> &mut dyn std::any::Any;

    /// The layer's parameters; defaults to none.
    fn params(&self) -> Vec<Array<F, scirs2_core::ndarray::IxDyn>> {
        Vec::new()
    }

    /// The layer's current gradients; defaults to none.
    fn gradients(&self) -> Vec<Array<F, scirs2_core::ndarray::IxDyn>> {
        Vec::new()
    }

    /// Overwrite the layer's gradients; a no-op for parameterless layers.
    fn set_gradients(
        &mut self,
        _gradients: &[Array<F, scirs2_core::ndarray::IxDyn>],
    ) -> Result<()> {
        Ok(())
    }

    /// Overwrite the layer's parameters; a no-op for parameterless layers.
    fn set_params(&mut self, _params: &[Array<F, scirs2_core::ndarray::IxDyn>]) -> Result<()> {
        Ok(())
    }

    /// Switch between training and inference behavior (e.g. dropout).
    fn set_training(&mut self, _training: bool) {}

    /// Whether the layer is currently in training mode.
    fn is_training(&self) -> bool {
        true
    }

    /// A short name identifying the kind of layer.
    fn layer_type(&self) -> &str {
        "Unknown"
    }

    /// Total number of trainable parameters.
    fn parameter_count(&self) -> usize {
        0
    }

    /// A human-readable description of the layer.
    fn layer_description(&self) -> String {
        format!("type:{}", self.layer_type())
    }

    /// Expected input shape, if known.
    fn input_shape(&self) -> Option<Vec<usize>> {
        None
    }

    /// Produced output shape, if known.
    fn output_shape(&self) -> Option<Vec<usize>> {
        None
    }

    /// An optional instance name for the layer.
    fn name(&self) -> Option<&str> {
        None
    }
}

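/// A [`Layer`] whose parameters can be read and replaced wholesale,
/// e.g. by optimizers or serialization code.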
pub trait ParamLayer<F: Float + Debug + ScalarOperand>: Layer<F> {
    /// Get the layer's parameters.
    fn get_parameters(&self) -> Vec<Array<F, scirs2_core::ndarray::IxDyn>>;

    /// Get the gradients of the layer's parameters.
    fn get_gradients(&self) -> Vec<Array<F, scirs2_core::ndarray::IxDyn>>;

    /// Replace the layer's parameters.
    fn set_parameters(&mut self, params: Vec<Array<F, scirs2_core::ndarray::IxDyn>>) -> Result<()>;
}

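/// Summary metadata for one layer in a [`Sequential`] model, as returned
/// by [`Sequential::layer_info`].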
#[derive(Debug, Clone)]
pub struct LayerInfo {
    /// Position of the layer within the model.
    pub index: usize,
    /// Instance name, or a generated `Layer_{index}` fallback.
    pub name: String,
    /// The kind of layer, as reported by `Layer::layer_type`.
    pub layer_type: String,
    /// Number of trainable parameters.
    pub parameter_count: usize,
    /// Expected input shape, if known.
    pub input_shape: Option<Vec<usize>>,
    /// Produced output shape, if known.
    pub output_shape: Option<Vec<usize>>,
}

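/// An ordered container of layers executed one after another.
///
/// A usage sketch, assuming a `Dense::new(input_size, output_size)`-style
/// constructor (see `dense.rs` for the real signature):
///
/// ```ignore
/// let mut model: Sequential<f64> = Sequential::new();
/// model.add(Dense::new(784, 128));
/// model.add(Dense::new(128, 10));
///
/// assert_eq!(model.len(), 2);
/// println!("total parameters: {}", model.total_parameters());
///
/// // `input` is an `Array<f64, IxDyn>` batch; `forward` threads it
/// // through every layer in order.
/// let output = model.forward(&input)?;
/// ```
///
/// Note that cloning a `Sequential` yields an *empty* model (see the
/// `Clone` impl below), because boxed layer trait objects cannot be cloned.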
pub struct Sequential<F: Float + Debug + ScalarOperand> {
    layers: Vec<Box<dyn Layer<F> + Send + Sync>>,
    training: bool,
}

impl<F: Float + Debug + ScalarOperand> std::fmt::Debug for Sequential<F> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("Sequential")
            .field("num_layers", &self.layers.len())
            .field("training", &self.training)
            .finish()
    }
}

impl<F: Float + Debug + ScalarOperand + 'static> Clone for Sequential<F> {
    fn clone(&self) -> Self {
        // Boxed `dyn Layer` trait objects cannot be cloned, so the clone
        // starts as an empty container that preserves only the training flag.
        Self {
            layers: Vec::new(),
            training: self.training,
        }
    }
}

impl<F: Float + Debug + ScalarOperand> Default for Sequential<F> {
    fn default() -> Self {
        Self::new()
    }
}

impl<F: Float + Debug + ScalarOperand> Sequential<F> {
    /// Create an empty model in training mode.
    pub fn new() -> Self {
        Self {
            layers: Vec::new(),
            training: true,
        }
    }

    /// Append a layer to the end of the model.
    pub fn add<L: Layer<F> + Send + Sync + 'static>(&mut self, layer: L) {
        self.layers.push(Box::new(layer));
    }

    /// Number of layers in the model.
    pub fn len(&self) -> usize {
        self.layers.len()
    }

    /// Whether the model contains no layers.
    pub fn is_empty(&self) -> bool {
        self.layers.is_empty()
    }

    /// Total number of trainable parameters across all layers.
    pub fn total_parameters(&self) -> usize {
        self.layers
            .iter()
            .map(|layer| layer.parameter_count())
            .sum()
    }

    /// Summaries of every layer, in order.
    pub fn layer_info(&self) -> Vec<LayerInfo> {
        self.layers
            .iter()
            .enumerate()
            .map(|(i, layer)| LayerInfo {
                index: i,
                name: layer
                    .name()
                    .map(ToString::to_string)
                    .unwrap_or_else(|| format!("Layer_{i}")),
                layer_type: layer.layer_type().to_string(),
                parameter_count: layer.parameter_count(),
                input_shape: layer.input_shape(),
                output_shape: layer.output_shape(),
            })
            .collect()
    }
}

impl<F: Float + Debug + ScalarOperand> Layer<F> for Sequential<F> {
    fn forward(
        &self,
        input: &Array<F, scirs2_core::ndarray::IxDyn>,
    ) -> Result<Array<F, scirs2_core::ndarray::IxDyn>> {
        // Thread the input through every layer in order.
        let mut output = input.clone();
        for layer in &self.layers {
            output = layer.forward(&output)?;
        }
        Ok(output)
    }

    fn backward(
        &self,
        _input: &Array<F, scirs2_core::ndarray::IxDyn>,
        grad_output: &Array<F, scirs2_core::ndarray::IxDyn>,
    ) -> Result<Array<F, scirs2_core::ndarray::IxDyn>> {
        // Pass-through placeholder: a complete backward pass would need the
        // intermediate activations from `forward` in order to propagate
        // gradients through each layer in reverse.
        Ok(grad_output.clone())
    }

    fn update(&mut self, learning_rate: F) -> Result<()> {
        for layer in &mut self.layers {
            layer.update(learning_rate)?;
        }
        Ok(())
    }

    fn params(&self) -> Vec<Array<F, scirs2_core::ndarray::IxDyn>> {
        let mut params = Vec::new();
        for layer in &self.layers {
            params.extend(layer.params());
        }
        params
    }

    fn set_training(&mut self, training: bool) {
        self.training = training;
        for layer in &mut self.layers {
            layer.set_training(training);
        }
    }

    fn is_training(&self) -> bool {
        self.training
    }

    fn as_any(&self) -> &dyn std::any::Any {
        self
    }

    fn as_any_mut(&mut self) -> &mut dyn std::any::Any {
        self
    }

    fn layer_type(&self) -> &str {
        "Sequential"
    }

    fn parameter_count(&self) -> usize {
        self.layers
            .iter()
            .map(|layer| layer.parameter_count())
            .sum()
    }
}

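/// A declarative description of a layer, e.g. for building models from
/// configuration data rather than direct constructor calls. Constructing
/// a config is plain enum syntax:
///
/// ```ignore
/// let dense = LayerConfig::Dense {
///     input_size: 784,
///     output_size: 128,
///     activation: Some("relu".to_string()),
/// };
/// let dropout = LayerConfig::Dropout { rate: 0.5 };
/// ```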
#[derive(Debug, Clone)]
pub enum LayerConfig {
    /// A fully connected layer.
    Dense {
        input_size: usize,
        output_size: usize,
        activation: Option<String>,
    },
    /// A 2D convolutional layer.
    Conv2D {
        in_channels: usize,
        out_channels: usize,
        kernel_size: (usize, usize),
    },
    /// A dropout layer with the given drop probability.
    Dropout { rate: f64 },
}

pub mod conv;
pub mod dense;
pub mod dropout;
pub mod normalization;
pub mod recurrent;

pub use conv::Conv2D;
pub use dense::Dense;
pub use dropout::Dropout;
pub use normalization::{BatchNorm, LayerNorm};
pub use recurrent::LSTM;