//! Dense layer descriptors and layer-plan validation.
//!
//! Layers address their parameters by offset into flat, shared weight and
//! bias buffers; all range arithmetic is overflow-checked.

use crate::activations::ActivationKind;

/// Errors produced while validating or constructing a layer plan.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum LayerError {
    /// The plan contains no layers at all.
    EmptyPlan,
    /// A layer has a zero input/output width, or the topology is too short.
    InvalidShape,
    /// A weight/bias range overflows `usize` or falls outside its buffer.
    InvalidRange,
    /// A layer's input width does not match the previous layer's output width.
    IncompatibleChain,
    /// A caller-supplied output buffer is smaller than required.
    BufferTooSmall,
    /// Computed parameter totals do not match the supplied buffer lengths.
    CountMismatch,
}

/// Description of one fully-connected (dense) layer, addressing its
/// parameters by offset into shared weight and bias buffers.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct DenseLayerDesc {
    /// Width of the input vector (must be non-zero to validate).
    pub input_size: usize,
    /// Width of the output vector (must be non-zero to validate).
    pub output_size: usize,
    /// Start index of this layer's weights in the shared weight buffer.
    pub weight_offset: usize,
    /// Start index of this layer's biases in the shared bias buffer.
    pub bias_offset: usize,
    /// Activation applied to this layer's output.
    pub activation: ActivationKind,
}

22impl DenseLayerDesc {
23    pub fn weight_len(&self) -> Option<usize> {
24        self.input_size.checked_mul(self.output_size)
25    }
26
27    pub fn validate_ranges(&self, weights_len: usize, biases_len: usize) -> bool {
28        if self.input_size == 0 || self.output_size == 0 {
29            return false;
30        }
31        let w_len = match self.weight_len() {
32            Some(v) => v,
33            None => return false,
34        };
35        let w_end = match self.weight_offset.checked_add(w_len) {
36            Some(v) => v,
37            None => return false,
38        };
39        let b_end = match self.bias_offset.checked_add(self.output_size) {
40            Some(v) => v,
41            None => return false,
42        };
43        w_end <= weights_len && b_end <= biases_len
44    }
45}
46
/// A single layer in a plan. Currently only dense layers exist; the enum
/// leaves room for additional layer kinds.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum LayerSpec {
    /// Fully-connected layer described by offsets into shared buffers.
    Dense(DenseLayerDesc),
}

52impl LayerSpec {
53    pub fn input_size(&self) -> usize {
54        match self {
55            LayerSpec::Dense(d) => d.input_size,
56        }
57    }
58
59    pub fn output_size(&self) -> usize {
60        match self {
61            LayerSpec::Dense(d) => d.output_size,
62        }
63    }
64
65    pub fn validate_ranges(&self, weights_len: usize, biases_len: usize) -> bool {
66        match self {
67            LayerSpec::Dense(d) => d.validate_ranges(weights_len, biases_len),
68        }
69    }
70}
71
/// A borrowed description of a full network: an ordered list of layers plus
/// the flat weight and bias buffers their offsets point into.
pub struct LayerPlan<'a> {
    /// Layers in forward order.
    pub layers: &'a [LayerSpec],
    /// Flat weight storage shared by all layers.
    pub weights: &'a [f32],
    /// Flat bias storage shared by all layers.
    pub biases: &'a [f32],
}

78impl<'a> LayerPlan<'a> {
79    pub fn validate(&self) -> Result<(), LayerError> {
80        if self.layers.is_empty() {
81            return Err(LayerError::EmptyPlan);
82        }
83
84        let mut prev_out = self.layers[0].input_size();
85        if prev_out == 0 {
86            return Err(LayerError::InvalidShape);
87        }
88
89        for (idx, layer) in self.layers.iter().enumerate() {
90            if idx > 0 && layer.input_size() != prev_out {
91                return Err(LayerError::IncompatibleChain);
92            }
93            if layer.output_size() == 0 {
94                return Err(LayerError::InvalidShape);
95            }
96            if !layer.validate_ranges(self.weights.len(), self.biases.len()) {
97                return Err(LayerError::InvalidRange);
98            }
99            prev_out = layer.output_size();
100        }
101
102        Ok(())
103    }
104
105    pub fn input_size(&self) -> Option<usize> {
106        self.layers.first().map(|x| x.input_size())
107    }
108
109    pub fn output_size(&self) -> Option<usize> {
110        self.layers.last().map(|x| x.output_size())
111    }
112
113    pub fn max_width(&self) -> Option<usize> {
114        let mut max_w = 0usize;
115        for l in self.layers {
116            let in_w = l.input_size();
117            let out_w = l.output_size();
118            if in_w > max_w { max_w = in_w; }
119            if out_w > max_w { max_w = out_w; }
120        }
121        if max_w == 0 { None } else { Some(max_w) }
122    }
123
124    pub fn total_neurons(&self) -> Option<usize> {
125        let mut total = 0usize;
126        for layer in self.layers {
127            total = total.checked_add(layer.input_size())?;
128        }
129        total.checked_add(self.output_size()?)
130    }
131}
132
133pub fn build_dense_specs_from_layers(
134    layers: &[usize],
135    hidden_activation: ActivationKind,
136    output_activation: ActivationKind,
137    weights_len: usize,
138    biases_len: usize,
139    out: &mut [LayerSpec],
140) -> Result<usize, LayerError> {
141    if layers.len() < 2 {
142        return Err(LayerError::InvalidShape);
143    }
144
145    let layer_count = layers.len() - 1;
146    if out.len() < layer_count {
147        return Err(LayerError::BufferTooSmall);
148    }
149
150    let mut w_off = 0usize;
151    let mut b_off = 0usize;
152
153    for i in 0..layer_count {
154        let input_size = layers[i];
155        let output_size = layers[i + 1];
156        if input_size == 0 || output_size == 0 {
157            return Err(LayerError::InvalidShape);
158        }
159
160        let weight_len = input_size.checked_mul(output_size).ok_or(LayerError::InvalidRange)?;
161        let activation = if i + 1 == layer_count {
162            output_activation
163        } else {
164            hidden_activation
165        };
166
167        out[i] = LayerSpec::Dense(DenseLayerDesc {
168            input_size,
169            output_size,
170            weight_offset: w_off,
171            bias_offset: b_off,
172            activation,
173        });
174
175        w_off = w_off.checked_add(weight_len).ok_or(LayerError::InvalidRange)?;
176        b_off = b_off.checked_add(output_size).ok_or(LayerError::InvalidRange)?;
177    }
178
179    if w_off != weights_len || b_off != biases_len {
180        return Err(LayerError::CountMismatch);
181    }
182
183    Ok(layer_count)
184}