extern crate ndarray;
extern crate ndarray_rand;

use ndarray::prelude::*;
use ndarray_rand::rand_distr::Uniform;
use ndarray_rand::RandomExt;
use std::sync::RwLock;

/// A one-dimensional layer. `pass` runs a forward pass over the input and
/// returns the pre-activation output `z` together with the activated output `a`.
pub trait Layer1d {
    fn pass(&self, input_array: Array1<f64>) -> (Array1<f64>, Array1<f64>);
}

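// A minimal sketch, not part of the original source: a trivial pass-through
// implementor of Layer1d, included only to illustrate the (z, a) contract
// (pre-activation first, activated output second). `Identity1d` is a
// hypothetical name used purely for illustration.
#[cfg(test)]
mod layer1d_contract_sketch {
    use super::*;
    use ndarray::*;

    struct Identity1d;

    impl Layer1d for Identity1d {
        fn pass(&self, input_array: Array1<f64>) -> (Array1<f64>, Array1<f64>) {
            // With no weights and an identity activation, z and a are both the input.
            (input_array.clone(), input_array)
        }
    }

    #[test]
    fn identity_layer_returns_input_twice() {
        let layer = Identity1d;
        let (z, a) = layer.pass(arr1(&[1., 2., 3.]));
        assert_eq!(z, arr1(&[1., 2., 3.]));
        assert_eq!(a, arr1(&[1., 2., 3.]));
    }
}
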
/// A fully connected layer over 1-D arrays; weights and bias are kept behind `RwLock`s.
pub struct Dense1d {
    activation: fn(Array1<f64>) -> Array1<f64>,
    deriv_activation: fn(Array1<f64>) -> Array1<f64>,
    weights: RwLock<Array2<f64>>,
    bias: RwLock<Array1<f64>>,
}

impl Dense1d {
    /// Builds a layer from an explicit activation pair, weight matrix, and bias vector.
    pub fn from(
        activation: fn(Array1<f64>) -> Array1<f64>,
        deriv_activation: fn(Array1<f64>) -> Array1<f64>,
        weights: Array2<f64>,
        bias: Array1<f64>,
    ) -> Self {
        Self {
            activation,
            deriv_activation,
            weights: RwLock::new(weights),
            bias: RwLock::new(bias),
        }
    }

    /// Builds a layer of `layer_size` neurons taking `input_size` inputs,
    /// with weights and biases drawn uniformly from [-1, 1).
    pub fn new(
        input_size: usize,
        layer_size: usize,
        activation_fn: fn(Array1<f64>) -> Array1<f64>,
        deriv_activation_fn: fn(Array1<f64>) -> Array1<f64>,
    ) -> Self {
        Self {
            activation: activation_fn,
            deriv_activation: deriv_activation_fn,
            weights: RwLock::new(Array2::random(
                (layer_size, input_size),
                Uniform::new(-1., 1.),
            )),
            bias: RwLock::new(Array1::random(layer_size, Uniform::new(-1., 1.))),
        }
    }
}
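
// A minimal sketch, not part of the original source, spelling out the shapes
// that Dense1d::new produces: a (layer_size, input_size) weight matrix and a
// bias of layer_size entries.
#[cfg(test)]
mod construction_sketch {
    use super::*;

    #[test]
    fn new_layer_has_expected_shapes() {
        let layer = Dense1d::new(5, 10, |x| x, |x| x);

        assert_eq!(layer.weights.read().unwrap().shape(), &[10, 5]);
        assert_eq!(layer.bias.read().unwrap().shape(), &[10]);
    }
}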

impl Layer1d for Dense1d {
    fn pass(&self, input_array: Array1<f64>) -> (Array1<f64>, Array1<f64>) {
        let weights = self.weights.read().unwrap();
        let bias = self.bias.read().unwrap();

        assert_eq!(
            weights.shape()[1],
            input_array.shape()[0],
            "Layer expects an input of size {}, \
             but was given an input of size {}",
            weights.shape()[1],
            input_array.shape()[0]
        );

        // Forward pass: z = W·x + b, then a = activation(z).
        let z = weights.dot(&input_array) + &*bias;
        let a = (self.activation)(z.clone());
        (z, a)
    }
}
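
// A minimal usage sketch, not part of the original source: chaining two
// Dense1d layers for a forward pass. The inline ReLU closures below are
// assumptions for illustration; the crate's own activation functions are
// exercised in the tests further down.
#[cfg(test)]
mod forward_pass_sketch {
    use super::*;
    use ndarray::*;

    #[test]
    fn chain_two_dense_layers() {
        let relu: fn(Array1<f64>) -> Array1<f64> = |x| x.mapv(|v| v.max(0.));
        let deriv_relu: fn(Array1<f64>) -> Array1<f64> =
            |x| x.mapv(|v| if v > 0. { 1. } else { 0. });

        let hidden = Dense1d::new(3, 4, relu, deriv_relu);
        let output = Dense1d::new(4, 2, relu, deriv_relu);

        // Feed the activated output of the first layer into the second.
        let (_z1, a1) = hidden.pass(arr1(&[0.5, -0.25, 1.0]));
        let (_z2, a2) = output.pass(a1);

        assert_eq!(a2.len(), 2);
    }
}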

#[cfg(test)]
mod layers_tests {
    use super::*;
    use ndarray::*;
    use crate::activations::*;

    #[test]
    fn dense1d_pass_arr1_1() {
        let layer = Dense1d::from(
            |x| x,
            |x| x,
            arr2(&[[1., 1., 1.], [1., 1., 1.], [1., 1., 1.]]),
            arr1(&[1., 1., 1.]),
        );
        let input_array = arr1(&[1., 1., 1.]);

        assert_eq!(layer.pass(input_array).1, arr1(&[4., 4., 4.]))
    }

    #[test]
    fn dense1d_pass_arr1_2() {
        let layer = Dense1d::from(
            |x| x,
            |x| x,
            arr2(&[
                [1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.],
                [1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.],
                [1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.],
            ]),
            arr1(&[1., 1., 1.]),
        );
        let input_array = arr1(&[1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.]);

        assert_eq!(layer.pass(input_array).1, arr1(&[13.0, 13.0, 13.0]))
    }

    #[test]
    #[should_panic]
    fn dense1d_pass_arr1_diff_size() {
        let layer = Dense1d::from(
            |x| x,
            |x| x,
            arr2(&[[1., 1., 1., 1.], [1., 1., 1., 1.]]),
            arr1(&[0., 0.]),
        );
        let input_array = arr1(&[1.]);

        layer.pass(input_array);
    }

    #[test]
    fn dense1d_new() {
        let layer = Dense1d::new(5, 10, |x| x, |x| x);

        let input_array = arr1(&[1., 1., 1., 1., 1.]);

        layer.pass(input_array);
    }

    #[test]
    fn dense1d_activation() {
        let layer = Dense1d::from(
            relu_1d,
            deriv_relu_1d,
            arr2(&[[1., 1., 1.], [1., 1., 1.], [1., 1., 1.]]),
            arr1(&[-10., -10., 1.]),
        );
        let input_array = arr1(&[1., 1., 1.]);

        assert_eq!(layer.pass(input_array).1, arr1(&[0., 0., 4.]))
    }
}