prophet/
topology.rs

//! Provides operations, data structures and error definitions for Disciple objects
//! which form the basis for topologies of neural networks.

use std::slice::Iter;
use activation::Activation;

/// Represents the topology element for a fully connected layer
/// with input neurons, output neurons and an activation function.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub struct Layer {
	/// Number of input neurons to this layer.
	pub inputs: usize,

	/// Number of output neurons from this layer.
	pub outputs: usize,

	/// Activation function for this layer.
	pub activation: Activation
}

impl Layer {
	/// Creates a new layer.
	fn new(inputs: usize, outputs: usize, activation: Activation) -> Self {
		Layer {
			inputs,
			outputs,
			activation
		}
	}
}

/// Used to build topologies and to perform some minor compile-time and
/// runtime checks that enforce the validity of the topology as a shape for neural nets.
///
/// The finished topology can be used by `Mentor` types for training, becoming a
/// trained neural network with which the user can predict data.
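///
/// # Examples
///
/// A minimal sketch of the builder chain. It is illustrative only; the `prophet`
/// crate paths are assumed, so the snippet is not compiled as a doc-test:
///
/// ```ignore
/// use prophet::topology::Topology;
/// use prophet::activation::Activation::{Logistic, Tanh};
///
/// // Build a 4-8-2 topology: 4 inputs, one hidden layer of 8 neurons, 2 outputs.
/// let top = Topology::input(4)
///     .layer(8, Logistic)
///     .output(2, Tanh);
///
/// assert_eq!(top.len_input(), 4);
/// assert_eq!(top.len_output(), 2);
/// ```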
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct TopologyBuilder {
	last  : usize,
	layers: Vec<Layer>
}

/// Represents the neural network topology.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Topology {
	layers: Vec<Layer>
}

impl Topology {
	/// Creates a new topology builder with the given number of input neurons.
	///
	/// # Panics
	///
	/// If `size` is zero.
	pub fn input(size: usize) -> TopologyBuilder {
		assert!(size >= 1, "cannot define a zero-sized input layer");

		TopologyBuilder {
			last  : size,
			layers: vec![]
		}
	}

	/// Returns the number of input neurons.
	///
	/// Used by mentors to validate their sample sizes.
	pub fn len_input(&self) -> usize {
		self.layers
			.first()
			.expect("a finished topology must have a valid first layer!")
			.inputs
	}

	/// Returns the number of output neurons.
	///
	/// Used by mentors to validate their sample sizes.
	pub fn len_output(&self) -> usize {
		self.layers
			.last()
			.expect("a finished topology must have a valid last layer!")
			.outputs
	}

	/// Iterates over the layers of this topology.
	pub fn iter_layers(&self) -> Iter<Layer> {
		self.layers.iter()
	}
}

impl TopologyBuilder {
	/// Pushes a fully connected layer with `layer_size` output neurons onto the
	/// topology, using the size of the previously added layer as its input size.
	fn push_layer(&mut self, layer_size: usize, act: Activation) {
		assert!(layer_size >= 1, "cannot define a zero-sized hidden layer");

		self.layers.push(Layer::new(self.last, layer_size, act));
		self.last = layer_size;
	}

	/// Adds a hidden layer to this topology with the given number of neurons.
	///
	/// Bias neurons are implicitly added!
	///
	/// # Panics
	///
	/// If `layer_size` is zero.
	pub fn layer(mut self, layer_size: usize, act: Activation) -> TopologyBuilder {
		self.push_layer(layer_size, act);
		self
	}

	/// Adds some hidden layers to this topology with the given numbers of neurons.
	///
	/// Bias neurons are implicitly added!
	///
	/// # Panics
	///
	/// If any of the specified layer sizes is zero.
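	///
	/// # Examples
	///
	/// An illustrative sketch of adding several hidden layers in one call. The
	/// `prophet` crate paths are assumed, so the snippet is not compiled as a doc-test:
	///
	/// ```ignore
	/// use prophet::topology::Topology;
	/// use prophet::activation::Activation::{ReLU, Tanh};
	///
	/// // Adds two hidden ReLU layers with 10 and 5 neurons respectively.
	/// let top = Topology::input(3)
	///     .layers(&[
	///         (10, ReLU),
	///         (5, ReLU)
	///     ])
	///     .output(1, Tanh);
	///
	/// assert_eq!(top.len_output(), 1);
	/// ```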
	pub fn layers(mut self, layers: &[(usize, Activation)]) -> TopologyBuilder {
		for &(layer_size, act) in layers {
			self.push_layer(layer_size, act);
		}
		self
	}

	/// Finishes constructing a topology by defining its output layer neurons.
	///
	/// Bias neurons are implicitly added!
	///
	/// # Panics
	///
	/// If `layer_size` is zero.
	pub fn output(mut self, layer_size: usize, act: Activation) -> Topology {
		assert!(layer_size >= 1, "cannot define a zero-sized output layer");

		self.push_layer(layer_size, act);
		Topology {
			layers: self.layers
		}
	}
}

#[cfg(test)]
mod tests {
	use super::*;

	#[test]
	fn construction() {
		use self::Activation::{Logistic, Identity, ReLU, Tanh};
		let dis = Topology::input(2)
			.layer(5, Logistic)
			.layers(&[
				(10, Identity),
				(10, ReLU)
			])
			.output(5, Tanh);
		let mut it = dis.iter_layers().cloned();
		assert_eq!(it.next(), Some(Layer::new(2, 5, Logistic)));
		assert_eq!(it.next(), Some(Layer::new(5, 10, Identity)));
		assert_eq!(it.next(), Some(Layer::new(10, 10, ReLU)));
		assert_eq!(it.next(), Some(Layer::new(10, 5, Tanh)));
		assert_eq!(it.next(), None);
	}
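
	// A small additional sketch exercising `len_input`/`len_output` and the
	// documented panic on a zero-sized input layer; it relies only on the API
	// defined above.
	#[test]
	fn in_out_lengths() {
		use self::Activation::{Logistic, Tanh};
		let top = Topology::input(3)
			.layer(7, Logistic)
			.output(2, Tanh);
		assert_eq!(top.len_input(), 3);
		assert_eq!(top.len_output(), 2);
	}

	#[test]
	#[should_panic(expected = "cannot define a zero-sized input layer")]
	fn zero_sized_input() {
		Topology::input(0);
	}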
}