use rand;
use rand::{Rng, StdRng, SeedableRng};

use ea::*;
use neuro::{ActivationFunctionType, MultilayeredNetwork, NeuralArchitecture, NeuralNetwork};
use problem::*;


//--------------------------------------------

/// Trait for problems where a neural network is a solution.
///
/// # Example: Custom NE problem
/// ```
/// extern crate revonet;
/// extern crate rand;
///
/// use rand::{Rng, SeedableRng, StdRng};
///
/// use revonet::ea::*;
/// use revonet::ne::*;
/// use revonet::neuro::*;
/// use revonet::neproblem::*;
///
/// // Dummy problem returning random fitness.
/// struct RandomNEProblem {}
///
/// impl RandomNEProblem {
///     fn new() -> RandomNEProblem {
///         RandomNEProblem{}
///     }
/// }
///
/// impl NeuroProblem for RandomNEProblem {
///     // Return the number of NN inputs.
///     fn get_inputs_num(&self) -> usize {1}
///     // Return the number of NN outputs.
///     fn get_outputs_num(&self) -> usize {1}
///     // Return a NN with random weights and a fixed structure. For now the structure
///     // should stay the same across calls to make sure that crossover is possible;
///     // this is likely to change in the future.
///     fn get_default_net(&self) -> MultilayeredNetwork {
///         let mut rng = rand::thread_rng();
///         let mut net: MultilayeredNetwork = MultilayeredNetwork::new(self.get_inputs_num(), self.get_outputs_num());
///         net.add_hidden_layer(5usize, ActivationFunctionType::Sigmoid)
///             .build(&mut rng, NeuralArchitecture::Multilayered);
///         net
///     }
///
///     // Evaluate the performance of a given NN: feed it random input and return the first output.
///     fn compute_with_net<T: NeuralNetwork>(&self, nn: &mut T) -> f32 {
///         let mut rng: StdRng = StdRng::from_seed(&[0]);
///
///         let input = (0..self.get_inputs_num())
///                         .map(|_| rng.gen::<f32>())
///                         .collect::<Vec<f32>>();
///         // Compute NN output for the random input.
///         let output = nn.compute(&input);
///         output[0]
///     }
/// }
///
/// fn main() {}
/// ```
pub trait NeuroProblem: Problem {
    /// Number of input variables.
    fn get_inputs_num(&self) -> usize;
    /// Number of output (target) variables.
    fn get_outputs_num(&self) -> usize;
    /// Returns a random network with the default number of inputs and outputs and some predefined structure.
    ///
    /// For now, all networks returned by implementations of this function have the same structure and
    /// random weights. This is done to ensure that NNs can be crossed over; it might change in the future.
    fn get_default_net(&self) -> MultilayeredNetwork;

    /// Compute fitness value for the given neural network.
    ///
    /// # Arguments:
    /// * `net` - neural network to compute fitness for.
    fn compute_with_net<T: NeuralNetwork>(&self, net: &mut T) -> f32;
}

/// Blanket implementation of the `Problem` trait for any `NeuroProblem`, so that
/// neuroevolution problems can be used with the generic EA machinery.
#[allow(unused_variables, unused_mut)]
impl<T: NeuroProblem> Problem for T {
    fn compute<I: Individual>(&self, ind: &mut I) -> f32 {
        let fitness = self.compute_with_net(ind.to_net_mut().expect("Cannot extract mutable ANN"));
        ind.set_fitness(fitness);
        ind.get_fitness()
    }
    fn get_random_individual<U: Individual, R: Rng>(&self, size: usize, mut rng: &mut R) -> U {
        let mut res_ind = U::new();
        res_ind.set_net(self.get_default_net());
        res_ind
    }
}
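
// Because of the blanket impl above, any `NeuroProblem` can be used wherever a
// generic `Problem` is expected. A minimal sketch of direct evaluation (hypothetical
// usage, assuming the `NEIndividual` type from the `ne` module):
//
//     let problem = XorProblem::new();
//     let mut rng = rand::thread_rng();
//     let mut ind: NEIndividual = problem.get_random_individual(0, &mut rng);
//     let fitness = problem.compute(&mut ind);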

///
/// Classical noiseless XOR problem with 2 binary inputs and 1 output.
///
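/// # Example
///
/// A minimal sketch: evaluate a randomly initialized default network on the problem.
///
/// ```
/// extern crate revonet;
///
/// use revonet::neproblem::*;
///
/// let problem = XorProblem::new();
/// let mut net = problem.get_default_net();
/// // Error is the sum of squared deviations over the four XOR cases; lower is better.
/// let error = problem.compute_with_net(&mut net);
/// assert!(error >= 0f32);
/// ```
///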
#[allow(dead_code)]
pub struct XorProblem {}

#[allow(dead_code)]
impl XorProblem {
    pub fn new() -> XorProblem {
        XorProblem{}
    }
}

#[allow(dead_code)]
impl NeuroProblem for XorProblem {
    fn get_inputs_num(&self) -> usize {2}
    fn get_outputs_num(&self) -> usize {1}
    fn get_default_net(&self) -> MultilayeredNetwork {
        let mut rng = rand::thread_rng();
        let mut net: MultilayeredNetwork = MultilayeredNetwork::new(self.get_inputs_num(), self.get_outputs_num());
        net.add_hidden_layer(4usize, ActivationFunctionType::Sigmoid)
            .build(&mut rng, NeuralArchitecture::BypassInputs);
        net
    }

    fn compute_with_net<T: NeuralNetwork>(&self, nn: &mut T) -> f32 {
        let mut er = 0f32;

        // Accumulate squared error over the four XOR cases.
        // Target output is 0 when the inputs are equal...
        let output = nn.compute(&[0f32, 0f32]);
        er += output[0] * output[0];
        let output = nn.compute(&[1f32, 1f32]);
        er += output[0] * output[0];

        // ...and 1 when they differ.
        let output = nn.compute(&[0f32, 1f32]);
        er += (1f32 - output[0]) * (1f32 - output[0]);
        let output = nn.compute(&[1f32, 0f32]);
        er += (1f32 - output[0]) * (1f32 - output[0]);

        er
    }
}

///
/// Problem which is typically used to test GP algorithms. Represents symbolic regression with
/// 1 input and 1 output. There are three variants:
/// * `f` - 4-th order polynomial.
/// * `g` - 5-th order polynomial.
/// * `h` - 6-th order polynomial.
///
/// For details see: Luke S., Essentials of Metaheuristics.
///
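/// # Example
///
/// A minimal sketch: create the 4-th order variant and evaluate a randomly
/// initialized default network on it.
///
/// ```
/// extern crate revonet;
///
/// use revonet::neproblem::*;
///
/// let problem = SymbolicRegressionProblem::new('f');
/// let mut net = problem.get_default_net();
/// // Error is the sum of absolute deviations over sample points; lower is better.
/// let error = problem.compute_with_net(&mut net);
/// assert!(error >= 0f32);
/// ```
///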
#[allow(dead_code)]
pub struct SymbolicRegressionProblem {
    func: fn(&SymbolicRegressionProblem, f32) -> f32,
}

#[allow(dead_code)]
impl SymbolicRegressionProblem {
    /// Create a new problem depending on the problem type:
    /// * `f` - 4-th order polynomial.
    /// * `g` - 5-th order polynomial.
    /// * `h` - 6-th order polynomial.
    ///
    /// # Arguments:
    /// * `problem_type` - symbol from set `('f', 'g', 'h')` to set the problem type.
    pub fn new(problem_type: char) -> SymbolicRegressionProblem {
        match problem_type {
            'f' => SymbolicRegressionProblem::new_f(),
            'g' => SymbolicRegressionProblem::new_g(),
            'h' => SymbolicRegressionProblem::new_h(),
            _ => {
                panic!("Unknown problem type for symbolic regression problem: {}",
                       problem_type)
            }
        }
    }

    /// Create `f`-type problem (4-th order polynomial)
    pub fn new_f() -> SymbolicRegressionProblem {
        SymbolicRegressionProblem { func: SymbolicRegressionProblem::f }
    }

    /// Create `g`-type problem (5-th order polynomial)
    pub fn new_g() -> SymbolicRegressionProblem {
        SymbolicRegressionProblem { func: SymbolicRegressionProblem::g }
    }

    /// Create `h`-type problem (6-th order polynomial)
    pub fn new_h() -> SymbolicRegressionProblem {
        SymbolicRegressionProblem { func: SymbolicRegressionProblem::h }
    }

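    /// 4-th order polynomial: f(x) = x^4 + x^3 + x^2 + x.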
    fn f(&self, x: f32) -> f32 {
        let x2 = x * x;
        x2 * x2 + x2 * x + x2 + x
    }

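    /// 5-th order polynomial: g(x) = x^5 - 2x^3 + x.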
    fn g(&self, x: f32) -> f32 {
        let x2 = x * x;
        x2 * x2 * x - 2f32 * x2 * x + x
    }

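    /// 6-th order polynomial: h(x) = x^6 - 2x^4 + x^2.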
    fn h(&self, x: f32) -> f32 {
        let x2 = x * x;
        x2 * x2 * x2 - 2f32 * x2 * x2 + x2
    }
}

impl NeuroProblem for SymbolicRegressionProblem {
    fn get_inputs_num(&self) -> usize { 1 }
    fn get_outputs_num(&self) -> usize { 1 }
    fn get_default_net(&self) -> MultilayeredNetwork {
        let mut rng = rand::thread_rng();
        let mut net: MultilayeredNetwork = MultilayeredNetwork::new(self.get_inputs_num(), self.get_outputs_num());
        net.add_hidden_layer(5usize, ActivationFunctionType::Sigmoid)
            .build(&mut rng, NeuralArchitecture::Multilayered);
        net
    }

    fn compute_with_net<T: NeuralNetwork>(&self, nn: &mut T) -> f32 {
        const PTS_COUNT: u32 = 20;

        let mut er = 0f32;
        let mut input = vec![0f32];

        let mut rng: StdRng = StdRng::from_seed(&[0]);
        for _ in 0..PTS_COUNT {
            // `gen::<f32>()` samples from [0, 1); rescale to sample x from [-1, 1].
            let x = 2f32 * rng.gen::<f32>() - 1f32;
            let y = (self.func)(self, x);

            input[0] = x;
            let output = nn.compute(&input);

            // Fitness is the sum of absolute errors over the sample points.
            er += (output[0] - y).abs();
        }
        er
    }
}

//=========================================================

#[cfg(test)]
#[allow(unused_imports)]
mod test {
    use rand;

    use math::*;
    use ne::*;
    use neproblem::*;
    use problem::*;
    use settings::*;

    #[test]
    fn test_xor_problem() {
        let (pop_size, gen_count, param_count) = (20, 20, 100); // param_count does not matter here as the NN structure is defined by the problem.
        let settings = EASettings::new(pop_size, gen_count, param_count);
        let problem = XorProblem::new();

        let mut ne: NE<XorProblem> = NE::new(&problem);
        let res = ne.run(settings).expect("Error: NE result is empty");
        println!("result: {:?}", res);
        println!("\nbest individual: {:?}", res.best);
    }

    #[test]
    fn test_symb_regression_problem() {
        for prob_type in vec!['f', 'g', 'h'] {
            let mut rng = rand::thread_rng();
            let prob = SymbolicRegressionProblem::new(prob_type);
            println!("Created problem of type: {}", prob_type);

            let mut net = prob.get_default_net();
            println!("Created default net with {} inputs, {} outputs, and {} hidden layers", net.get_inputs_num(), net.get_outputs_num(), net.len() - 1);
            println!("  Network weights: {:?}", net.get_weights());
            let mut ind: NEIndividual = prob.get_random_individual(0, &mut rng);
            println!("  Random individual: {:?}", ind.to_vec().unwrap());
            println!("  Random individual ANN: {:?}", ind.to_net().unwrap());

            let input_size = net.get_inputs_num();
            let mut ys = Vec::with_capacity(100);
            for _ in 0..100 {
                let x = rand_vector_std_gauss(input_size, &mut rng);
                let y = net.compute(&x);
                ys.push(y);
            }
            println!("  Network outputs for 100 random inputs: {:?}", ys);
            println!("  Network evaluation: {:?}\n", prob.compute_with_net(&mut net));
        }
    }
}