use crate::prelude::*;
use async_trait::async_trait;
/// A classification label usable as a training target.
///
/// Implementors map labels to and from dense indices in `0..num_labels()`;
/// those indices select the network output neuron associated with the label.
pub trait TrainingLabel: Eq + Clone + Send {
    /// Total number of distinct labels representable by this type.
    fn num_labels() -> usize;
    /// Dense index of this label, expected to lie in `0..Self::num_labels()`.
    fn index(&self) -> usize;
    /// Reconstructs the label from its dense index.
    fn from_index(idx: usize) -> Self;
    /// Human-readable representation for debugging/logging.
    fn debug_name(&self) -> String;
}
/// `usize` labels act as their own dense indices.
impl TrainingLabel for usize {
    fn num_labels() -> usize {
        // Every usize value is a valid label, so the label space is
        // effectively unbounded.
        usize::MAX
    }

    fn index(&self) -> usize {
        *self
    }

    fn from_index(idx: usize) -> Self {
        idx
    }

    fn debug_name(&self) -> String {
        self.to_string()
    }
}
/// Boolean labels: `false` maps to index 0, `true` to index 1.
impl TrainingLabel for bool {
    fn num_labels() -> usize {
        2
    }

    fn index(&self) -> usize {
        // false → 0, true → 1.
        usize::from(*self)
    }

    fn from_index(idx: usize) -> Self {
        // Index 0 is `false`; every other index maps to `true`.
        idx > 0
    }

    fn debug_name(&self) -> String {
        self.to_string()
    }
}
/// Shapes a raw output/target difference into a distance value
/// (e.g. `f32::abs`) when scoring a classifier.
type DistanceWrapper = fn(f32) -> f32;
/// A supervised-learning problem definition: input vectors paired with their
/// desired labels, plus a distance function used when scoring a classifier.
#[derive(Clone)]
pub struct LabeledLearningFrame<LabelType>
where
    LabelType: TrainingLabel,
{
    // Training cases: (input vector, expected label) pairs.
    inputs: Vec<(Vec<f32>, LabelType)>,
    // Applied to each output/target difference during scoring.
    // NOTE(review): boxing a bare `fn` pointer is redundant (it is already
    // `Copy` and pointer-sized) but kept to preserve the struct layout.
    distance_wrapper: Box<DistanceWrapper>,
}
impl<T> LabeledLearningFrame<T>
where
    T: TrainingLabel,
{
    /// Builds a frame from parallel vectors of case inputs and case labels.
    ///
    /// `distance_wrapper` shapes each output/target difference when scoring;
    /// `None` defaults to `f32::abs`.
    ///
    /// # Errors
    ///
    /// In debug/test builds, returns an error when `cases_inputs` and
    /// `cases_labels` differ in length. (Release builds skip the check, and
    /// `zip` silently truncates to the shorter vector.)
    pub fn new(
        cases_inputs: Vec<Vec<f32>>,
        cases_labels: Vec<T>,
        distance_wrapper: Option<Box<DistanceWrapper>>,
    ) -> Result<Self, String> {
        // BUG FIX: the original tested `cfg!(debug)` and `cfg!(tests)`,
        // neither of which is a cfg the compiler ever sets (the real names
        // are `debug_assertions` and `test`), so this guard never ran.
        // The empty error message is also replaced with a diagnostic one.
        if (cfg!(debug_assertions) || cfg!(test)) && cases_inputs.len() != cases_labels.len() {
            return Err(format!(
                "mismatched case counts: {} inputs vs {} labels",
                cases_inputs.len(),
                cases_labels.len()
            ));
        }
        Ok(Self {
            // Move the owned vectors directly instead of cloning every case.
            inputs: cases_inputs.into_iter().zip(cases_labels).collect(),
            // Reuse the caller's box when provided instead of deref-reboxing.
            distance_wrapper: distance_wrapper
                .unwrap_or_else(|| Box::new(f32::abs as DistanceWrapper)),
        })
    }

    /// Returns the label recorded for an exactly matching input vector, if any.
    pub fn find_label_for(&self, inputs: &[f32]) -> Option<&T> {
        self.inputs
            .iter()
            .find(|(case, _)| case.as_slice() == inputs)
            .map(|(_, label)| label)
    }

    /// Number of training cases held by this frame.
    pub fn num_cases(&self) -> usize {
        self.inputs.len()
    }
}
/// An assembly wrapping a single neural network whose outputs are read as
/// per-label scores.
pub struct NeuralClassifier {
    pub classifier: SimpleNeuralNetwork,
}
impl Assembly for NeuralClassifier {
    /// Immutable view of every network in this assembly — just the classifier.
    fn get_network_refs(&self) -> Vec<&SimpleNeuralNetwork> {
        Vec::from([&self.classifier])
    }

    /// Mutable view of every network in this assembly — just the classifier.
    fn get_networks_mut(&mut self) -> Vec<&mut SimpleNeuralNetwork> {
        Vec::from([&mut self.classifier])
    }
}
#[async_trait]
impl<T> AssemblyFrame<NeuralClassifier> for LabeledLearningFrame<T>
where
    T: TrainingLabel,
{
    type E = String;

    /// Scores `assembly` against every labeled case.
    ///
    /// Returns a fitness value: the negated mean shaped distance between the
    /// classifier's outputs and the one-hot encoding of each desired label,
    /// summed over all cases. 0.0 is a perfect score; more negative is worse.
    ///
    /// # Errors
    ///
    /// Propagates any error from `compute_values`.
    async fn run(&mut self, assembly: &mut NeuralClassifier) -> Result<f32, String> {
        let mut fitness = 0.0_f32;
        // One output slot per label, reused across all cases.
        // NOTE(review): `T::num_labels()` is `usize::MAX` for `usize` labels,
        // which would make this allocation infeasible — confirm that only
        // bounded label types are used here.
        let mut outputs = vec![0.0_f32; T::num_labels()];
        for (case, desired_label) in &self.inputs {
            // `index()` already returns usize; the original's `as usize`
            // cast (and the `as f32` on outputs below) were redundant.
            let desired_idx = desired_label.index();
            assembly.classifier.compute_values(case, &mut outputs)?;
            // Mean shaped distance from the one-hot target for this case.
            fitness -= outputs
                .iter()
                .enumerate()
                .map(|(i, out)| {
                    let target = if i == desired_idx { 1.0 } else { 0.0 };
                    (self.distance_wrapper)(out - target)
                })
                .sum::<f32>()
                / outputs.len() as f32;
        }
        Ok(fitness)
    }
}
impl<LT> LabeledLearningFrame<LT>
where
    LT: TrainingLabel,
{
    /// Synchronous counterpart of `AssemblyFrame::run`: scores `assembly`
    /// against every labeled case and returns the negated mean shaped
    /// distance from each one-hot target, summed over all cases
    /// (0.0 is a perfect score; more negative is worse).
    ///
    /// # Errors
    ///
    /// Propagates any error from `compute_values`.
    pub fn avg_reference_fitness(
        &mut self,
        assembly: &mut NeuralClassifier,
    ) -> Result<f32, String> {
        let mut fitness = 0.0_f32;
        // One output slot per label, reused across all cases.
        // NOTE(review): `LT::num_labels()` is `usize::MAX` for `usize`
        // labels, which would make this allocation infeasible — confirm
        // that only bounded label types are used here.
        let mut outputs = vec![0.0_f32; LT::num_labels()];
        for (case, desired_label) in &self.inputs {
            // `index()` already returns usize; the original's `as usize`
            // cast (and the `as f32` on outputs below) were redundant.
            let desired_idx = desired_label.index();
            assembly.classifier.compute_values(case, &mut outputs)?;
            // Mean shaped distance from the one-hot target for this case.
            fitness -= outputs
                .iter()
                .enumerate()
                .map(|(i, out)| {
                    let target = if i == desired_idx { 1.0 } else { 0.0 };
                    (self.distance_wrapper)(out - target)
                })
                .sum::<f32>()
                / outputs.len() as f32;
        }
        Ok(fitness)
    }
}