concision_core/nn/model.rs

/*
    Appellation: model <traits>
    Contrib: FL03 <jo3mccain@icloud.com>
*/
pub use self::module::*;

pub mod config;
pub mod module;
#[doc(hidden)]
pub mod repo;

pub(crate) mod prelude {
    pub use super::config::*;
    pub use super::module::*;
    pub use super::Model;
}

use crate::traits::Forward;

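/// A [`Model`] extends [`Module`] to describe a complete model: it exposes its
/// execution context ([`Model::Ctx`]) and the child modules it is composed of.
///
/// # Example
///
/// A minimal sketch relying only on the methods declared on this trait;
/// `count_children` is a hypothetical helper, not part of the crate.
///
/// ```ignore
/// fn count_children<M: Model>(model: &M) -> usize {
///     // `children` returns the model's submodules as dynamic module handles
///     model.children().len()
/// }
/// ```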
pub trait Model: Module
where
    Self: Forward<Self::Data>,
{
    type Ctx;
    type Data;

    fn children(&self) -> Vec<ModuleDyn<Self::Ctx, Self::Params>>;

    fn context(&self) -> Self::Ctx;
}

/// This trait describes any neural network or model that adheres to the
/// deep neural network architecture: a single input layer and a single
/// output layer, with any number of hidden layers in between.
///
/// The `HIDDEN` constant may optionally fix the number of hidden layers;
/// when it is `None`, the count is taken from the length of the `hidden()`
/// slice. The total number of layers is always `nhidden() + 2`.
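///
/// # Example
///
/// A minimal sketch of a generic helper over this trait; `layer_report` is a
/// hypothetical function used purely for illustration and relies only on the
/// provided methods `nhidden` and `nlayers`.
///
/// ```ignore
/// fn layer_report<S, T, N>(net: &N) -> String
/// where
///     N: DeepNeuralNetwork<S, T>,
/// {
///     // total layers = hidden layers + the input and output layers
///     format!("{} hidden layer(s), {} total", net.nhidden(), net.nlayers())
/// }
/// ```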
pub trait DeepNeuralNetwork<S, T>: Forward<S, Output = T> {
    /// An optional, compile-time count of hidden layers; `None` defers to `hidden().len()`
    const HIDDEN: Option<usize> = None;

    /// The type of the input layer
    type Input: Forward<S, Output = T>;
    /// The type of the hidden layers; all hidden layers share the same type and activation function
    type Hidden: Forward<T, Output = T>;
    /// The type of the output layer
    type Out: Forward<T, Output = T>;

    /// returns a reference to the input layer
    fn input(&self) -> &Self::Input;

    /// returns a slice containing the hidden layers
    fn hidden(&self) -> &[Self::Hidden];

    /// returns a reference to the output layer
    fn output(&self) -> &Self::Out;

    /// the total number of layers in the network: hidden layers plus the input and output layers
    fn nlayers(&self) -> usize {
        self.nhidden() + 2
    }

    /// the number of hidden layers, preferring `Self::HIDDEN` when it is set
    fn nhidden(&self) -> usize {
        Self::HIDDEN.unwrap_or_else(|| self.hidden().len())
    }
}