nncombinator/
lib.rs

//! nncombinator is a neural network library that allows type-safe implementation.

extern crate libc;
extern crate cuda_runtime_sys;
extern crate rcublas_sys;
extern crate rcublas;
extern crate rcudnn;
extern crate rcudnn_sys;

use crate::ope::UnitValue;

pub mod error;
pub mod ope;
pub mod mem;
pub mod arr;
pub mod collection;
pub mod list;
pub mod optimizer;
pub mod lossfunction;
pub mod activation;
pub mod cuda;
pub mod device;
pub mod computational_graph;
pub mod layer;
pub mod persistence;
#[macro_use]
mod macros;
/// Trait that defines a stack to store the results computed by forward propagation when training a neural network.
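///
/// A minimal usage sketch (everything used here is defined in this module):
///
/// ```
/// use nncombinator::{Stack, Nil};
///
/// // Build a two-element stack on top of the empty stack.
/// let stack = Nil.push(1i32).push(2i32);
///
/// // `map` borrows the top element.
/// assert_eq!(stack.map(|h| *h), 2);
///
/// // `pop` consumes the stack and returns (remaining, head).
/// let (rest, head) = stack.pop();
/// assert_eq!(head, 2);
/// assert_eq!(*rest.get_head(), 1);
/// ```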
pub trait Stack {
    /// Stack of the remaining elements, not including the top element of the stack
    type Remaining: Stack;
    /// Top element of the stack
    type Head;

    /// Returns a tuple of the remaining stack and the top item of the stack
    fn pop(self) -> (Self::Remaining, Self::Head);
    /// Returns a Cons with the item pushed onto the stack
    /// # Arguments
    /// * `head` - Item to be added
    fn push<H>(self,head:H) -> Cons<Self,H> where Self: Sized;
    /// Returns the result of applying the callback function to the top element of the stack
    /// # Arguments
    /// * `f` - Callback function to apply
    fn map<F: FnOnce(&Self::Head) -> O,O>(&self,f:F) -> O;
    /// Returns the result of applying the callback function to the stack without its top element
    /// # Arguments
    /// * `f` - Callback function to apply
    fn map_remaining<F: FnOnce(&Self::Remaining) -> O,O>(&self,f:F) -> O;
    /// Takes ownership of the top element of the stack, applies the callback function to it,
    /// and returns the rebuilt stack together with the callback's result.
    /// # Arguments
    /// * `f` - Callback function to apply
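    ///
    /// A sketch of the intended usage (the error type `()` is just a
    /// placeholder for illustration):
    ///
    /// ```
    /// use nncombinator::{Stack, Nil};
    ///
    /// let stack = Nil.push(10i32);
    /// // The callback returns the (possibly modified) head plus an extra value.
    /// let (stack, doubled) = stack.take_map(|h| Ok::<_,()>((h, h * 2))).unwrap();
    /// assert_eq!(doubled, 20);
    /// assert_eq!(*stack.get_head(), 10);
    /// ```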
    fn take_map<F: FnOnce(Self::Head) -> Result<(Self::Head, O),E>, O,E>(self, f: F) -> Result<(Self, O),E> where Self: Sized;
    /// Passes a mutable reference to the top element of the stack to the callback function and returns the result
    /// # Arguments
    /// * `f` - Callback function to apply
    fn map_mut<F: FnOnce(&mut Self::Head) -> O,O>(&mut self,f:F) -> O;
}
/// Non-empty stack, consisting of the remaining stack `R` and the top element `T`
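///
/// A small illustration of accessing the parts of a `Cons` (built on `Nil`,
/// the empty stack defined below):
///
/// ```
/// use nncombinator::{Cons, Nil, Stack};
///
/// let stack: Cons<Cons<Nil, i32>, i32> = Nil.push(1).push(2);
/// assert_eq!(*stack.get_head(), 2);
/// assert_eq!(*stack.get_remaining().get_head(), 1);
/// ```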
#[derive(Debug,Clone)]
pub struct Cons<R,T>(pub R,pub T) where R: Stack;

impl<R,T> Cons<R,T> where R: Stack {
    /// Returns a reference to the rest of the stack, not including the top item
    #[inline]
    pub fn get_remaining(&self) -> &R {
        &self.0
    }

    /// Returns a reference to the top item on the stack
    #[inline]
    pub fn get_head(&self) -> &T {
        &self.1
    }
}
impl<R,T> Stack for Cons<R,T> where R: Stack {
    type Remaining = R;
    type Head = T;

    #[inline]
    fn pop(self) -> (Self::Remaining, Self::Head) {
        (self.0, self.1)
    }

    #[inline]
    fn push<H>(self,head:H) -> Cons<Self, H> where Self: Sized {
        Cons(self,head)
    }

    #[inline]
    fn map<F: FnOnce(&Self::Head) -> O,O>(&self,f:F) -> O {
        f(&self.1)
    }

    #[inline]
    fn map_remaining<F: FnOnce(&Self::Remaining) -> O,O>(&self,f:F) -> O {
        f(&self.0)
    }

    #[inline]
    fn take_map<F: FnOnce(Self::Head) -> Result<(Self::Head, O),E>, O,E>(self, f: F) -> Result<(Self, O),E> where Self: Sized {
        let (s,h) = self.pop();
        let (h,r) = f(h)?;

        Ok((Cons(s,h),r))
    }

    #[inline]
    fn map_mut<F: FnOnce(&mut Self::Head) -> O, O>(&mut self, f: F) -> O {
        f(&mut self.1)
    }
}
/// Empty stack, containing no elements
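///
/// Popping the empty stack yields `Nil` and the unit value rather than
/// panicking, so code that is generic over `Stack` can bottom out without a
/// special case:
///
/// ```
/// use nncombinator::{Stack, Nil};
///
/// let (rest, head) = Nil.pop();
/// assert_eq!(head, ());
/// let _: Nil = rest;
/// ```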
#[derive(Debug,Clone)]
pub struct Nil;

impl Stack for Nil {
    type Remaining = Nil;
    type Head = ();

    #[inline]
    fn pop(self) -> (Self::Remaining, Self::Head) {
        (Nil,())
    }

    #[inline]
    fn push<H>(self, head: H) -> Cons<Self, H> where Self: Sized {
        Cons(Nil,head)
    }

    #[inline]
    fn map<F: FnOnce(&Self::Head) -> O,O>(&self,f:F) -> O {
        f(&())
    }

    #[inline]
    fn map_remaining<F: FnOnce(&Self::Remaining) -> O, O>(&self, f: F) -> O {
        f(&Nil)
    }

    #[inline]
    fn take_map<F: FnOnce(Self::Head) -> Result<(Self::Head, O),E>, O,E>(self, f: F) -> Result<(Self, O),E> where Self: Sized {
        let (_,r) = f(())?;

        Ok((Nil,r))
    }

    #[inline]
    fn map_mut<F: FnOnce(&mut Self::Head) -> O, O>(&mut self, f: F) -> O {
        f(&mut ())
    }
}

#[cfg(test)]
mod tests {
    use crate::activation::ReLu;
    use crate::arr::Arr;
    use crate::device::DeviceCpu;
    use crate::layer::AddLayer;
    use crate::layer::activation::ActivationLayer;
    use crate::layer::input::InputLayer;
    use crate::layer::linear::LinearLayerBuilder;
    use crate::layer::output::LinearOutputLayer;
    use crate::optimizer::SGDBuilder;

    #[test]
    fn build_layers() {
        let device = DeviceCpu::new().unwrap();
        let i:InputLayer<f32,Arr<f32,4>,_,_> = InputLayer::new(&device);
        let optimizer_builder = SGDBuilder::new(&device).lr(0.01);

        let _l = i.add_layer(|l| LinearLayerBuilder::<4,1>::new().build(l,&device, || 1., || 0.,&optimizer_builder).unwrap());
    }

    #[test]
    fn build_train_layers() {
        let device = DeviceCpu::new().unwrap();
        let i:InputLayer<f32,Arr<f32,4>,_,_> = InputLayer::new(&device);
        let optimizer_builder = SGDBuilder::new(&device).lr(0.01);

        let _l = i.add_layer(|l| {
            LinearLayerBuilder::<4,1>::new().build(l,&device,|| 1., || 0.,&optimizer_builder).unwrap()
        }).add_layer(|l| {
            ActivationLayer::new(l,ReLu::new(&device),&device)
        }).add_layer(|l| LinearOutputLayer::new(l,&device));
    }
}