concision_core/activate/impls/impl_nonlinear.rs

use crate::{
    activate::{ReLU, Sigmoid, Softmax, Tanh},
    sigmoid_derivative,
};

use ndarray::{Array, ArrayBase, Data, Dimension, ScalarOperand};
use num_traits::{Float, One, Zero};

impl<A, S, D> ReLU for ArrayBase<S, D>
where
    A: Copy + PartialOrd + Zero + One,
    S: Data<Elem = A>,
    D: Dimension,
{
    type Output = Array<A, D>;

    /// Element-wise rectified linear unit: `max(0, x)`.
    fn relu(&self) -> Self::Output {
        self.map(|&i| if i > A::zero() { i } else { A::zero() })
    }

    /// Element-wise derivative of ReLU: `1` where `x > 0`, otherwise `0`.
    fn relu_derivative(&self) -> Self::Output {
        self.map(|&i| if i > A::zero() { A::one() } else { A::zero() })
    }
}
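// Usage sketch (added for illustration; not in the original source): exercises the
// `ReLU` impl above on an `ndarray` vector, assuming `ndarray`'s `array!` macro is
// available in the test context.
#[cfg(test)]
mod relu_usage {
    use crate::activate::ReLU;
    use ndarray::array;

    #[test]
    fn relu_clamps_negatives() {
        let x = array![-1.0_f64, 0.0, 2.0];
        // zero and negatives map to 0; positives pass through unchanged
        assert_eq!(x.relu(), array![0.0, 0.0, 2.0]);
        // the derivative is 1 only where the input is strictly positive
        assert_eq!(x.relu_derivative(), array![0.0, 0.0, 1.0]);
    }
}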

impl<A, S, D> Sigmoid for ArrayBase<S, D>
where
    A: Float + 'static,
    S: Data<Elem = A>,
    D: Dimension,
{
    type Output = Array<A, D>;

    /// Element-wise logistic sigmoid: `1 / (1 + exp(-x))`.
    fn sigmoid(&self) -> Self::Output {
        let dim = self.dim();
        let ones = Array::<A, D>::ones(dim);

        (ones + self.map(|&i| i.neg().exp())).recip()
    }

    /// Element-wise derivative, delegating to the crate's `sigmoid_derivative`.
    fn sigmoid_derivative(&self) -> Self::Output {
        self.mapv(sigmoid_derivative)
    }
}
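// Usage sketch (added for illustration; not in the original source): σ(0) = 0.5.
// The derivative check assumes the crate's `sigmoid_derivative` follows the usual
// convention σ(x)·(1 − σ(x)) applied to the raw (pre-activation) input.
#[cfg(test)]
mod sigmoid_usage {
    use crate::activate::Sigmoid;
    use ndarray::array;

    #[test]
    fn sigmoid_at_zero() {
        let x = array![0.0_f64];
        assert!((x.sigmoid()[0] - 0.5).abs() < 1e-12);
        // assumption: the derivative is taken w.r.t. the pre-activation input,
        // so at 0 it equals 0.5 * (1 - 0.5) = 0.25
        assert!((x.sigmoid_derivative()[0] - 0.25).abs() < 1e-12);
    }
}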

impl<A, S, D> Softmax for ArrayBase<S, D>
where
    A: Float + ScalarOperand,
    S: Data<Elem = A>,
    D: Dimension,
{
    type Output = Array<A, D>;

    /// Softmax over all elements: `exp(x) / sum(exp(x))`.
    fn softmax(&self) -> Self::Output {
        let e = self.exp();
        &e / e.sum()
    }

    /// Element-wise `s * (1 - s)`, i.e. the diagonal of the softmax Jacobian.
    fn softmax_derivative(&self) -> Self::Output {
        let e = self.exp();
        let sum = e.sum();
        let softmax = &e / sum;

        let ones = Array::<A, D>::ones(self.dim());
        &softmax * (&ones - &softmax)
    }
}
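// Usage sketch (added for illustration; not in the original source): softmax outputs
// are strictly positive and sum to one. Note that `softmax_derivative` above returns
// only the Jacobian's diagonal, not the full matrix of partials.
#[cfg(test)]
mod softmax_usage {
    use crate::activate::Softmax;
    use ndarray::array;

    #[test]
    fn softmax_sums_to_one() {
        let x = array![1.0_f64, 2.0, 3.0];
        let s = x.softmax();
        // the outputs form a probability distribution
        assert!((s.sum() - 1.0).abs() < 1e-12);
        assert!(s.iter().all(|&p| p > 0.0));
    }
}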

impl<A, S, D> Tanh for ArrayBase<S, D>
where
    A: Float + 'static,
    S: Data<Elem = A>,
    D: Dimension,
{
    type Output = Array<A, D>;

    /// Element-wise hyperbolic tangent.
    fn tanh(&self) -> Self::Output {
        self.map(|i| i.tanh())
    }

    /// Element-wise derivative: `1 - tanh(x)^2`.
    fn tanh_derivative(&self) -> Self::Output {
        self.map(|i| A::one() - i.tanh().powi(2))
    }
}
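// Usage sketch (added for illustration; not in the original source): tanh(0) = 0,
// and the derivative at 0 is 1 − tanh²(0) = 1.
#[cfg(test)]
mod tanh_usage {
    use crate::activate::Tanh;
    use ndarray::array;

    #[test]
    fn tanh_at_zero() {
        let x = array![0.0_f64];
        assert!(x.tanh()[0].abs() < 1e-12);
        assert!((x.tanh_derivative()[0] - 1.0).abs() < 1e-12);
    }
}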