concision_core/activate/impls/impl_nonlinear.rs

use crate::activate::{utils::sigmoid_derivative, ReLU, Sigmoid, Softmax, Tanh};

use ndarray::{Array, ArrayBase, Data, Dimension, ScalarOperand};
use num_traits::{Float, One, Zero};

impl<A, S, D> ReLU for ArrayBase<S, D>
where
    A: Copy + PartialOrd + Zero + One,
    S: Data<Elem = A>,
    D: Dimension,
{
    type Output = Array<A, D>;

    /// max(0, x), applied elementwise
    fn relu(&self) -> Self::Output {
        self.map(|&i| if i > A::zero() { i } else { A::zero() })
    }

    /// the indicator of x > 0: one for positive entries, zero elsewhere
    fn relu_derivative(&self) -> Self::Output {
        self.map(|&i| if i > A::zero() { A::one() } else { A::zero() })
    }
}
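
// A minimal usage sketch, not part of the original file: it exercises `relu`
// and `relu_derivative` on an owned `f64` array. The `array!` macro comes from
// `ndarray`; the module and test names below are illustrative.
#[cfg(test)]
mod relu_example {
    use super::*;
    use ndarray::array;

    #[test]
    fn relu_clamps_negatives_to_zero() {
        let x = array![[-1.0_f64, 0.0, 2.0]];
        // negative entries are clamped to zero; positive entries pass through
        assert_eq!(x.relu(), array![[0.0, 0.0, 2.0]]);
        // the derivative is the indicator of x > 0 (zero at x = 0 here)
        assert_eq!(x.relu_derivative(), array![[0.0, 0.0, 1.0]]);
    }
}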

impl<A, S, D> Sigmoid for ArrayBase<S, D>
where
    A: Float + 'static,
    S: Data<Elem = A>,
    D: Dimension,
{
    type Output = Array<A, D>;

    /// 1 / (1 + e^(-x)), applied elementwise; uses the scalar `Float::recip`
    /// so no temporary array of ones is allocated
    fn sigmoid(self) -> Self::Output {
        self.mapv(|i| (A::one() + (-i).exp()).recip())
    }

    fn sigmoid_derivative(self) -> Self::Output {
        self.mapv(|i| sigmoid_derivative(i))
    }
}
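
// Another hedged sketch, not in the original file: `sigmoid` takes `self` by
// value, so the input array is consumed. sigmoid(0) = 1/(1 + e^0) = 0.5
// exactly in f64, which makes the equality assertion safe.
#[cfg(test)]
mod sigmoid_example {
    use super::*;
    use ndarray::array;

    #[test]
    fn sigmoid_of_zero_is_one_half() {
        let x = array![0.0_f64, 0.0];
        // `x` is moved here because the trait method takes `self`
        assert_eq!(x.sigmoid(), array![0.5, 0.5]);
    }
}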

impl<A, S, D> Softmax for ArrayBase<S, D>
where
    A: Float + ScalarOperand,
    S: Data<Elem = A>,
    D: Dimension,
{
    type Output = Array<A, D>;

    /// e^x / sum(e^x), normalized over the entire array; note that this
    /// variant does not subtract the maximum first, so very large inputs
    /// can overflow
    fn softmax(&self) -> Self::Output {
        let e = self.mapv(A::exp);
        &e / e.sum()
    }

    /// the diagonal of the softmax Jacobian: s * (1 - s), elementwise
    fn softmax_derivative(&self) -> Self::Output {
        let s = self.softmax();
        s.mapv(|i| i * (A::one() - i))
    }
}
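
// Illustrative sketch, not in the original file: with equal inputs the
// softmax is uniform, and since e^0 = 1 exactly, each entry is exactly 1/2.
// Note that `softmax` normalizes over the *entire* array, not per axis.
#[cfg(test)]
mod softmax_example {
    use super::*;
    use ndarray::array;

    #[test]
    fn softmax_of_equal_inputs_is_uniform() {
        let x = array![0.0_f64, 0.0];
        let s = x.softmax();
        assert_eq!(s, array![0.5, 0.5]);
        // the outputs always sum to one (up to rounding)
        assert!((s.sum() - 1.0).abs() < 1e-12);
    }
}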

impl<A, S, D> Tanh for ArrayBase<S, D>
where
    A: Float + 'static,
    S: Data<Elem = A>,
    D: Dimension,
{
    type Output = Array<A, D>;

    fn tanh(&self) -> Self::Output {
        self.map(|i| i.tanh())
    }

    /// 1 - tanh(x)^2, expressed in terms of the input x
    fn tanh_derivative(&self) -> Self::Output {
        self.map(|i| A::one() - i.tanh().powi(2))
    }
}
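
// A final hedged sketch, not part of the original file: tanh(0) = 0 and
// tanh'(0) = 1 - tanh(0)^2 = 1, both exact in f64.
#[cfg(test)]
mod tanh_example {
    use super::*;
    use ndarray::array;

    #[test]
    fn tanh_and_its_derivative_at_zero() {
        let x = array![0.0_f64];
        assert_eq!(x.tanh(), array![0.0]);
        assert_eq!(x.tanh_derivative(), array![1.0]);
    }
}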