concision_core/activate/impls/impl_activate_nonlinear.rs

use crate::activate::{ReLUActivation, SigmoidActivation, SoftmaxActivation, TanhActivation};
use ndarray::{Array, ArrayBase, Data, Dimension, ScalarOperand};
use num_traits::{Float, One, Zero};
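/// Element-wise ReLU: `relu(x) = max(x, 0)`; the derivative is `1` where the
/// input is strictly positive and `0` elsewhere.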
impl<A, S, D> ReLUActivation for ArrayBase<S, D>
where
    A: Copy + PartialOrd + Zero + One,
    S: Data<Elem = A>,
    D: Dimension,
{
    type Output = Array<A, D>;

    fn relu(&self) -> Self::Output {
        self.map(|&i| if i > A::zero() { i } else { A::zero() })
    }

    fn relu_derivative(&self) -> Self::Output {
        self.map(|&i| if i > A::zero() { A::one() } else { A::zero() })
    }
}

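/// Logistic sigmoid applied element-wise: `sigmoid(x) = 1 / (1 + e^(-x))`; the
/// derivative uses the identity `sigmoid'(x) = sigmoid(x) * (1 - sigmoid(x))`.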
impl<A, S, D> SigmoidActivation for ArrayBase<S, D>
where
    A: 'static + Float,
    S: Data<Elem = A>,
    D: Dimension,
{
    type Output = Array<A, D>;

    fn sigmoid(self) -> Self::Output {
        // 1 / (1 + e^(-x)) for each element
        self.mapv(|i| (A::one() + i.neg().exp()).recip())
    }

    fn sigmoid_derivative(self) -> Self::Output {
        self.mapv(|i| {
            let s = (A::one() + i.neg().exp()).recip();
            s * (A::one() - s)
        })
    }
}

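/// Softmax over every element of the array: `softmax(x)_i = e^(x_i) / sum_j e^(x_j)`.
/// The derivative returned here is the diagonal of the Jacobian, `s_i * (1 - s_i)`.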
impl<A, S, D> SoftmaxActivation for ArrayBase<S, D>
where
    A: Float + ScalarOperand,
    S: Data<Elem = A>,
    D: Dimension,
{
    type Output = Array<A, D>;

    fn softmax(&self) -> Self::Output {
        // exponentiate element-wise, then normalize by the total sum
        let exp = self.mapv(|i| i.exp());
        &exp / exp.sum()
    }

    fn softmax_derivative(&self) -> Self::Output {
        let softmax = self.softmax();

        let ones = Array::<A, D>::ones(self.dim());
        &softmax * (&ones - &softmax)
    }
}

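/// Hyperbolic tangent applied element-wise; the derivative is `1 - tanh(x)^2`.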
impl<A, S, D> TanhActivation for ArrayBase<S, D>
where
    A: 'static + Float,
    S: Data<Elem = A>,
    D: Dimension,
{
    type Output = Array<A, D>;

    fn tanh(&self) -> Self::Output {
        self.mapv(|i| i.tanh())
    }

    fn tanh_derivative(&self) -> Self::Output {
        self.mapv(|i| A::one() - i.tanh().powi(2))
    }
}
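
// A minimal sketch of how these impls might be exercised. The expected values
// are standard closed-form results for these activations, not values taken
// from the crate's own test suite.
#[cfg(test)]
mod tests {
    use crate::activate::{ReLUActivation, SigmoidActivation, SoftmaxActivation, TanhActivation};
    use ndarray::array;

    #[test]
    fn relu_clamps_negatives_to_zero() {
        let x = array![-1.0_f64, 0.0, 2.0];
        assert_eq!(x.relu(), array![0.0, 0.0, 2.0]);
        assert_eq!(x.relu_derivative(), array![0.0, 0.0, 1.0]);
    }

    #[test]
    fn sigmoid_of_zero_is_one_half() {
        let x = array![0.0_f64];
        assert!((x.clone().sigmoid()[0] - 0.5).abs() < 1e-12);
        // sigmoid'(0) = 0.5 * (1 - 0.5) = 0.25
        assert!((x.sigmoid_derivative()[0] - 0.25).abs() < 1e-12);
    }

    #[test]
    fn softmax_sums_to_one() {
        let x = array![1.0_f64, 2.0, 3.0];
        assert!((x.softmax().sum() - 1.0).abs() < 1e-12);
    }

    #[test]
    fn tanh_derivative_at_zero_is_one() {
        let x = array![0.0_f64];
        assert!((x.tanh_derivative()[0] - 1.0).abs() < 1e-12);
    }
}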