concision_core/activate/impls/impl_nonlinear.rs

use crate::activate::{ReLU, Sigmoid, Softmax, Tanh};

use ndarray::{Array, ArrayBase, Data, Dimension, ScalarOperand};
use num_traits::{Float, Zero};

impl<A, S, D> ReLU for ArrayBase<S, D>
where
    A: Copy + PartialOrd + Zero,
    S: Data<Elem = A>,
    D: Dimension,
{
    type Output = Array<A, D>;

    // Clamp negative entries to zero; positive entries pass through unchanged.
    fn relu(&self) -> Self::Output {
        self.map(|&i| if i > A::zero() { i } else { A::zero() })
    }
}

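// A minimal usage sketch for `relu` (an illustrative test, assuming `f64`
// elements and the `ndarray::array!` macro): negative entries clamp to zero.
#[cfg(test)]
mod relu_example {
    use super::*;
    use ndarray::array;

    #[test]
    fn clamps_negative_entries() {
        let x = array![[-1.0_f64, 0.0], [2.5, -3.0]];
        assert_eq!(x.relu(), array![[0.0, 0.0], [2.5, 0.0]]);
    }
}
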
impl<A, S, D> Sigmoid for ArrayBase<S, D>
where
    A: Float + 'static,
    S: Data<Elem = A>,
    D: Dimension,
{
    type Output = Array<A, D>;

    // Elementwise logistic function: sigma(x) = 1 / (1 + e^(-x)).
    fn sigmoid(&self) -> Self::Output {
        self.mapv(|i| (A::one() + i.neg().exp()).recip())
    }
}

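// A small sketch for `sigmoid` (illustrative, assuming `f64` elements):
// sigma(0) = 0.5, and every output lies in the open interval (0, 1).
#[cfg(test)]
mod sigmoid_example {
    use super::*;
    use ndarray::array;

    #[test]
    fn maps_zero_to_one_half() {
        let y = array![0.0_f64, 10.0, -10.0].sigmoid();
        assert!((y[0] - 0.5).abs() < 1e-12);
        assert!(y.iter().all(|&v| v > 0.0 && v < 1.0));
    }
}
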
impl<A, S, D> Softmax for ArrayBase<S, D>
where
    A: Float + ScalarOperand,
    S: Data<Elem = A>,
    D: Dimension,
{
    type Output = Array<A, D>;

    // Exponentiate every element, then normalize by the global sum so the
    // result sums to one over the whole array (not along a chosen axis).
    fn softmax(&self) -> Self::Output {
        let e = self.mapv(|i| i.exp());
        &e / e.sum()
    }
}

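// A quick sketch for `softmax` (illustrative, assuming `f64` elements).
// Because normalization is global, the probabilities sum to one across all
// entries, and larger logits receive larger probabilities.
#[cfg(test)]
mod softmax_example {
    use super::*;
    use ndarray::array;

    #[test]
    fn sums_to_one() {
        let p = array![1.0_f64, 2.0, 3.0].softmax();
        assert!((p.sum() - 1.0).abs() < 1e-12);
        assert!(p[2] > p[1] && p[1] > p[0]);
    }
}
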
impl<A, S, D> Tanh for ArrayBase<S, D>
where
    A: Float + 'static,
    S: Data<Elem = A>,
    D: Dimension,
{
    type Output = Array<A, D>;

    // Elementwise hyperbolic tangent.
    fn tanh(&self) -> Self::Output {
        self.mapv(|i| i.tanh())
    }
}
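
// A brief sketch for `tanh` (illustrative, assuming `f64` elements): tanh
// fixes zero and saturates toward +1 and -1 for large-magnitude inputs.
#[cfg(test)]
mod tanh_example {
    use super::*;
    use ndarray::array;

    #[test]
    fn fixes_zero_and_saturates() {
        let y = array![0.0_f64, 100.0, -100.0].tanh();
        assert_eq!(y[0], 0.0);
        assert!((y[1] - 1.0).abs() < 1e-12);
        assert!((y[2] + 1.0).abs() < 1e-12);
    }
}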