extern crate ndarray;
use ndarray::prelude::*;

/// ReLU activation function for a 1d array.
///
/// More info: <https://machinelearningmastery.com/rectified-linear-activation-function-for-deep-learning-neural-networks/>
///
/// # Arguments
///
/// * `input_array`: 1d array
///
/// returns: `Array1<f64>`
///
/// # Examples
///
/// ```
/// use ducky_learn::activations::*;
/// use ndarray::arr1;
///
/// let input_array = arr1(&[-1.3456435325242, 2.5, -132432888.]);
/// assert_eq!(relu_1d(input_array), arr1(&[0., 2.5, 0.]));
/// ```
pub fn relu_1d(input_array: Array1<f64>) -> Array1<f64> {
    // Element-wise max(x, 0): negatives clamp to 0, positives pass through.
    input_array.map(|value| value.max(0.))
}

/// Derivative of the ReLU function. By convention, the derivative at 0 is
/// taken to be 0.
///
/// # Arguments
///
/// * `input_array`: 1d array
///
/// returns: `Array1<f64>`
///
/// # Examples
///
/// ```
/// use ducky_learn::activations::*;
/// use ndarray::arr1;
///
/// let input_array = arr1(&[1.3456435325242, -32145324321., 132432888.]);
/// assert_eq!(deriv_relu_1d(input_array), arr1(&[1., 0., 1.]));
/// ```
pub fn deriv_relu_1d(input_array: Array1<f64>) -> Array1<f64> {
    // 1 for strictly positive inputs, 0 otherwise.
    input_array.map(|&value| if value > 0. { 1. } else { 0. })
}
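
// In backpropagation, the ReLU derivative acts as a 0/1 mask on the upstream
// gradient. A minimal sketch of that pattern (the values and variable names
// below are illustrative, not part of the crate's API):
//
//     let upstream = arr1(&[0.5, -0.2, 0.8]);
//     let mask = deriv_relu_1d(arr1(&[1.3, -2.0, 0.7]));
//     assert_eq!(upstream * mask, arr1(&[0.5, 0., 0.8]));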

/// Softmax activation function for a 1d array. Note that the result can
/// contain NaN when `exp` overflows (inputs above roughly 709 for `f64`) or
/// when every input underflows `exp` to zero
/// (<https://users.rust-lang.org/t/watch-out-for-nans/70016>).
///
/// More info: <https://deepai.org/machine-learning-glossary-and-terms/softmax-layer>
///
/// # Arguments
///
/// * `input_array`: 1d array
///
/// returns: `Array1<f64>`
///
/// # Examples
///
/// ```
/// use ducky_learn::activations::*;
/// use ndarray::arr1;
///
/// let input_array = arr1(&[0., 1., -1., 0.01, -0.1]);
/// assert_eq!(softmax_1d(input_array),
///     arr1(&[0.16663753690463112, 0.4529677885070323, 0.0613025239546613, 0.16831227199301688, 0.15077987864065834]));
/// ```
pub fn softmax_1d(input_array: Array1<f64>) -> Array1<f64> {
    // Compute each exponential once, then normalize by their sum.
    let exp_array = input_array.map(|value| value.exp());
    let sum_exp = exp_array.sum();
    exp_array / sum_exp
}
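
// The NaN issue noted above can be avoided with the standard max-subtraction
// trick: shifting the inputs by their maximum leaves the softmax output
// mathematically unchanged but keeps `exp` from overflowing. A minimal sketch
// of that variant; `softmax_1d_stable` is an illustrative name, not an
// existing part of the crate's API.

/// Numerically stable softmax: equivalent to `softmax_1d` up to floating-point
/// rounding, but shifts inputs by their maximum so `exp` cannot overflow.
pub fn softmax_1d_stable(input_array: Array1<f64>) -> Array1<f64> {
    // NEG_INFINITY is a safe identity element for the running maximum.
    let max = input_array.iter().cloned().fold(f64::NEG_INFINITY, f64::max);
    let exp_array = input_array.map(|&value| (value - max).exp());
    let sum_exp = exp_array.sum();
    exp_array / sum_exp
}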