rusty_machine/learning/toolkit/activ_fn.rs
//! Activation Functions.
//!
//! This module contains a number of structs implementing the `ActivationFunc` trait.
//!
//! These structs are used within Neural Networks and
//! Generalized Linear Regression (not yet implemented).
//!
//! You can also create your own custom activation functions for use in your models.
//! Just create a unit struct implementing the `ActivationFunc` trait, as in the
//! example below.
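//!
//! # Examples
//!
//! A minimal sketch of a custom activation function (a hypothetical `Tanh`,
//! not provided by this module), assuming the module is exported at
//! `rusty_machine::learning::toolkit::activ_fn`:
//!
//! ```
//! use rusty_machine::learning::toolkit::activ_fn::ActivationFunc;
//!
//! /// Hyperbolic tangent activation (illustrative only).
//! struct Tanh;
//!
//! impl ActivationFunc for Tanh {
//!     // tanh(x)
//!     fn func(x: f64) -> f64 {
//!         x.tanh()
//!     }
//!
//!     // d/dx tanh(x) = 1 - tanh(x)^2
//!     fn func_grad(x: f64) -> f64 {
//!         1f64 - x.tanh() * x.tanh()
//!     }
//!
//!     // atanh(x) = 0.5 * ln((1 + x) / (1 - x))
//!     fn func_inv(x: f64) -> f64 {
//!         0.5 * ((1f64 + x) / (1f64 - x)).ln()
//!     }
//! }
//!
//! assert!(Tanh::func(0.0).abs() < 1e-12);
//! ```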

/// Trait for activation functions in models.
pub trait ActivationFunc {
    /// The activation function.
    fn func(x: f64) -> f64;

    /// The gradient of the activation function.
    fn func_grad(x: f64) -> f64;

    /// The inverse of the activation function.
    fn func_inv(x: f64) -> f64;
}

/// Sigmoid activation function.
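///
/// # Examples
///
/// A small usage sketch (assuming the module path
/// `rusty_machine::learning::toolkit::activ_fn`):
///
/// ```
/// use rusty_machine::learning::toolkit::activ_fn::{ActivationFunc, Sigmoid};
///
/// // sigmoid(0) = 0.5
/// assert!((Sigmoid::func(0.0) - 0.5).abs() < 1e-12);
/// ```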
#[derive(Clone, Copy, Debug)]
pub struct Sigmoid;

impl ActivationFunc for Sigmoid {
    /// Sigmoid function.
    ///
    /// Returns `1 / (1 + e^-x)`.
    fn func(x: f64) -> f64 {
        1.0 / (1.0 + (-x).exp())
    }

    /// Gradient of the sigmoid function.
    ///
    /// Evaluates to `e^-x / (1 + e^-x)^2`, i.e. `func(x) * (1 - func(x))`.
    fn func_grad(x: f64) -> f64 {
        Self::func(x) * (1f64 - Self::func(x))
    }

    /// Inverse of the sigmoid function (the logit function).
    ///
    /// Returns `ln(x / (1 - x))`.
    fn func_inv(x: f64) -> f64 {
        (x / (1f64 - x)).ln()
    }
}

/// Linear activation function.
#[derive(Clone, Copy, Debug)]
pub struct Linear;

impl ActivationFunc for Linear {
    fn func(x: f64) -> f64 {
        x
    }

    fn func_grad(_: f64) -> f64 {
        1f64
    }

    fn func_inv(x: f64) -> f64 {
        x
    }
}

/// Exponential activation function.
#[derive(Clone, Copy, Debug)]
pub struct Exp;

impl ActivationFunc for Exp {
    fn func(x: f64) -> f64 {
        x.exp()
    }

    fn func_grad(x: f64) -> f64 {
        Self::func(x)
    }

    fn func_inv(x: f64) -> f64 {
        x.ln()
    }
}
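
// Sanity checks for the properties documented above. This test module is a
// sketch added for illustration; it only exercises behaviour defined in this
// file (the gradient identity and the inverse round trips).
#[cfg(test)]
mod tests {
    use super::{ActivationFunc, Exp, Linear, Sigmoid};

    #[test]
    fn sigmoid_grad_matches_identity() {
        // sigma'(x) = sigma(x) * (1 - sigma(x))
        let x = 0.7;
        let s = Sigmoid::func(x);
        assert!((Sigmoid::func_grad(x) - s * (1f64 - s)).abs() < 1e-12);
    }

    #[test]
    fn inverses_round_trip() {
        // func_inv should undo func for each activation.
        let x = 0.3;
        assert!((Sigmoid::func_inv(Sigmoid::func(x)) - x).abs() < 1e-9);
        assert!((Linear::func_inv(Linear::func(x)) - x).abs() < 1e-12);
        assert!((Exp::func_inv(Exp::func(x)) - x).abs() < 1e-12);
    }
}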