// qmachina/activation/mod.rs
1//! This module contains various activation functions implementations.
2
3pub mod step;
4pub mod sigmoid;
5pub mod tanh;
6pub mod relu;
7pub mod leaky_relu;
8pub mod param_relu;
9pub mod elu;
10pub mod swish;
11pub mod softmax;
12
/// `ActivationFunction` defines a general interface for activation functions
/// used in neural networks. Activation functions are fundamental to neural
/// networks as they introduce non-linearity, allowing the network to learn
/// complex patterns and perform tasks beyond linear classification or
/// regression.
///
/// The trait is generic so that implementations can work over different
/// input and output representations, supporting a wide range of neural
/// network architectures and applications.
///
/// # Type Parameters
///
/// * `X`: The type of the input to the activation function. This could be a
///   single value (like `f64`), a collection (like `Vec<f64>`), or any other
///   type representing the input to a neuron or a layer.
///
/// * `Y`: The type of the output from the activation function. Like `X`, this
///   can range from a single value to a more complex structure, depending on
///   the design and requirements of the network.
///
/// # Implementations
///
/// Implementations of this trait include standard activation functions such
/// as Step, Sigmoid, Tanh, ReLU and its variants, ELU, Swish, and Softmax
/// (see the sibling modules), each potentially tailored to handle different
/// kinds of inputs and outputs as required by specific models.
pub trait ActivationFunction<X, Y> {
    /// Computes the activated value for a given input.
    ///
    /// # Arguments
    ///
    /// * `input` - The input value to the activation function, of type `X`.
    ///
    /// # Returns
    ///
    /// Returns the activated output of type `Y`.
    fn activate(&self, input: X) -> Y;

    /// Computes the derivative of the activation function for a given input.
    ///
    /// NOTE(review): the conventional English term is `derivative` (or a verb
    /// such as `differentiate`); the name `derivate` is kept as-is because
    /// renaming a required trait method would break every implementor.
    ///
    /// # Arguments
    ///
    /// * `input` - The input value at which the derivative is evaluated, of type `X`.
    ///
    /// # Returns
    ///
    /// Returns the derivative of the activation function at the given input, of type `Y`.
    fn derivate(&self, input: X) -> Y;
}