concision_core/activate/mod.rs
/*
    Appellation: activate <module>
    Contrib: FL03 <jo3mccain@icloud.com>
*/
//! Activation functions for neural networks and their components. These functions are often
//! used to introduce non-linearity into a model, allowing it to learn more complex patterns
//! in the data.
//!
//! ## Overview
//!
//! This module provides a set of activation utilities for neural networks: traits, functions,
//! and other primitives used to define various activation functions, including the following
//! (see the example below):
//!
//! - [Heavyside]
//! - [LinearActivation]
//! - [Sigmoid]
//! - [Softmax]
//! - [ReLU]
//! - [Tanh]
//!
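//! ## Example
//!
//! As a minimal, self-contained sketch of the kind of function this module abstracts over,
//! the logistic sigmoid can be written as a free function; note that this snippet is purely
//! illustrative and does not use this module's traits:
//!
//! ```
//! /// Logistic sigmoid: maps any real input into the open interval (0, 1).
//! fn sigmoid(x: f64) -> f64 {
//!     1.0 / (1.0 + (-x).exp())
//! }
//!
//! // sigmoid(0) is exactly 0.5; large positive inputs saturate toward 1.
//! assert!((sigmoid(0.0) - 0.5).abs() < f64::EPSILON);
//! assert!(sigmoid(10.0) > 0.999);
//! ```
//!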
#[doc(inline)]
pub use self::prelude::*;

pub(crate) mod traits {
    #[doc(inline)]
    pub use self::prelude::*;

    mod activate;
    mod unary;

    mod prelude {
        #[doc(inline)]
        pub use super::activate::*;
        #[doc(inline)]
        pub use super::unary::*;
    }
}

pub(crate) mod utils {
    #[doc(inline)]
    pub use self::prelude::*;

    mod non_linear;
    mod simple;

    mod prelude {
        #[doc(inline)]
        pub use super::non_linear::*;
        #[doc(inline)]
        pub use super::simple::*;
    }
}

mod impls {
    mod impl_binary;
    mod impl_linear;
    mod impl_nonlinear;
}

pub(crate) mod prelude {
    #[doc(inline)]
    pub use super::traits::*;
    #[doc(inline)]
    pub use super::utils::*;
}