// neuros/activations.rs

/// Supported neuron activation functions.
///
/// `PartialEq`/`Eq`/`Hash` are derived so variants can be compared and used
/// as map keys directly (e.g. when validating a round-tripped configuration);
/// this is purely additive and backward compatible.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub enum Activations {
	Sigmoid,
	TanH,
	ReLU,
	LeakyRelu,
	SoftMax,
	Linear
}

12pub fn activation_to_string(act:&Activations) -> String {
13	match act {
14		Activations::Sigmoid => "sigmoid".to_string(),
15		Activations::TanH => "tanh".to_string(),
16		Activations::ReLU => "relu".to_string(),
17		Activations::LeakyRelu => "leakyrelu".to_string(),
18		Activations::Linear => "linear".to_string(),
19		Activations::SoftMax => "softmax".to_string(),
20	}
21}
22
23pub fn string_to_activation(val:&str) -> Activations {
24	match val {
25		"sigmoid" => Activations::Sigmoid,
26		"tanh" => Activations::TanH,
27		"relu" => Activations::ReLU,
28		"leakyrelu" => Activations::LeakyRelu,
29		"linear" => Activations::Linear,
30		"softmax" => Activations::SoftMax,
31		_ => Activations::LeakyRelu
32	}
33}
34
/// Logistic sigmoid: 1 / (1 + e^(-x)).
///
/// Computed in a numerically stable, sign-split form. The original
/// `x.exp() / (1.0 + x.exp())` overflows for large positive `x`
/// (`f64::exp` returns `inf` above ~709), yielding `inf / inf = NaN`;
/// each branch below only ever exponentiates a non-positive value.
pub fn sigmoid(x:f64) -> f64
{
	if x >= 0.0 {
		// e^{-x} <= 1 here, so no overflow is possible.
		1.0 / (1.0 + (-x).exp())
	} else {
		// Same expression as the original; safe because e^x < 1 for x < 0.
		let k = x.exp();
		k / (1.0 + k)
	}
}

/// Hyperbolic tangent activation; a thin wrapper over the standard
/// library's `f64::tanh`, kept so all activations share one calling shape.
pub fn tanh(x:f64) -> f64
{
	f64::tanh(x)
}

/// Rectified linear unit: clamps negative inputs to zero, passes positive
/// inputs through unchanged. The comparison direction matches the original
/// (`0.0 >= x`), so edge behavior is identical.
pub fn relu(x:f64) -> f64
{
	if 0.0 >= x { 0.0 } else { x }
}

/// Leaky ReLU with a fixed negative-side slope of 0.01: returns `x` for
/// positive inputs and `0.01 * x` for zero or negative inputs (at x = 0
/// both forms give 0, so this is identical to the original branch).
pub fn leakyrelu(x:f64) -> f64
{
	if x > 0.0 { x } else { 0.01 * x }
}

/// Derivative of the sigmoid expressed in terms of its output:
/// if y = sigmoid(z), then dy/dz = y * (1 - y).
/// NOTE(review): this assumes `x` is the already-activated output, not the
/// pre-activation input — the usual convention for this formula; confirm
/// against the backprop call site.
pub fn sigmoid_deriv(x:f64) -> f64
{
	(1.0 - x) * x
}

/// Derivative of tanh expressed in terms of its output:
/// if y = tanh(z), then dy/dz = 1 - y^2.
/// NOTE(review): as with `sigmoid_deriv`, this assumes `x` is the
/// already-activated output — confirm against the backprop call site.
pub fn tanh_deriv(x:f64) -> f64
{
	let squared = x * x;
	1.0 - squared
}

/// Derivative of ReLU: 0 for x <= 0, constant slope 1 for x > 0.
///
/// Fixes the original, whose positive branch returned `x` itself instead of
/// 1.0 — that scales gradients by the input value and corrupts backprop.
pub fn relu_deriv(x:f64) -> f64
{
	if x > 0.0 { 1.0 } else { 0.0 }
}

/// Derivative of Leaky ReLU: 0.01 for x <= 0, constant slope 1 for x > 0.
///
/// Fixes the original, whose positive branch returned `x` itself instead of
/// 1.0 — that scales gradients by the input value and corrupts backprop.
pub fn leakyrelu_deriv(x:f64) -> f64
{
	if x > 0.0 { 1.0 } else { 0.01 }
}

/// Per-element numerator of the softmax: e^x.
///
/// NOTE(review): this computes only the unnormalized exponent for a single
/// scalar; the division by the sum over all outputs (the actual softmax)
/// presumably happens at the call site — confirm against callers. Large
/// inputs will overflow to `inf` without a max-subtraction step upstream.
pub fn softmax(x:f64) -> f64
{
	f64::exp(x)
}
