1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
use std::fmt::Debug;
/// A scalar activation function together with its derivative and inverse.
///
/// Implementors are stateless marker types (`Sigmoid`, `Linear`, `Exp`,
/// `Tanh`, …); all methods are associated functions operating on a single
/// `f64`, so the trait can be used as a zero-sized type parameter.
pub trait ActivationFunc: Clone + Debug {
    /// Applies the activation to the input `x`.
    fn func(x: f64) -> f64;
    /// Derivative of the activation evaluated at the *input* `x`.
    fn func_grad(x: f64) -> f64;
    /// Derivative expressed in terms of the *output* `y = func(x)`,
    /// avoiding a recomputation of `func` during backpropagation.
    fn func_grad_from_output(y: f64) -> f64;
    /// Inverse of the activation: `func_inv(func(x)) == x` for `x` in the
    /// activation's range (e.g. (0, 1) for the sigmoid).
    fn func_inv(x: f64) -> f64;
}
/// Logistic sigmoid activation: `σ(x) = 1 / (1 + e^{-x})`, output in (0, 1).
#[derive(Clone, Copy, Debug)]
pub struct Sigmoid;

impl ActivationFunc for Sigmoid {
    fn func(x: f64) -> f64 {
        1.0 / (1.0 + (-x).exp())
    }

    /// σ'(x) = σ(x)·(1 − σ(x)). Evaluates σ(x) once instead of twice
    /// (the previous version called `Self::func(x)` in both factors).
    fn func_grad(x: f64) -> f64 {
        let y = Self::func(x);
        y * (1.0 - y)
    }

    fn func_grad_from_output(y: f64) -> f64 {
        y * (1.0 - y)
    }

    /// Logit: `ln(x / (1 − x))`, the inverse of the sigmoid.
    /// Only meaningful for `x` in (0, 1); outside that interval the result
    /// is ±∞ or NaN, matching IEEE-754 semantics of `ln`.
    fn func_inv(x: f64) -> f64 {
        (x / (1.0 - x)).ln()
    }
}
/// Identity (linear) activation: passes the input through unchanged.
#[derive(Clone, Copy, Debug)]
pub struct Linear;

impl ActivationFunc for Linear {
    /// f(x) = x.
    fn func(x: f64) -> f64 {
        x
    }

    /// The identity has a constant slope of one everywhere.
    fn func_grad(_x: f64) -> f64 {
        1.0
    }

    /// Independent of the output as well: always one.
    fn func_grad_from_output(_y: f64) -> f64 {
        1.0
    }

    /// The identity is its own inverse.
    fn func_inv(x: f64) -> f64 {
        x
    }
}
/// Exponential activation: `f(x) = e^x`, output in (0, ∞).
#[derive(Clone, Copy, Debug)]
pub struct Exp;

impl ActivationFunc for Exp {
    fn func(x: f64) -> f64 {
        f64::exp(x)
    }

    /// d/dx e^x = e^x.
    fn func_grad(x: f64) -> f64 {
        f64::exp(x)
    }

    /// Since `y = e^x`, the derivative equals the output itself.
    fn func_grad_from_output(y: f64) -> f64 {
        y
    }

    /// Natural logarithm, the inverse of `e^x`; defined for `x > 0`.
    fn func_inv(x: f64) -> f64 {
        f64::ln(x)
    }
}
/// Hyperbolic tangent activation, output in (−1, 1).
#[derive(Clone, Copy, Debug)]
pub struct Tanh;

impl ActivationFunc for Tanh {
    fn func(x: f64) -> f64 {
        x.tanh()
    }

    /// tanh'(x) = 1 − tanh²(x); tanh is evaluated once.
    fn func_grad(x: f64) -> f64 {
        let y = x.tanh();
        1.0 - y * y
    }

    fn func_grad_from_output(y: f64) -> f64 {
        1.0 - y * y
    }

    /// Inverse hyperbolic tangent; valid for `x` in (−1, 1).
    /// Uses the standard library's `f64::atanh` instead of the hand-rolled
    /// `0.5 · ln((1 + x) / (1 − x))`, which loses precision for `x` near
    /// zero (catastrophic cancellation inside the `1 + x` / `1 − x` terms).
    fn func_inv(x: f64) -> f64 {
        x.atanh()
    }
}