activations.py
import numpy as np


class LeakyReLu:
    """Leaky ReLU activation: max(alpha * z, z)."""

    def __init__(self, alpha=0.001):
        self.alpha = alpha
        self.input = None

    def forward(self, z):
        self.input = z
        return np.maximum(self.alpha * z, z)

    def backward(self, d_a):
        # Slope is 1 for positive inputs and alpha for the rest
        # (a subgradient choice at z == 0).
        r = np.where(self.input > 0, 1.0, self.alpha)
        return r * d_a


class Sigmoid:
    """Logistic sigmoid activation: 1 / (1 + exp(-z))."""

    def __init__(self):
        self.input = None
        self.output = None

    def forward(self, z):
        self.input = z
        self.output = 1 / (1 + np.exp(-z))
        return self.output

    def backward(self, d_a):
        # sigma'(z) = sigma(z) * (1 - sigma(z)), reusing the cached output.
        return self.output * (1 - self.output) * d_a


class Tanh:
    """Hyperbolic tangent activation."""

    def __init__(self):
        self.input = None
        self.output = None

    def forward(self, z):
        self.input = z
        self.output = np.tanh(z)
        return self.output

    def backward(self, d_a):
        # tanh'(z) = 1 - tanh(z)^2, reusing the cached output.
        return (1 - self.output ** 2) * d_a


class Softmax:
    """Row-wise softmax, stabilised by subtracting each row's maximum."""

    def __init__(self):
        self.input = None
        self.output = None

    def forward(self, x):
        self.input = x
        e = np.exp(x - x.max(axis=1, keepdims=True))
        self.output = e / e.sum(axis=1, keepdims=True)
        return self.output

    def backward(self, d_a):
        # Row-wise Jacobian-vector product of the softmax:
        # s * (d_a - sum(s * d_a)).
        return self.output * (d_a - np.sum(self.output * d_a, axis=1, keepdims=True))
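A minimal usage sketch, not part of the original module: it gradient-checks Sigmoid.backward against a central finite-difference estimate, assuming only NumPy and the classes defined above.

# Usage sketch (illustration only): verify Sigmoid.backward numerically.
if __name__ == "__main__":
    rng = np.random.default_rng(0)
    z = rng.normal(size=(4, 3))      # small batch of pre-activations
    d_a = np.ones_like(z)            # upstream gradient of ones

    act = Sigmoid()
    act.forward(z)
    analytic = act.backward(d_a)

    # Central finite-difference estimate of d(sigmoid)/dz.
    eps = 1e-6
    numeric = (Sigmoid().forward(z + eps) - Sigmoid().forward(z - eps)) / (2 * eps)

    # Should print a value on the order of 1e-10 if backward is correct.
    print(np.max(np.abs(analytic - numeric)))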