activation.py
import numpy as np


class Relu:
    def __init__(self):
        self.cache = {}
        self.has_units = False

    def has_weights(self):
        return self.has_units

    def forward_propagate(self, Z, save_cache=False):
        # ReLU(Z) = max(0, Z); cache the pre-activation for the backward pass.
        if save_cache:
            self.cache['Z'] = Z
        return np.where(Z >= 0, Z, 0)

    def back_propagate(self, dA):
        # ReLU'(Z) = 1 where Z >= 0, else 0.
        Z = self.cache['Z']
        return dA * np.where(Z >= 0, 1, 0)


class Softmax:
    def __init__(self):
        self.cache = {}
        self.has_units = False

    def has_weights(self):
        return self.has_units

    def forward_propagate(self, Z, save_cache=False):
        if save_cache:
            self.cache['Z'] = Z
        # Subtract the per-column max for numerical stability; classes lie
        # along axis 0 and samples along axis 1.
        Z_ = Z - Z.max(axis=0, keepdims=True)
        e = np.exp(Z_)
        return e / np.sum(e, axis=0, keepdims=True)

    def back_propagate(self, dA):
        # Elementwise (diagonal-of-the-Jacobian) softmax derivative,
        # dZ_i = dA_i * A_i * (1 - A_i), applied to the activations A
        # rather than the cached pre-activations Z.
        A = self.forward_propagate(self.cache['Z'])
        return dA * A * (1 - A)
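

# A minimal sketch of the usual pairing of softmax with cross-entropy loss.
# This helper is not part of the original module: `softmax_cross_entropy_grad`
# and the assumption that Y is one-hot with classes on axis 0 are illustrative
# only. With cross-entropy, the gradient with respect to the pre-activations
# collapses to A - Y, which sidesteps the full softmax Jacobian entirely.
def softmax_cross_entropy_grad(A, Y):
    # A: softmax outputs, Y: one-hot targets, both shaped (classes, batch);
    # average over the batch dimension.
    return (A - Y) / Y.shape[1]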


class Elu:
    def __init__(self, alpha=1.2):
        self.cache = {}
        self.params = {
            'alpha': alpha
        }
        self.has_units = False

    def has_weights(self):
        return self.has_units

    def forward_propagate(self, Z, save_cache=False):
        # ELU(Z) = Z for Z >= 0, alpha * (exp(Z) - 1) otherwise.
        if save_cache:
            self.cache['Z'] = Z
        return np.where(Z >= 0, Z, self.params['alpha'] * (np.exp(Z) - 1))

    def back_propagate(self, dA):
        # ELU'(Z) = 1 for Z >= 0 and alpha * exp(Z) otherwise
        # (equivalently, ELU(Z) + alpha on the negative branch).
        alpha = self.params['alpha']
        Z = self.cache['Z']
        return dA * np.where(Z >= 0, 1, alpha * np.exp(Z))


class Selu:
    def __init__(self, alpha=1.6733, selu_lambda=1.0507):
        self.params = {
            'alpha': alpha,
            'lambda': selu_lambda
        }
        self.cache = {}
        self.has_units = False

    def has_weights(self):
        return self.has_units

    def forward_propagate(self, Z, save_cache=False):
        # SELU(Z) = lambda * Z for Z >= 0,
        # lambda * alpha * (exp(Z) - 1) otherwise.
        if save_cache:
            self.cache['Z'] = Z
        return self.params['lambda'] * np.where(
            Z >= 0, Z, self.params['alpha'] * (np.exp(Z) - 1))

    def back_propagate(self, dA):
        # SELU'(Z) = lambda for Z >= 0, lambda * alpha * exp(Z) otherwise.
        Z = self.cache['Z']
        selu_lambda, alpha = self.params['lambda'], self.params['alpha']
        return dA * selu_lambda * np.where(Z >= 0, 1, alpha * np.exp(Z))
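

# A minimal smoke test, assuming activations shaped (features, batch) as in
# the classes above. The finite-difference gradient check is illustrative and
# not part of the original module.
if __name__ == '__main__':
    rng = np.random.default_rng(0)
    Z = rng.standard_normal((4, 3))

    # Run a forward and backward pass through each activation.
    for act in (Relu(), Softmax(), Elu(), Selu()):
        A = act.forward_propagate(Z, save_cache=True)
        dZ = act.back_propagate(np.ones_like(A))
        print(type(act).__name__, A.shape, dZ.shape)

    # Central-difference check of the ELU gradient at one coordinate.
    elu, eps = Elu(), 1e-6
    elu.forward_propagate(Z, save_cache=True)
    analytic = elu.back_propagate(np.ones_like(Z))[0, 0]
    Zp = Z.copy()
    Zp[0, 0] += eps
    Zm = Z.copy()
    Zm[0, 0] -= eps
    numeric = (Elu().forward_propagate(Zp)[0, 0]
               - Elu().forward_propagate(Zm)[0, 0]) / (2 * eps)
    assert abs(analytic - numeric) < 1e-4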