ActiveFunction.py
"""
several active functions:
ReLU
Sigmoid
LeakyReLU
Softmax
Tanh
"""
import numpy as np


class ReLU:
    def __init__(self):
        self.middle = None  # cached mask of positive inputs, used in backward

    def __call__(self, x):
        return self.forward(x)

    def forward(self, x):
        self.middle = x > 0
        return self.middle * x

    def backward(self, grad_in):
        # gradient passes through only where the input was positive
        return grad_in * self.middle

    def __repr__(self):
        return "Activation Function: ReLU"


class Sigmoid:
    def __init__(self):
        self.middle = None  # cached sigmoid output, used in backward

    def __call__(self, x):
        return self.forward(x)

    def forward(self, x):
        self.middle = 1.0 / (1.0 + np.exp(-x))
        return self.middle

    def backward(self, grad_in):
        # d(sigmoid)/dx = sigmoid * (1 - sigmoid)
        return grad_in * self.middle * (1 - self.middle)

    def __repr__(self):
        return "Activation Function: Sigmoid"


class LeakyReLU:
    def __init__(self, slope=0.1):
        self.middle = None  # cached mask of positive inputs, used in backward
        self.slope = slope  # slope applied on the negative side

    def __call__(self, x):
        return self.forward(x)

    def forward(self, x):
        self.middle = x > 0
        return self.middle * x + (1 - self.middle) * self.slope * x

    def backward(self, grad_in):
        return self.middle * grad_in + (1 - self.middle) * self.slope * grad_in

    def __repr__(self):
        return "Activation Function: LeakyReLU"


class Softmax:
    def __init__(self):
        self.middle = None  # cached softmax output, used in backward

    def __call__(self, x):
        return self.forward(x)

    def forward(self, x):
        # rows are samples, columns are class scores
        # subtract the per-row max to prevent overflow in exp
        x = x - np.max(x, axis=1, keepdims=True)
        self.middle = np.exp(x) / np.sum(np.exp(x), axis=1, keepdims=True)
        return self.middle

    def backward(self, grad_in):
        # Jacobian-vector product per row: s * (grad - sum(grad * s))
        temp = np.sum(grad_in * self.middle, axis=1, keepdims=True)
        return self.middle * grad_in - self.middle * temp

    def __repr__(self):
        return "Activation Function: Softmax"


class Tanh:
    def __init__(self):
        self.middle = None  # cached tanh output, used in backward

    def __call__(self, x):
        return self.forward(x)

    def forward(self, x):
        # np.tanh is numerically stable; subtracting the row max (as softmax does)
        # would change the result, since tanh is not shift-invariant
        self.middle = np.tanh(x)
        return self.middle

    def backward(self, grad_in):
        # d(tanh)/dx = 1 - tanh^2
        return grad_in * (1 - self.middle ** 2)

    def __repr__(self):
        return "Activation Function: Tanh"