relu_layer.go

package cnns

import (
	"fmt"

	"github.com/LdDl/cnns/tensor"
	"gonum.org/v1/gonum/mat"
)

// ReLULayer Rectified Linear Unit layer (activation: max(0, x))
/*
	Oj - Input data
	Ok - Output data
	LocalDelta - Incoming gradients*weights (backpropagation)
*/
type ReLULayer struct {
	Oj         *mat.Dense
	Ok         *mat.Dense
	LocalDelta *mat.Dense
	OutputSize *tensor.TDsize
	inputSize  *tensor.TDsize
	trainMode  bool
}

// NewReLULayer - Constructor for new ReLU layer. You need to specify input size
/*
	inSize - input layer's size
*/
func NewReLULayer(inSize *tensor.TDsize) Layer {
	newLayer := &ReLULayer{
		inputSize:  inSize,
		Oj:         mat.NewDense(inSize.X*inSize.Z, inSize.Y, nil),
		Ok:         mat.NewDense(inSize.X*inSize.Z, inSize.Y, nil),
		LocalDelta: mat.NewDense(inSize.X*inSize.Z, inSize.Y, nil),
		OutputSize: &tensor.TDsize{X: inSize.X, Y: inSize.Y, Z: inSize.Z},
		trainMode:  false,
	}
	return newLayer
}
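
// A minimal usage sketch (added for illustration, not part of the original source):
// constructing a ReLU layer for a 28x28 single-channel input. The TDsize field
// names follow the usage above; the concrete sizes here are assumptions.
//
//	layer := NewReLULayer(&tensor.TDsize{X: 28, Y: 28, Z: 1})
//	fmt.Println(layer.GetOutputSize()) // same dimensions as the input: 28x28x1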

// SetCustomWeights Set user's weights for ReLU layer (use it carefully)
func (relu *ReLULayer) SetCustomWeights(t []*mat.Dense) {
	fmt.Println("There are no weights for ReLU layer")
}

// GetInputSize Returns dimensions of incoming data for ReLU layer
func (relu *ReLULayer) GetInputSize() *tensor.TDsize {
	return relu.inputSize
}

// GetOutputSize Returns output size (dimensions) of ReLU layer
func (relu *ReLULayer) GetOutputSize() *tensor.TDsize {
	return relu.OutputSize
}

// GetActivatedOutput Returns ReLU layer's output
func (relu *ReLULayer) GetActivatedOutput() *mat.Dense {
	return relu.Ok
}

// GetWeights Returns ReLU layer's weights
func (relu *ReLULayer) GetWeights() []*mat.Dense {
	fmt.Println("There are no weights for ReLU layer")
	return nil
}

// GetGradients Returns ReLU layer's gradients
func (relu *ReLULayer) GetGradients() *mat.Dense {
	return relu.LocalDelta
}

// FeedForward - Feed data to ReLU layer
func (relu *ReLULayer) FeedForward(t *mat.Dense) error {
	relu.Oj = t
	relu.doActivation()
	return nil
}

// doActivation ReLU layer's output activation: Ok[j] = max(0, Oj[j]), element-wise
func (relu *ReLULayer) doActivation() {
	rawOj := relu.Oj.RawMatrix().Data
	rawOk := relu.Ok.RawMatrix().Data
	for j := range rawOj {
		if rawOj[j] < 0 {
			rawOk[j] = 0
		} else {
			rawOk[j] = rawOj[j]
		}
	}
}
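
// Illustrative sketch (an assumption, not from the original source): for a ReLU
// layer built with size 2x2x1, FeedForward zeroes the negative entries only:
//
//	in := mat.NewDense(2, 2, []float64{-1.5, 2.0, 0.0, -0.3})
//	_ = relu.FeedForward(in)
//	// relu.GetActivatedOutput() now holds {0, 2.0, 0.0, 0}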

// CalculateGradients Evaluate ReLU layer's gradients
func (relu *ReLULayer) CalculateGradients(errorsDense *mat.Dense) error {
	raw := relu.Oj.RawMatrix().Data
	rawDelta := relu.LocalDelta.RawMatrix().Data
	rawErrors := errorsDense.RawMatrix().Data
	for i := range raw {
		if raw[i] < 0 {
			rawDelta[i] = 0
		} else {
			rawDelta[i] = rawErrors[i]
		}
	}
	return nil
}
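
// Continuing the sketch above (an assumption, not from the original source): with
// Oj = {-1.5, 2.0, 0.0, -0.3}, incoming errors are passed through only where the
// input was non-negative:
//
//	_ = relu.CalculateGradients(mat.NewDense(2, 2, []float64{0.1, 0.2, 0.3, 0.4}))
//	// relu.GetGradients() now holds {0, 0.2, 0.3, 0}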

// UpdateWeights Just to point that ReLU layer does NOT update weights
func (relu *ReLULayer) UpdateWeights(lp *LearningParams) {
	// There are no weights to update for ReLU layer
}

// PrintOutput Pretty print ReLU layer's output
func (relu *ReLULayer) PrintOutput() {
	fmt.Println("Printing ReLU Layer output...")
}

// PrintWeights Just to point that ReLU layer has no weights
func (relu *ReLULayer) PrintWeights() {
	fmt.Println("There are no weights for ReLU layer")
}

// SetActivationFunc Set activation function for layer
func (relu *ReLULayer) SetActivationFunc(f func(v float64) float64) {
	// Nothing here. Just for interface.
	fmt.Println("You can not set activation function for ReLU layer")
}

// SetActivationDerivativeFunc Set derivative of activation function
func (relu *ReLULayer) SetActivationDerivativeFunc(f func(v float64) float64) {
	// Nothing here. Just for interface.
	fmt.Println("You can not set derivative of activation function for ReLU layer")
}

// GetStride Returns stride of layer
func (relu *ReLULayer) GetStride() int {
	return 0
}

// GetType Returns "relu" as layer's type
func (relu *ReLULayer) GetType() string {
	return "relu"
}