-
Notifications
You must be signed in to change notification settings - Fork 1
/
Copy pathlayer.go
123 lines (101 loc) · 3 KB
/
layer.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
package neural
// Layer is a set of neurons + config.
//
// Fields tagged json:"-" are runtime-only; the remaining fields define
// the serialized form of a layer.
type Layer struct {
	// Inputs is the number of inputs each neuron receives
	// (default is previous layer units).
	Inputs int `json:"-"`
	// Units is the number of neurons in this layer.
	Units int `json:"-"`
	// Neurons holds the layer's neurons; populated by NewLayer.
	Neurons []*Neuron `json:"Neurons"`
	// Activation names the activation function. Default activation is sigmoid.
	Activation string `json:"Activation,omitempty"`
	// Forward/Backward are resolved from Activation by SetActivation.
	Forward  ForwardFn  `json:"-"`
	Backward BackwardFn `json:"-"`
	// Loss names the loss function. Default loss is mse.
	Loss   string `json:"Loss,omitempty"`
	LossFn LossFn `json:"-"`
	// Rate is the learning rate. Default rate is 0.001 (set in NewLayer).
	Rate float64 `json:"-"`
	// Momentum for weight updates. Default momentum is 0.999 (set in NewLayer).
	Momentum float64 `json:"-"`
	// Range of arbitrary values for input/output layers; NewLayer appends
	// the activation's bounds to each entry.
	Range [][]float64 `json:"Range,omitempty"`
}
// NewLayer creates a layer based on a simple layer definition: it fills
// in default rate/momentum, allocates the neurons, resolves the activation
// functions, and extends each Range entry with the activation's bounds.
// It mutates and returns the same *Layer it was given.
func NewLayer(layer *Layer) *Layer {
	// Default rate is 0.001.
	if layer.Rate == 0.0 {
		layer.Rate = 0.001
	}
	// Default momentum is 0.999.
	if layer.Momentum == 0.0 {
		layer.Momentum = 0.999
	}
	layer.Neurons = make([]*Neuron, layer.Units)
	for i := 0; i < layer.Units; i++ {
		layer.Neurons[i] = NewNeuron(layer, layer.Inputs)
	}
	activation := layer.SetActivation(layer.Activation)
	// The loop below indexes Ranges[0] and Ranges[1], so an activation
	// exposing fewer than two bounds must clear the range table first
	// (the original ==0 check let a single-bound activation panic with
	// index out of range).
	if len(activation.Ranges) < 2 {
		layer.Range = [][]float64{}
	}
	for i, total := 0, len(layer.Range); i < total; i++ {
		layer.Range[i] = append(layer.Range[i], activation.Ranges[0], activation.Ranges[1])
	}
	return layer
}
// Think runs the layer forward: each of the layer's Units neurons
// processes the given inputs, and the activations are returned in
// neuron order.
func (layer *Layer) Think(inputs []float64) []float64 {
	outputs := make([]float64, 0, layer.Units)
	for i := 0; i < layer.Units; i++ {
		outputs = append(outputs, layer.Neurons[i].Think(inputs))
	}
	return outputs
}
// Clone returns an independent copy of the layer: same configuration,
// cloned neurons, and a deep copy of the range table.
func (layer *Layer) Clone() *Layer {
	clone := NewLayer(&Layer{
		Inputs:     layer.Inputs,
		Units:      layer.Units,
		Activation: layer.Activation,
		Rate:       layer.Rate,
		Momentum:   layer.Momentum,
	})
	for i := 0; i < clone.Units; i++ {
		clone.Neurons[i] = layer.Neurons[i].Clone()
	}
	// Deep-copy Range: a plain copy() duplicates only the outer slice
	// headers, leaving clone and original sharing the inner []float64
	// backing arrays — mutating one would silently mutate the other.
	clone.Range = make([][]float64, len(layer.Range))
	for i, bounds := range layer.Range {
		clone.Range[i] = append([]float64(nil), bounds...)
	}
	return clone
}
// Mutate asks every neuron of the layer to mutate itself with the
// given probability.
func (layer *Layer) Mutate(probability float64) {
	// Re-slice to Units so iteration order and bounds behavior match
	// an index loop over [0, Units).
	for _, neuron := range layer.Neurons[:layer.Units] {
		neuron.Mutate(probability)
	}
}
// Crossover merges the neurons of two layers into a brand-new layer,
// forwarding dominant to each Neuron.Crossover call. The result deep-copies
// the receiver's range table so it does not alias either parent.
func (layer *Layer) Crossover(layerB *Layer, dominant float64) *Layer {
	// Named "child" rather than "new" — the original shadowed the
	// builtin new().
	child := NewLayer(&Layer{
		Inputs:     layer.Inputs,
		Units:      layer.Units,
		Activation: layer.Activation,
		Rate:       layer.Rate,
		Momentum:   layer.Momentum,
	})
	for i := 0; i < layer.Units; i++ {
		child.Neurons[i] = layer.Neurons[i].Crossover(*layerB.Neurons[i], dominant)
	}
	// Deep-copy Range: copy() alone would share the inner []float64
	// backing arrays between the child and the receiver.
	child.Range = make([][]float64, len(layer.Range))
	for i, bounds := range layer.Range {
		child.Range[i] = append([]float64(nil), bounds...)
	}
	return child
}
// Reset re-initializes every neuron in the layer (weights, bias, etc).
func (layer *Layer) Reset() {
	// Re-slice to Units so iteration order and bounds behavior match
	// an index loop over [0, Units).
	for _, neuron := range layer.Neurons[:layer.Units] {
		neuron.Reset()
	}
}
// SetActivation resolves the named activation into its forward/backward
// function pair, installs both on the layer, and returns the selected set.
func (layer *Layer) SetActivation(activation string) ActivationSet {
	selected := selectActivation(activation)
	layer.Forward, layer.Backward = selected.Forward, selected.Backward
	return selected
}