-
Notifications
You must be signed in to change notification settings - Fork 1
/
Copy pathneuron.go
137 lines (114 loc) · 3.19 KB
/
neuron.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
package neural
import (
"crypto/rand"
"math/big"
)
// Neuron is a set of weights + bias linked to a layer
type Neuron struct {
	// MaxInputs is the number of inputs this neuron accepts; it also
	// sizes Weights and Inputs. Not serialized.
	MaxInputs int `json:"-"`
	// Weights holds one multiplier per input.
	Weights []float64 `json:"Weights"`
	// Bias is added to the weighted input sum before activation.
	Bias float64 `json:"Bias"`
	// Previous momentum of every weight and bias
	// (length MaxInputs+1: one slot per weight, last slot for the bias —
	// see NewNeuron and Mutate/Reset, which index Momentums[MaxInputs]).
	Momentums []float64 `json:"-"`
	// Layer to which neuron is linked
	Layer *Layer `json:"-"`
	// activation is the output of the last Think call.
	activation float64
	// delta and error are training state; not set anywhere in this file —
	// presumably written by the layer/network training code elsewhere.
	delta float64
	error float64
	// Inputs caches the input vector from the last Think call. Not serialized.
	Inputs []float64 `json:"-"`
}
// NewNeuron creates a neuron linked to a layer, with bias and weights
// randomly initialised in [-1, 1) and all momentums zeroed.
func NewNeuron(Layer *Layer, MaxInputs int) *Neuron {
	n := &Neuron{
		MaxInputs: MaxInputs,
		Weights:   make([]float64, MaxInputs),
		Bias:      randomFloat(-1.0, 1.0),
		Momentums: make([]float64, MaxInputs+1), // one per weight, plus one for the bias
		Layer:     Layer,
		Inputs:    make([]float64, MaxInputs),
	}
	for i := range n.Weights {
		n.Weights[i] = randomFloat(-1.0, 1.0)
	}
	return n
}
// Think processes the neuron forward: it caches the inputs, computes the
// weighted sum plus bias, and returns the layer's activation of that sum.
func (neuron *Neuron) Think(inputs []float64) float64 {
	total := neuron.Bias
	for i := 0; i < neuron.MaxInputs; i++ {
		neuron.Inputs[i] = inputs[i]
		total += inputs[i] * neuron.Weights[i]
	}
	neuron.activation = neuron.Layer.Forward(total)
	return neuron.activation
}
// Optimizer applies momentum-based learning: the stored momentum at index
// is decayed by the layer's momentum factor, added to value, saved back,
// and returned.
func (neuron *Neuron) Optimizer(index int, value float64) float64 {
	m := value + neuron.Layer.Momentum*neuron.Momentums[index]
	neuron.Momentums[index] = m
	return m
}
// Clone returns a new neuron attached to the same layer with identical
// weights and bias. Momentums, cached inputs, and training state (delta,
// error, activation) are NOT copied — the clone starts fresh, as produced
// by NewNeuron.
func (neuron *Neuron) Clone() *Neuron {
	clone := NewNeuron(neuron.Layer, neuron.MaxInputs)
	// copy is the idiomatic (and memmove-backed) form of the former
	// element-by-element loop.
	copy(clone.Weights, neuron.Weights)
	clone.Bias = neuron.Bias
	return clone
}
// Mutate perturbs weights and bias based on probability: each weight (and
// the bias) is, with the given probability, shifted by a random amount in
// [-1, 1) and has its momentum cleared.
func (neuron *Neuron) Mutate(probability float64) {
	for i := 0; i < neuron.MaxInputs; i++ {
		if cryptoRandomFloat() <= probability {
			neuron.Weights[i] += randomFloat(-1.0, 1.0)
			neuron.Momentums[i] = 0.0
		}
	}
	if cryptoRandomFloat() <= probability {
		neuron.Bias += randomFloat(-1.0, 1.0)
		neuron.Momentums[neuron.MaxInputs] = 0.0 // last slot belongs to the bias
	}
}
// Crossover merges two neurons: the child takes each weight (and the bias)
// from either this neuron or neuronB, chosen by an independent fair coin
// flip per slot.
//
// NOTE(review): the dominant parameter is currently unused — every choice
// is a fixed 50/50 flip. Confirm whether it was meant to bias the
// selection toward the dominant parent.
func (neuron *Neuron) Crossover(neuronB Neuron, dominant float64) *Neuron {
	// Renamed from `new`, which shadowed the builtin new().
	child := NewNeuron(neuron.Layer, neuron.MaxInputs)
	for i := range child.Weights {
		if cryptoRandomFloat() >= 0.5 {
			child.Weights[i] = neuron.Weights[i]
		} else {
			child.Weights[i] = neuronB.Weights[i]
		}
	}
	if cryptoRandomFloat() >= 0.5 {
		child.Bias = neuron.Bias
	} else {
		child.Bias = neuronB.Bias
	}
	return child
}
// Reset re-randomizes every weight and the bias in [-1, 1) and zeroes all
// momentum slots, returning the neuron to a freshly-initialised state.
func (neuron *Neuron) Reset() {
	for i := 0; i < neuron.MaxInputs; i++ {
		neuron.Momentums[i] = 0.0
		neuron.Weights[i] = randomFloat(-1.0, 1.0)
	}
	neuron.Momentums[neuron.MaxInputs] = 0.0 // bias momentum lives in the last slot
	neuron.Bias = randomFloat(-1.0, 1.0)
}
// randomFloat returns a cryptographically sourced random float64 in
// [min, max).
func randomFloat(min float64, max float64) float64 {
	span := max - min
	return min + span*cryptoRandomFloat()
}
// cryptoRandomFloat returns a random float64 in [0, 1), built from a
// uniform integer in [0, 1e17) drawn from crypto/rand. It panics if the
// system randomness source fails.
func cryptoRandomFloat() float64 {
	const resolution = int64(1e17)
	n, err := rand.Int(rand.Reader, big.NewInt(resolution))
	if err != nil {
		panic(err)
	}
	return float64(n.Int64()) / float64(resolution)
}
// randomInt returns a uniformly random int in [0, max), read from the
// system's cryptographic randomness source. It panics on a source failure
// or when max <= 0 (a constraint inherited from rand.Int).
func randomInt(max int64) int {
	v, err := rand.Int(rand.Reader, big.NewInt(max))
	if err != nil {
		panic(err)
	}
	return int(v.Int64())
}