-
Notifications
You must be signed in to change notification settings - Fork 26
/
kmeans.go
154 lines (141 loc) · 4.12 KB
/
kmeans.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
package kmeans
import (
	"errors"
	"math"
	"math/rand"
)
// Observation is the data abstraction for an N-dimensional
// observation: a point represented as a slice of float64 coordinates.
type Observation []float64

// ClusteredObservation pairs an Observation with the index of the
// cluster it is currently assigned to, which makes the update and
// computation steps of the algorithm more efficient.
type ClusteredObservation struct {
	ClusterNumber int
	Observation
}

// DistanceFunction computes the distance between two observations.
// The implementations used here are expected to return a squared
// distance (near applies math.Sqrt to the result).
type DistanceFunction func(first, second []float64) (float64, error)

/*
func (observation Observation) Sqd(otherObservation Observation) (ssq float64) {
	for ii, jj := range observation {
		d := jj - otherObservation[ii]
		ssq += d * d
	}
	return ssq
}
*/
// Add accumulates otherObservation into the receiver in place,
// adding the vectors component by component.
func (observation Observation) Add(otherObservation Observation) {
	for i := range otherObservation {
		observation[i] += otherObservation[i]
	}
}
// Mul scales the receiver in place, multiplying every component
// by the given scalar.
func (observation Observation) Mul(scalar float64) {
	for i, v := range observation {
		observation[i] = v * scalar
	}
}
// InnerProduct multiplies the receiver by otherObservation
// componentwise, in place. Despite the name, this is an elementwise
// (Hadamard) product: the products are not summed and nothing is
// returned.
func (observation Observation) InnerProduct(otherObservation Observation) {
	for i, v := range otherObservation {
		observation[i] *= v
	}
}
// OuterProduct returns the matrix of pairwise products, where
// result[i][j] = observation[i] * otherObservation[j].
// TODO: Needs test coverage.
func (observation Observation) OuterProduct(otherObservation Observation) [][]float64 {
	result := make([][]float64, len(observation))
	for i, vi := range observation {
		row := make([]float64, len(otherObservation))
		for j, vj := range otherObservation {
			row[j] = vi * vj
		}
		result[i] = row
	}
	return result
}
// near locates the centroid in mean that is closest to p and returns
// its index together with the distance to it (math.Sqrt of the value
// produced by distanceFunction, which is assumed to be squared).
// NOTE(review): errors returned by distanceFunction are silently
// dropped here, matching the behavior of the surrounding API.
func near(p ClusteredObservation, mean []Observation, distanceFunction DistanceFunction) (int, float64) {
	best := 0
	bestSqDist, _ := distanceFunction(p.Observation, mean[0])
	for idx := 1; idx < len(mean); idx++ {
		if sqDist, _ := distanceFunction(p.Observation, mean[idx]); sqDist < bestSqDist {
			bestSqDist = sqDist
			best = idx
		}
	}
	return best, math.Sqrt(bestSqDist)
}
// seed implements k-means++ initialization instead of purely random
// seeding: the first centroid is drawn uniformly from data, and each
// subsequent centroid is drawn with probability proportional to the
// squared distance from each point to its nearest centroid so far.
func seed(data []ClusteredObservation, k int, distanceFunction DistanceFunction) []Observation {
	centroids := make([]Observation, k)
	centroids[0] = data[rand.Intn(len(data))].Observation
	weights := make([]float64, len(data))
	for next := 1; next < k; next++ {
		// Weight every point by its squared distance to the closest
		// already-chosen centroid.
		total := 0.0
		for idx, p := range data {
			_, dMin := near(p, centroids[:next], distanceFunction)
			weights[idx] = dMin * dMin
			total += weights[idx]
		}
		// Roulette-wheel selection over the weights.
		target := rand.Float64() * total
		chosen := 0
		for acc := weights[0]; acc < target; acc += weights[chosen] {
			chosen++
		}
		centroids[next] = data[chosen].Observation
	}
	return centroids
}
// kmeans runs Lloyd's algorithm on data starting from the given
// centroids. It alternates assignment (attach each observation to its
// nearest centroid) and update (recompute each centroid as the mean of
// its members) until no assignment changes or threshold iterations
// have elapsed. data is labeled in place and returned; the returned
// error is always nil but is kept for interface stability.
func kmeans(data []ClusteredObservation, mean []Observation, distanceFunction DistanceFunction, threshold int) ([]ClusteredObservation, error) {
	// Initial assignment against the seed centroids.
	for ii, p := range data {
		closestCluster, _ := near(p, mean, distanceFunction)
		data[ii].ClusterNumber = closestCluster
	}
	n := len(data[0].Observation)
	counts := make([]int, len(mean))
	for counter := 0; ; counter++ {
		// Update step: recompute each centroid from its members.
		for ii := range mean {
			mean[ii] = make(Observation, n)
			counts[ii] = 0
		}
		for _, p := range data {
			mean[p.ClusterNumber].Add(p.Observation)
			counts[p.ClusterNumber]++
		}
		for ii := range mean {
			// BUG FIX: guard against empty clusters. The original code
			// divided by zero here (1/float64(0) == +Inf), turning the
			// centroid into NaNs that poison every subsequent distance
			// computation. An empty cluster now keeps the zero vector.
			if counts[ii] > 0 {
				mean[ii].Mul(1 / float64(counts[ii]))
			}
		}
		// Assignment step: reassign and count how many points moved.
		changes := 0
		for ii, p := range data {
			if closestCluster, _ := near(p, mean, distanceFunction); closestCluster != p.ClusterNumber {
				changes++
				data[ii].ClusterNumber = closestCluster
			}
		}
		// counter here equals the original's post-increment counter minus
		// one, so ">=" reproduces the original "counter > threshold" exit.
		if changes == 0 || counter >= threshold {
			return data, nil
		}
	}
}
// Kmeans clusters rawData into k groups using k-means++ seeding
// followed by Lloyd's algorithm, stopping after convergence or
// threshold iterations. It returns one cluster label per input row,
// in input order, along with any error from the clustering pass.
func Kmeans(rawData [][]float64, k int, distanceFunction DistanceFunction, threshold int) ([]int, error) {
	// Robustness fix: empty input or a non-positive k previously caused
	// an index-out-of-range panic inside seed; fail with an error instead.
	if len(rawData) == 0 {
		return nil, errors.New("kmeans: no data supplied")
	}
	if k < 1 {
		return nil, errors.New("kmeans: k must be at least 1")
	}
	data := make([]ClusteredObservation, len(rawData))
	for ii, observation := range rawData {
		data[ii].Observation = observation
	}
	seeds := seed(data, k, distanceFunction)
	clusteredData, err := kmeans(data, seeds, distanceFunction, threshold)
	labels := make([]int, len(clusteredData))
	for ii, p := range clusteredData {
		labels[ii] = p.ClusterNumber
	}
	return labels, err
}