-
Notifications
You must be signed in to change notification settings - Fork 1
/
oneClassSvm.go
78 lines (64 loc) · 1.74 KB
/
oneClassSvm.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
package anomaly
import (
"math"
)
// OneClassSVM is a kernel-based anomaly detector. It is trained on
// normal data via Fit and flags outliers via Predict.
type OneClassSVM struct {
	// SupportVectors holds the training samples retained by Fit
	// (those whose self-similarity is at or above Bias).
	SupportVectors []float64
	// Bias is the decision threshold computed by Fit as the mean
	// self-similarity K(x_i, x_i) over the training samples.
	Bias float64
	// Gamma is the kernel coefficient passed to Kernel on every call.
	Gamma float64
	// Kernel computes the similarity of two scalar samples given gamma
	// (e.g. the RBF kernel below).
	Kernel func(x, y, gamma float64) float64
}
// NewOneClassSVM returns an untrained OneClassSVM that uses the given
// gamma and kernel function. SupportVectors and Bias stay at their zero
// values until Fit is called.
func NewOneClassSVM(gamma float64, kernel func(x, y, gamma float64) float64) *OneClassSVM {
	svm := &OneClassSVM{
		Gamma:  gamma,
		Kernel: kernel,
	}
	return svm
}
// Fit trains the model on normalData. It computes each sample's
// self-similarity K(x_i, x_i), sets Bias to the mean of those values,
// and retains as support vectors every sample whose self-similarity is
// at or above Bias.
//
// An empty normalData leaves the model with zero Bias and no support
// vectors (previously this divided by zero and stored NaN in Bias,
// which made every later Predict score NaN).
func (svm *OneClassSVM) Fit(normalData []float64) {
	numSamples := len(normalData)
	if numSamples == 0 {
		svm.Bias = 0
		svm.SupportVectors = nil
		return
	}

	// Only the diagonal of the Gram matrix is ever used, so compute just
	// K(x_i, x_i) for each sample instead of the full O(n^2) matrix the
	// previous version allocated and filled.
	selfSim := make([]float64, numSamples)
	sum := 0.0
	for i, x := range normalData {
		selfSim[i] = svm.Kernel(x, x, svm.Gamma)
		sum += selfSim[i]
	}

	// Bias is the mean self-similarity across the training set.
	svm.Bias = sum / float64(numSamples)

	// Keep every sample whose self-similarity meets or exceeds the bias.
	svm.SupportVectors = make([]float64, 0, numSamples)
	for i, x := range normalData {
		if selfSim[i] >= svm.Bias {
			svm.SupportVectors = append(svm.SupportVectors, x)
		}
	}
}
// Predict scores every candidate point against the trained model and
// returns those classified as anomalies. A point is anomalous when the
// sum of its kernel similarities to all support vectors falls below
// Bias.
func (svm *OneClassSVM) Predict(potentialAnomalies []float64) []float64 {
	var anomalies []float64
	for _, candidate := range potentialAnomalies {
		// Start at -Bias and add similarities; a negative total means
		// the point sits below the decision threshold.
		score := -svm.Bias
		for _, support := range svm.SupportVectors {
			score += svm.Kernel(support, candidate, svm.Gamma)
		}
		if score < 0 {
			anomalies = append(anomalies, candidate)
		}
	}
	return anomalies
}
// rbfKernel is the Gaussian radial basis function kernel for scalar
// inputs: exp(-gamma * (x - y)^2). It returns 1 when x == y and decays
// toward 0 as the points move apart.
func rbfKernel(x, y, gamma float64) float64 {
	d := x - y
	return math.Exp(d * d * -gamma)
}