main.cpp
#include "TrainingData.h"
#include "Net.h"
#define ETA 0.5 // net learning rate, [0.0..1.0]
#define trainingFile "trainingData.txt"
void showVectorVals(std::string label, std::vector<double> &v)
{
std::cout << label << " ";
for (unsigned i = 0; i < v.size(); ++i) {
std::cout << v[i] << " ";
}
std::cout << std::endl;
}
double activationFunc(double net) {
return 1.0/(1.0+exp(-net));
}
double activationFuncDerivative(double net) {
return activationFunc(net)*(1-activationFunc(net));
}
int main()
{
TrainingData trainData(trainingFile);
// e.g., { 2, 4, 1 }
std::vector<unsigned> topology;
// get the structure of the net
trainData.getTopology(topology);
Net myNet(topology, ETA, &activationFunc, &activationFuncDerivative);
std::vector<double> inputVals, targetVals, resultVals;
int trainingIteration = 0;
while (!trainData.isEof()) {
++trainingIteration;
std::cout << std::endl << "Iteration " << trainingIteration ;
// Get new input data and feed it forward:
if (trainData.getNextInputs(inputVals) != topology[0]) {
break;
}
showVectorVals(": Inputs:", inputVals);
// feed Forward
myNet.feedForward(inputVals);
// Collect the net's actual output results:
myNet.getResults(resultVals);
showVectorVals("Outputs:", resultVals);
// Train the net what the outputs should have been:
trainData.getTargetOutputs(targetVals);
showVectorVals("Targets:", targetVals);
assert(targetVals.size() == topology.back());
myNet.backProp(targetVals);
// Report how well the training is working, average over recent samples:
std::cout << "loss: " << myNet.getError() << std::endl;
//if (trainingIteration==10000)
// break;
}
std::cout << std::endl << "Training Complete" << std::endl;
}
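For reference, activationFuncDerivative relies on the sigmoid identity sigma'(x) = sigma(x) * (1 - sigma(x)). Below is a minimal standalone sanity check (a separate, hypothetical file, not part of this project's build) that compares the closed-form derivative against a central finite-difference approximation:

// sigmoid_check.cpp -- standalone sketch, assumes nothing about Net.h or TrainingData.h.
#include <cmath>
#include <cstdio>

static double sigmoid(double x) { return 1.0 / (1.0 + std::exp(-x)); }

int main()
{
    const double h = 1e-6; // step size for the finite-difference estimate
    for (double x : {-2.0, 0.0, 0.5, 3.0}) {
        double analytic = sigmoid(x) * (1.0 - sigmoid(x));
        double numeric  = (sigmoid(x + h) - sigmoid(x - h)) / (2.0 * h);
        std::printf("x=% .2f  analytic=%.6f  numeric=%.6f\n", x, analytic, numeric);
    }
    return 0;
}

The two columns should agree to several decimal places at every sampled x, which is the property the training loop depends on when backProp scales the error by the activation's derivative.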