forked from opengazer/OpenGazer
-
Notifications
You must be signed in to change notification settings - Fork 21
/
Copy pathGazeTracker.cpp
executable file
·154 lines (113 loc) · 4.89 KB
/
GazeTracker.cpp
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
#include <fstream>
#include <boost/lexical_cast.hpp>
#include "GazeTracker.h"
#include "Point.h"
#include "mir.h"
#include "Application.h"
// No-op deleter for Utils::SharedImage: used when wrapping a cv::Mat whose
// memory is owned elsewhere (see updateEstimations(), which wraps the
// EyeExtractor's internal eye images), so the shared pointer must not free it.
static void ignore(const cv::Mat *) {
}
GazeTracker::GazeTracker()
{
	// Component pointers are resolved lazily in process(); start unset.
	_pointTracker = NULL;
	_eyeExtractor = NULL;

	// Initial gaze estimate sits at the screen origin.
	gazePoint.x = 0;
	gazePoint.y = 0;
}
bool GazeTracker::isActive() {
return _gaussianProcessX.get() && _gaussianProcessY.get();
}
// Record the averaged eye images (one per eye) for the current calibration
// target, then retrain the Gaussian process estimators with the new sample.
void GazeTracker::addExemplar() {
	_calibrationTargetImagesLeft.push_back(_eyeExtractor->averageEyeLeft->getMean());
	_calibrationTargetImages.push_back(_eyeExtractor->averageEye->getMean());
	updateGaussianProcesses();
}
// Wipe all calibration state so a fresh calibration can begin.
void GazeTracker::clear() {
	// Drop the trained estimators first so isActive() reports false again.
	_gaussianProcessX.reset(NULL);
	_gaussianProcessY.reset(NULL);
	_gaussianProcessXLeft.reset(NULL);
	_gaussianProcessYLeft.reset(NULL);

	// Discard every stored calibration sample (per-target averages and
	// per-frame image/point collections).
	_calibrationTargetImages.clear();
	_calibrationTargetImagesLeft.clear();
	_calibrationTargetImagesAllFrames.clear();
	_calibrationTargetImagesLeftAllFrames.clear();
	_calibrationTargetPointsAllFrames.clear();
}
void GazeTracker::draw() {
if (!Application::Data::isTrackingSuccessful)
return;
cv::Mat image = Application::Components::videoInput->debugFrame;
// If not blinking, draw the estimations to debug window
if (isActive() && !_eyeExtractor->isBlinking()) {
cv::Point estimation(gazePoint.x, gazePoint.y);
cv::circle(image,
Utils::mapFromSecondMonitorToDebugFrameCoordinates(estimation),
8, cv::Scalar(0, 255, 0), -1, 8, 0);
}
}
void GazeTracker::process() {
if(_pointTracker == NULL) {
_pointTracker = (PointTracker*) Application::getComponent("PointTracker");
}
if(_eyeExtractor == NULL) {
_eyeExtractor = (EyeExtractor*) Application::getComponent("EyeExtractor");
}
if (!Application::Data::isTrackingSuccessful) {
return;
}
// If recalibration is necessary (there is a new target), recalibrate the Gaussian Processes
if(Application::Components::calibrator->needRecalibration) {
addExemplar();
}
if(Application::Components::calibrator->isActive()
&& Application::Components::calibrator->getPointFrameNo() >= 11
&& !_eyeExtractor->isBlinking()) {
// Add current sample (not the average, but sample from each usable frame) to the vector
cv::Mat *temp = new cv::Mat(EyeExtractor::eyeSize, CV_32FC1);
_eyeExtractor->eyeFloat.copyTo(*temp);
Utils::SharedImage temp2(new cv::Mat(temp->size(), temp->type()), Utils::releaseImage);
_calibrationTargetImagesAllFrames.push_back(temp2);
// Repeat for left eye
temp = new cv::Mat(EyeExtractor::eyeSize, CV_32FC1);
_eyeExtractor->eyeFloatLeft.copyTo(*temp);
Utils::SharedImage temp3(new cv::Mat(temp->size(), temp->type()), Utils::releaseImage);
_calibrationTargetImagesLeftAllFrames.push_back(temp3);
_calibrationTargetPointsAllFrames.push_back(Application::Components::calibrator->getActivePoint());
}
// Update the left and right estimations
updateEstimations();
}
void GazeTracker::updateEstimations() {
if (isActive()) {
cv::Mat *image = &_eyeExtractor->eyeFloat;
cv::Mat *leftImage = &_eyeExtractor->eyeFloatLeft;
gazePoint.x = (_gaussianProcessX->getmean(Utils::SharedImage(image, &ignore)) + _gaussianProcessXLeft->getmean(Utils::SharedImage(leftImage, &ignore))) / 2;
gazePoint.y = (_gaussianProcessY->getmean(Utils::SharedImage(image, &ignore)) + _gaussianProcessYLeft->getmean(Utils::SharedImage(leftImage, &ignore))) / 2;
// Make sure estimation stays in the screen area
Utils::boundToScreenArea(gazePoint);
}
}
// Squared Euclidean (L2) distance between two images, used by the GP kernel.
double GazeTracker::imageDistance(const cv::Mat *image1, const cv::Mat *image2) {
	const double l2 = cv::norm(*image1, *image2, CV_L2);
	return l2 * l2;
}
// Squared-exponential (RBF) kernel over eye images for the Gaussian processes.
// Hyperparameters are read from the configuration once and cached.
double GazeTracker::covarianceFunction(Utils::SharedImage const &image1, Utils::SharedImage const &image2) {
	static double sigma = Utils::getParameterAsDouble("sigma", 2.0);
	static double lscale = Utils::getParameterAsDouble("lscale", 2000.0);

	const double squaredDistance = imageDistance(image1.get(), image2.get());
	const double signalVariance = sigma * sigma;
	return signalVariance * exp(-squaredDistance / (2 * lscale * lscale));
}
void GazeTracker::updateGaussianProcesses() {
std::vector<double> xLabels;
std::vector<double> yLabels;
// Prepare separate vector of targets for X and Y directions
for (int i = 0; i < Application::Data::calibrationTargets.size(); i++) {
xLabels.push_back(Application::Data::calibrationTargets[i].x);
yLabels.push_back(Application::Data::calibrationTargets[i].y);
}
_gaussianProcessX.reset(new ImProcess(_calibrationTargetImages, xLabels, covarianceFunction, 0.01));
_gaussianProcessY.reset(new ImProcess(_calibrationTargetImages, yLabels, covarianceFunction, 0.01));
_gaussianProcessXLeft.reset(new ImProcess(_calibrationTargetImagesLeft, xLabels, covarianceFunction, 0.01));
_gaussianProcessYLeft.reset(new ImProcess(_calibrationTargetImagesLeft, yLabels, covarianceFunction, 0.01));
}