body-tracking-skeleton-2d-cpu.html

<!DOCTYPE html>
<html>
<head>
  <meta charset="UTF-8">
  <title>Kinect Azure Example</title>
  <link rel="stylesheet" href="../assets/vendors/bootstrap-4.3.1-dist/css/bootstrap.css">
  <link rel="stylesheet" href="../assets/vendors/bootstrap-4.3.1-dist/css/docs.min.css">
</head>
<body class="container-fluid py-3">
<div class="d-flex align-items-baseline justify-content-between">
<h1 class="bd-title">Body Tracking (2D, CPU mode)</h1>
<button onclick="require('electron').remote.getCurrentWebContents().openDevTools()">open dev tools</button>
</div>
  <p>
    This demo shows the 2D skeleton information, rendered on top of the depth feed, and uses the CPU instead of the GPU for body tracking.
  </p>
  <canvas id="outputCanvas" class="img-fluid"></canvas>
  <div class="row">
    <div class="col col-auto">Renderer: <div id="statsRenderer"></div></div>
    <div class="col col-auto">Kinect: <div id="statsKinect"></div></div>
  </div>
  <script src="../assets/vendors/stats.min.js"></script>
  <script>
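  // the block scope below keeps the demo's const/let bindings out of the global scope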
  {
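    // two stats.js meters: one tracks the render loop framerate, the other the rate of incoming Kinect frames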
    const statsRenderer = new Stats();
    statsRenderer.dom.style.cssText = ''; // clear stats.js' default fixed positioning so the meter renders inline
    document.getElementById('statsRenderer').appendChild( statsRenderer.dom );
    const statsKinect = new Stats();
    statsKinect.dom.style.cssText = '';
    document.getElementById('statsKinect').appendChild( statsKinect.dom );
    const KinectAzure = require('kinect-azure');
    const kinect = new KinectAzure();
    const $outputCanvas = document.getElementById('outputCanvas'),
      outputCtx = $outputCanvas.getContext('2d');
    let outputImageData, depthModeRange;
    const init = () => {
      startKinect();
      animate();
    };
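    // opens the device, starts the depth camera and the body tracker, and hooks up the frame listener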
    const startKinect = () => {
      if (kinect.open()) {
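        // only the depth stream is needed for this demo; NFOV unbinned yields a 640x576 depth image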
        kinect.startCameras({
          depth_mode: KinectAzure.K4A_DEPTH_MODE_NFOV_UNBINNED
        });
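        // the minimum and maximum measurable depth (in millimeters) for the chosen depth mode,
        // used below to normalize depth readings to greyscale values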
        depthModeRange = kinect.getDepthModeRange(KinectAzure.K4A_DEPTH_MODE_NFOV_UNBINNED);
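        // run the body tracker on the CPU; the default GPU mode requires a CUDA-capable graphics card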
        kinect.createTracker({
          processing_mode: KinectAzure.K4ABT_TRACKER_PROCESSING_MODE_CPU
        });
        kinect.startListening((data) => {
          statsKinect.update();
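          // lazily size the canvas and create the image data buffer once the first valid depth frame arrives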
          if (!outputImageData && data.depthImageFrame.width > 0) {
            $outputCanvas.width = data.depthImageFrame.width;
            $outputCanvas.height = data.depthImageFrame.height;
            outputImageData = outputCtx.createImageData($outputCanvas.width, $outputCanvas.height);
          }
          if (outputImageData) {
            renderDepthFrameAsGreyScale(outputCtx, outputImageData, data.depthImageFrame);
          }
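          // bodyFrame.bodies holds one entry per tracked person, each with a full skeleton of joints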
          if (data.bodyFrame.bodies) {
            // render the skeleton joints on top of the depth feed
            outputCtx.save();
            outputCtx.fillStyle = 'red';
            data.bodyFrame.bodies.forEach(body => {
              body.skeleton.joints.forEach(joint => {
                // use the joint coordinates in depth space, since we are drawing on the depth image
                outputCtx.fillRect(joint.depthX, joint.depthY, 10, 10);
              });
            });
            outputCtx.restore();
          }
        });
      }
    };
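    // converts the 16-bit depth frame to an 8-bit greyscale image: nearby pixels render bright, distant ones dark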
    const renderDepthFrameAsGreyScale = (ctx, canvasImageData, imageFrame) => {
      const newPixelData = Buffer.from(imageFrame.imageData);
      const pixelArray = canvasImageData.data;
      let depthPixelIndex = 0;
      for (let i = 0; i < canvasImageData.data.length; i += 4) {
        // each depth pixel is an unsigned 16-bit value in millimeters, stored little-endian (low byte first)
        const depthValue = newPixelData[depthPixelIndex + 1] << 8 | newPixelData[depthPixelIndex];
        const normalizedValue = map(depthValue, depthModeRange.min, depthModeRange.max, 255, 0);
        pixelArray[i] = normalizedValue;
        pixelArray[i + 1] = normalizedValue;
        pixelArray[i + 2] = normalizedValue;
        pixelArray[i + 3] = 0xff;
        depthPixelIndex += 2;
      }
      ctx.putImageData(canvasImageData, 0, 0);
    };
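    // maps a value linearly from one range onto another; e.g. if depthModeRange were { min: 500, max: 3860 }
    // (the approximate NFOV unbinned operating range in mm; the exact values reported by getDepthModeRange
    // may differ), a reading of 500 mm maps to 255 (white), ~2180 mm to ~128 (mid grey), 3860 mm to 0 (black)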
    const map = (value, inputMin, inputMax, outputMin, outputMax) => {
      return (value - inputMin) * (outputMax - outputMin) / (inputMax - inputMin) + outputMin;
    };
    const animate = () => {
      statsRenderer.update();
      requestAnimationFrame( animate );
    };
    // expose the kinect instance to the window object in this demo app to allow the parent window to close it between sessions
    window.kinect = kinect;
    init();
  }
  </script>
</body>
</html>