t1 / TFDContents / Assets / KinectScripts / Interfaces / Kinect2UwpInterface.cs @ 10
#if UNITY_WSA_10_0 && NETFX_CORE
using UnityEngine;
using System.Collections;

using MultiK2;
using System;
using System.Threading;
using System.Threading.Tasks;
using System.Runtime.InteropServices;
using System.Runtime.InteropServices.WindowsRuntime;
using Windows.UI.Xaml.Media.Imaging;
using Windows.Graphics.Imaging;
using MultiK2.Tracking;
using Windows.Media.Capture.Frames;
using Windows.Media.SpeechRecognition;


public class Kinect2UwpInterface : DepthSensorInterface
{
	private KinectInterop.SensorData sensorData;
	private KinectInterop.FrameSource sensorFlags;

	private Sensor _kinectSensor;
	private ColorFrameReader _colorReader;
	private DepthFrameReader _depthReader;
	private BodyIndexFrameReader _bodyIndexReader;
	private BodyFrameReader _bodyReader;

	private CameraIntrinsics _colorCameraIntrinsics;
	private CameraIntrinsics _depthCameraIntrinsics;
	private CoordinateMapper _coordinateMapper;
	private CoordinateMapper2 _coordinateMapper2;

	private SpeechRecognizer speechRecognizer;
	private Task<SpeechRecognitionResult> speechRecognizeTask;
	private float requiredPhraseConfidence = 0f;

	private bool isPhraseRecognized;
	private string recognizedPhraseTag;
	private float recognizedPhraseConfidence;

	private byte[] _colorDataBuf = null;
	private bool _colorDataReady = false;
	private long _colorDataTime = 0;
	private object _colorDataLock = new object();

	private byte[] _depthDataBuf = null;
	private bool _depthDataReady = false;
	private long _depthDataTime = 0;
	private object _depthDataLock = new object();

	private byte[] _bodyIndexDataBuf = null;
	private bool _bodyIndexDataReady = false;
	private long _bodyIndexDataTime = 0;
	private object _bodyIndexDataLock = new object();

	private BodyFrame _bodyFrame = null;
	private bool _bodyFrameReady = false;
	private long _bodyFrameTime = 0;
	private object _bodyFrameLock = new object();

	private bool _isDoubleDepthBufNeeded = false;
	private ushort[] _lastDepthDataBuf = null;
	private long _lastDepthDataTime = 0;

	private bool _depth2spaceTaskStarted = false;
	//private bool _depth2colorTaskStarted = false;
	//private bool _color2depthTaskStarted = false;

	private bool _saveLatestFrames = false;
	private bool _clearLatestFrames = false;

	private MediaFrameReference _latestColorFrame = null;
	private MediaFrameReference _latestDepthFrame = null;
	private MediaFrameReference _latestBodyIndexFrame = null;
	private MediaFrameReference _latestBodyFrame = null;
	private MediaFrameReference _latestInfraredFrame = null;

	private System.Numerics.Vector3[] _color2SpacePoints = null;
	//private float[] _color2depthDepth = null;

	private Vector3[] _depth2SpaceTable = null;

	private ComputeShader _coordMapperShader = null;
	private int _depth2colorKernel = 0;
	private int _color2depthKernel = 0;

	private ComputeBuffer _depthPlaneCoordsBuf = null;
	private ComputeBuffer _depthDepthValuesBuf = null;
	private ComputeBuffer _colorPlaneCoordsBuf = null;
	private ComputeBuffer _colorSpaceCoordsBuf = null;
	private ComputeBuffer _colorDepthCoordsBuf = null;

	private bool _backgroundRemovalInited = false;

	public KinectInterop.DepthSensorPlatform GetSensorPlatform()
	{
		return KinectInterop.DepthSensorPlatform.KinectUWPv2;
	}

	public bool InitSensorInterface(bool bCopyLibs, ref bool bNeedRestart)
	{
		bNeedRestart = false;
		return true;
	}

	public void FreeSensorInterface(bool bDeleteLibs)
	{
	}

	public bool IsSensorAvailable()
	{
		return true;
	}

	public int GetSensorsCount()
	{
		return 1;
	}

	public KinectInterop.SensorData OpenDefaultSensor(KinectInterop.FrameSource dwFlags, float sensorAngle, bool bUseMultiSource)
	{
		if (sensorData == null)
		{
			sensorData = new KinectInterop.SensorData();
		}

		sensorFlags = dwFlags;

		sensorData.bodyCount = 6;
		sensorData.jointCount = 25;

		sensorData.depthCameraFOV = 60f;
		sensorData.colorCameraFOV = 53.8f;
		sensorData.depthCameraOffset = 0f;
		sensorData.faceOverlayOffset = 0f;

		// by-default image widths & heights
		sensorData.colorImageWidth = 1920;
		sensorData.colorImageHeight = 1080;

		sensorData.depthImageWidth = 512;
		sensorData.depthImageHeight = 424;

		_saveLatestFrames = bUseMultiSource;

		Task task = null;
		// UnityEngine.WSA.Application.InvokeOnUIThread(() =>
		// {
		task = InitializeKinect();
		// }, true);

		while (task != null && !task.IsCompleted)
		{
			task.Wait(100);
		}

		return (_kinectSensor != null && _kinectSensor.IsActive) ? sensorData : null;
	}

	private async Task InitializeKinect()
	{
		_kinectSensor = await Sensor.GetDefaultAsync();

		if (_kinectSensor != null)
		{
			await _kinectSensor.OpenAsync();

			if ((sensorFlags & KinectInterop.FrameSource.TypeColor) != 0)
			{
				if (sensorData.colorImage == null)
				{
					sensorData.colorImage = new byte[sensorData.colorImageWidth * sensorData.colorImageHeight * 4];
				}

				_colorReader = await _kinectSensor.OpenColorFrameReaderAsync(ReaderConfig.HalfRate | ReaderConfig.HalfResolution);
				if (_colorReader != null)
				{
					_colorReader.FrameArrived += ColorReader_FrameArrived;
				}
			}

			if ((sensorFlags & KinectInterop.FrameSource.TypeDepth) != 0)
			{
				if (sensorData.depthImage == null)
				{
					sensorData.depthImage = new ushort[sensorData.depthImageWidth * sensorData.depthImageHeight];
				}

				_depthReader = await _kinectSensor.OpenDepthFrameReaderAsync();
				if (_depthReader != null)
				{
					_depthReader.FrameArrived += DepthReader_FrameArrived;
				}
			}

			if ((sensorFlags & KinectInterop.FrameSource.TypeBodyIndex) != 0)
			{
				if (sensorData.bodyIndexImage == null)
				{
					sensorData.bodyIndexImage = new byte[sensorData.depthImageWidth * sensorData.depthImageHeight];
				}

				_bodyIndexReader = await _kinectSensor.OpenBodyIndexFrameReaderAsync();
				if (_bodyIndexReader != null)
				{
					_bodyIndexReader.FrameArrived += BodyIndexReader_FrameArrived;
				}
			}

			if ((sensorFlags & KinectInterop.FrameSource.TypeBody) != 0)
			{
				_bodyReader = await _kinectSensor.OpenBodyFrameReaderAsync();
				if (_bodyReader != null)
				{
					_bodyReader.FrameArrived += BodyReader_FrameArrived;
				}
			}

			// get the coordinate mapper
			_coordinateMapper = _kinectSensor.GetCoordinateMapper();
			_coordinateMapper2 = new CoordinateMapper2();

			Debug.Log("UWP-K2 sensor opened");
		}
		else
		{
			Debug.Log("UWP-K2 sensor not found");
		}
	}
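
	// Note: the FrameArrived handlers below are raised by the MultiK2 frame readers, typically on
	// threads other than Unity's main thread. Each handler copies the incoming frame data into an
	// intermediate buffer under its lock and sets the matching *DataReady flag; the Poll*Frame()
	// methods then consume those buffers from the Unity main thread.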

	private void ColorReader_FrameArrived(object sender, ColorFrameArrivedEventArgs e)
	{
		_colorCameraIntrinsics = e.CameraIntrinsics;

		if (_colorDataBuf == null || sensorData.colorImageWidth != e.Bitmap.PixelWidth || sensorData.colorImageHeight != e.Bitmap.PixelHeight)
		{
			sensorData.colorImageWidth = e.Bitmap.PixelWidth;
			sensorData.colorImageHeight = e.Bitmap.PixelHeight;

			int imageLen = e.Bitmap.PixelWidth * e.Bitmap.PixelHeight * 4;

			lock (_colorDataLock)
			{
				//_colorDataBuf = new byte[imageLen];
				//sensorData.colorImage = new byte[imageLen];
				Array.Resize<byte>(ref _colorDataBuf, imageLen);
				Array.Resize<byte>(ref sensorData.colorImage, imageLen);
			}
		}

		if (_colorDataBuf != null)
		{
			// convert the bitmap
			SoftwareBitmap convertedBitmap = SoftwareBitmap.Convert(e.Bitmap, BitmapPixelFormat.Rgba8, BitmapAlphaMode.Straight);

			lock (_colorDataLock)
			{
				convertedBitmap?.CopyToBuffer(_colorDataBuf.AsBuffer());
				convertedBitmap?.Dispose();

				if (_saveLatestFrames)
				{
					_latestColorFrame = e.Frame;
				}

				_colorDataTime = DateTime.Now.Ticks; // colorFrame.RelativeTime.Ticks;
				_colorDataReady = true;
			}
		}
	}

	private void DepthReader_FrameArrived(object sender, DepthFrameArrivedEventArgs e)
	{
		_depthCameraIntrinsics = e.CameraIntrinsics;

		if (_depthDataBuf == null || sensorData.depthImageWidth != e.Bitmap.PixelWidth || sensorData.depthImageHeight != e.Bitmap.PixelHeight)
		{
			sensorData.depthImageWidth = e.Bitmap.PixelWidth;
			sensorData.depthImageHeight = e.Bitmap.PixelHeight;

			int imageLen = e.Bitmap.PixelWidth * e.Bitmap.PixelHeight * sizeof(ushort);

			lock (_depthDataLock)
			{
				//_depthDataBuf = new byte[imageLen];
				//sensorData.depthImage = new ushort[e.Bitmap.PixelWidth * e.Bitmap.PixelHeight];
				Array.Resize<byte>(ref _depthDataBuf, imageLen);
				Array.Resize<ushort>(ref sensorData.depthImage, e.Bitmap.PixelWidth * e.Bitmap.PixelHeight);
			}

			int biImageLen = e.Bitmap.PixelWidth * e.Bitmap.PixelHeight;

			lock (_bodyIndexDataLock)
			{
				//_bodyIndexDataBuf = new byte[biImageLen];
				//sensorData.bodyIndexImage = new byte[biImageLen];
				Array.Resize<byte>(ref _bodyIndexDataBuf, biImageLen);
				Array.Resize<byte>(ref sensorData.bodyIndexImage, biImageLen);
			}
		}

		if (_depthDataBuf != null)
		{
			lock (_depthDataLock)
			{
				e.Bitmap.CopyToBuffer(_depthDataBuf.AsBuffer());

				if (_saveLatestFrames)
				{
					_latestDepthFrame = e.Frame;
				}

				_depthDataTime = DateTime.Now.Ticks; // depthFrame.RelativeTime.Ticks;
				_depthDataReady = true;
			}
		}
	}

	private void BodyIndexReader_FrameArrived(object sender, BodyIndexFrameArrivedEventArgs e)
	{
		if (_bodyIndexDataBuf != null)
		{
			lock (_bodyIndexDataLock)
			{
				e.Bitmap.CopyToBuffer(_bodyIndexDataBuf.AsBuffer());

				if (_saveLatestFrames)
				{
					_latestBodyIndexFrame = e.Frame;
				}

				_bodyIndexDataTime = DateTime.Now.Ticks; // bodyIndexFrame.RelativeTime.Ticks;
				_bodyIndexDataReady = true;
			}
		}
	}

	private void BodyReader_FrameArrived(object sender, BodyFrameArrivedEventArgs e)
	{
		lock (_bodyFrameLock)
		{
			_bodyFrame = e.BodyFrame;

			if (_saveLatestFrames)
			{
				_latestBodyFrame = e.Frame;
			}

			_bodyFrameTime = DateTime.Now.Ticks; // _bodyFrame.SystemRelativeTime.Value.Ticks;
			_bodyFrameReady = true;
		}
	}

	public void CloseSensor(KinectInterop.SensorData sensorData)
	{
		// UnityEngine.WSA.Application.InvokeOnUIThread(() =>
		// {
		_kinectSensor?.CloseAsync();
		Debug.Log("UWP-K2 sensor closed");
		// }, true);

		if (_depthPlaneCoordsBuf != null)
		{
			_depthPlaneCoordsBuf.Release();
			_depthPlaneCoordsBuf = null;
		}

		if (_depthDepthValuesBuf != null)
		{
			_depthDepthValuesBuf.Release();
			_depthDepthValuesBuf = null;
		}

		if (_colorPlaneCoordsBuf != null)
		{
			_colorPlaneCoordsBuf.Release();
			_colorPlaneCoordsBuf = null;
		}

		if (_colorSpaceCoordsBuf != null)
		{
			_colorSpaceCoordsBuf.Release();
			_colorSpaceCoordsBuf = null;
		}

		if (_colorDepthCoordsBuf != null)
		{
			_colorDepthCoordsBuf.Release();
			_colorDepthCoordsBuf = null;
		}

		_colorCameraIntrinsics = null;
		_depthCameraIntrinsics = null;
		_coordinateMapper = null;
		_coordinateMapper2 = null;

		_coordMapperShader = null;
		_lastDepthDataBuf = null;

		_clearLatestFrames = true;
		FreeMultiSourceFrame(sensorData);
	}

	public bool UpdateSensorData(KinectInterop.SensorData sensorData)
	{
		return true;
	}

	public bool GetMultiSourceFrame(KinectInterop.SensorData sensorData)
	{
		if (_saveLatestFrames)
		{
			bool bAllSet =
				((sensorFlags & KinectInterop.FrameSource.TypeColor) == 0 || _latestColorFrame != null) &&
				((sensorFlags & KinectInterop.FrameSource.TypeDepth) == 0 || _latestDepthFrame != null) &&
				((sensorFlags & KinectInterop.FrameSource.TypeBodyIndex) == 0 || _latestBodyIndexFrame != null) &&
				((sensorFlags & KinectInterop.FrameSource.TypeBody) == 0 || _latestBodyFrame != null) &&
				((sensorFlags & KinectInterop.FrameSource.TypeInfrared) == 0 || _latestInfraredFrame != null);

			return bAllSet;
		}

		return false;
	}

	public void FreeMultiSourceFrame(KinectInterop.SensorData sensorData)
	{
		if (_clearLatestFrames)
		{
			lock (_colorDataLock)
			{
				_latestColorFrame = null;
			}

			lock (_depthDataLock)
			{
				_latestDepthFrame = null;
			}

			lock (_bodyIndexDataLock)
			{
				_latestBodyIndexFrame = null;
			}

			lock (_bodyFrameLock)
			{
				_latestBodyFrame = null;
			}

			_clearLatestFrames = false;
		}
	}

	public bool PollBodyFrame(KinectInterop.SensorData sensorData, ref KinectInterop.BodyFrameData bodyFrame, ref Matrix4x4 kinectToWorld, bool bIgnoreJointZ)
	{
		bool bNewFrame = _bodyFrameReady;

		if (_bodyFrameReady)
		{
			lock (_bodyFrameLock)
			{
				bodyFrame.liPreviousTime = bodyFrame.liRelativeTime;
				bodyFrame.liRelativeTime = _bodyFrameTime;

				if (sensorData.hintHeightAngle)
				{
					//// get the floor plane
					//Windows.Kinect.Vector4 vFloorPlane = _bodyFrame.FloorClipPlane;
					//Vector3 floorPlane = new Vector3(vFloorPlane.X, vFloorPlane.Y, vFloorPlane.Z);

					//sensorData.sensorRotDetected = Quaternion.FromToRotation(floorPlane, Vector3.up);
					//sensorData.sensorHgtDetected = vFloorPlane.W;
				}

				for (int i = 0; i < sensorData.bodyCount; i++)
				{
					Body body = i < _bodyFrame.Bodies.Length ? _bodyFrame.Bodies[i] : null;

					if (body == null)
					{
						bodyFrame.bodyData[i].bIsTracked = 0;
						continue;
					}

					bodyFrame.bodyData[i].bIsTracked = (short)(body.IsTracked ? 1 : 0);

					if (body.IsTracked)
					{
						// transfer body and joints data
						byte[] entityBytes = body.EntityId.ToByteArray();
						bodyFrame.bodyData[i].liTrackingID = BitConverter.ToInt64(entityBytes, 8);

						// cache the body joints (following the advice of Brian Chasalow)
						//Dictionary<Windows.Kinect.JointType, Windows.Kinect.Joint> bodyJoints = body.Joints;

						// calculate the inter-frame time
						float frameTime = 0f;
						if (bodyFrame.bTurnAnalisys && bodyFrame.liPreviousTime > 0)
						{
							frameTime = (float)(bodyFrame.liRelativeTime - bodyFrame.liPreviousTime) / 100000000000;
						}

						for (int j = 0; j < sensorData.jointCount; j++)
						{
							if (j >= body.Joints.Count)
								continue;

							MultiK2.Tracking.Joint joint = body.Joints[(MultiK2.Tracking.JointType)j];
							KinectInterop.JointData jointData = bodyFrame.bodyData[i].joint[j];

							//jointData.jointType = (KinectInterop.JointType)j;
							jointData.trackingState = (KinectInterop.TrackingState)joint.PositionTrackingState;

							if ((int)joint.PositionTrackingState != (int)TrackingState.NotTracked)
							{
								float jPosZ = (bIgnoreJointZ && j > 0) ? bodyFrame.bodyData[i].joint[0].kinectPos.z : joint.Position.Z;
								jointData.kinectPos = new Vector3(joint.Position.X, joint.Position.Y, joint.Position.Z);
								jointData.position = kinectToWorld.MultiplyPoint3x4(new Vector3(joint.Position.X, joint.Position.Y, jPosZ));
							}

							jointData.orientation = Quaternion.identity;

							if (j == 0)
							{
								bodyFrame.bodyData[i].position = jointData.position;
								bodyFrame.bodyData[i].orientation = jointData.orientation;
							}

							bodyFrame.bodyData[i].joint[j] = jointData;
						}

						//if (bodyFrame.bTurnAnalisys && bodyFrame.liPreviousTime > 0)
						//{
						//	for (int j = 0; j < sensorData.jointCount; j++)
						//	{
						//		KinectInterop.JointData jointData = bodyFrame.bodyData[i].joint[j];

						//		int p = (int)GetParentJoint((KinectInterop.JointType)j);
						//		Vector3 parentPos = bodyFrame.bodyData[i].joint[p].position;

						//		jointData.posRel = jointData.position - parentPos;
						//		jointData.posDrv = frameTime > 0f ? (jointData.position - jointData.posPrev) / frameTime : Vector3.zero;
						//		jointData.posPrev = jointData.position;

						//		bodyFrame.bodyData[i].joint[j] = jointData;
						//	}
						//}

						// transfer hand states
						bodyFrame.bodyData[i].leftHandState = (KinectInterop.HandState)body.HandStateLeft;
						bodyFrame.bodyData[i].leftHandConfidence = (KinectInterop.TrackingConfidence)body.ConfidenceLeft;

						bodyFrame.bodyData[i].rightHandState = (KinectInterop.HandState)body.HandStateRight;
						bodyFrame.bodyData[i].rightHandConfidence = (KinectInterop.TrackingConfidence)body.ConfidenceRight;
					}
				}

				_bodyFrameReady = false;
			}
		}

		return bNewFrame;
	}

	public bool PollColorFrame(KinectInterop.SensorData sensorData)
	{
		bool bNewFrame = _colorDataReady;

		if (_colorDataReady)
		{
			lock (_colorDataLock)
			{
				Buffer.BlockCopy(_colorDataBuf, 0, sensorData.colorImage, 0, _colorDataBuf.Length);
				sensorData.lastColorFrameTime = _colorDataTime;
				_colorDataReady = false;
			}
		}

		return bNewFrame;
	}

	public bool PollDepthFrame(KinectInterop.SensorData sensorData)
	{
		bool bNewFrame = _depthDataReady || _bodyIndexDataReady;

		if (_depthDataReady)
		{
			if (_isDoubleDepthBufNeeded)
			{
				if (_lastDepthDataBuf == null)
				{
					_lastDepthDataBuf = new ushort[sensorData.depthImage.Length];
				}

				Buffer.BlockCopy(sensorData.depthImage, 0, _lastDepthDataBuf, 0, _lastDepthDataBuf.Length * sizeof(ushort));
				_lastDepthDataTime = sensorData.lastDepthFrameTime;
			}

			lock (_depthDataLock)
			{
				Buffer.BlockCopy(_depthDataBuf, 0, sensorData.depthImage, 0, _depthDataBuf.Length);
				sensorData.lastDepthFrameTime = _depthDataTime;
				_depthDataReady = false;
			}
		}

		if (_bodyIndexDataReady)
		{
			lock (_bodyIndexDataLock)
			{
				Buffer.BlockCopy(_bodyIndexDataBuf, 0, sensorData.bodyIndexImage, 0, _bodyIndexDataBuf.Length);
				sensorData.lastBodyIndexFrameTime = _bodyIndexDataTime;
				_bodyIndexDataReady = false;
			}
		}

		return bNewFrame;
	}

	public bool PollInfraredFrame(KinectInterop.SensorData sensorData)
	{
		return false;
	}

	public void FixJointOrientations(KinectInterop.SensorData sensorData, ref KinectInterop.BodyData bodyData)
	{
	}

	public bool IsBodyTurned(ref KinectInterop.BodyData bodyData)
	{
		return false;
	}

	public Vector2 MapSpacePointToDepthCoords(KinectInterop.SensorData sensorData, Vector3 spacePos)
	{
		Vector2 vPoint = Vector2.zero;

		if (_depthCameraIntrinsics != null)
		{
			System.Numerics.Vector3 camPoint = new System.Numerics.Vector3(spacePos.x, spacePos.y, spacePos.z);
			System.Numerics.Vector2 depthPoint = _depthCameraIntrinsics.ProjectOntoFrame(camPoint);

			if (depthPoint.X >= 0 && depthPoint.X < sensorData.depthImageWidth &&
				depthPoint.Y >= 0 && depthPoint.Y < sensorData.depthImageHeight)
			{
				vPoint = new Vector2(depthPoint.X, depthPoint.Y);
			}
		}

		return vPoint;
	}

	public Vector3 MapDepthPointToSpaceCoords(KinectInterop.SensorData sensorData, Vector2 depthPos, ushort depthVal)
	{
		Vector3 vPoint = Vector3.zero;

		if (_depthCameraIntrinsics != null && depthPos != Vector2.zero)
		{
			System.Numerics.Vector2 depthPoint = new System.Numerics.Vector2(depthPos.x, depthPos.y);
			System.Numerics.Vector3 camPoint = _depthCameraIntrinsics.UnprojectFromFrame(depthPoint, (float)depthVal / 1000f);

			vPoint = new Vector3(camPoint.X, camPoint.Y, camPoint.Z);
		}

		return vPoint;
	}

	public bool MapDepthFrameToSpaceCoords(KinectInterop.SensorData sensorData, ref Vector3[] vSpaceCoords)
	{
		_isDoubleDepthBufNeeded = true;

		if (_depthCameraIntrinsics != null && sensorData.depthImage != null)
		{
			int depthImageLength = sensorData.depthImageWidth * sensorData.depthImageHeight;

			if (_depth2SpaceTable == null || _depth2SpaceTable.Length != depthImageLength)
			{
				_depth2SpaceTable = new Vector3[depthImageLength];

				for (int dy = 0, di = 0; dy < sensorData.depthImageHeight; dy++)
				{
					for (int dx = 0; dx < sensorData.depthImageWidth; dx++)
					{
						System.Numerics.Vector2 depthPoint = new System.Numerics.Vector2(dx, dy);
						System.Numerics.Vector3 camPoint = _depthCameraIntrinsics.UnprojectFromFrame(depthPoint, 1f);

						_depth2SpaceTable[di] = new Vector3(camPoint.X, camPoint.Y, camPoint.Z);
						di++;
					}
				}
			}

			if (_lastDepthDataBuf != null && !_depth2spaceTaskStarted)
			{
				_depth2spaceTaskStarted = true;

				//long timeStamp = DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond;

				//Task.Run( () => {

				for (int dy = 0, di = 0; dy < sensorData.depthImageHeight; dy++)
				{
					for (int dx = 0; dx < sensorData.depthImageWidth; dx++)
					{
						if (di >= 0 && di < _lastDepthDataBuf.Length &&
							sensorData.depthImage[di] != _lastDepthDataBuf[di])
						{
							if (sensorData.depthImage[di] != 0)
							{
								float depthVal = (float)sensorData.depthImage[di] / 1000f;
								vSpaceCoords[di] = _depth2SpaceTable[di] * depthVal;
							}
							else
							{
								vSpaceCoords[di] = Vector3.zero;
							}
						}

						di++;
					}
				}

				_depth2spaceTaskStarted = false;
				//});

				//long timeDuration = (DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond - timeStamp);
				//Debug.Log("depth2spaceTask() took " + timeDuration + " ms");
			}
		}

		return true;
	}
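
	// Note: the frame-level mapping methods below (MapDepthFrameToColorCoords / MapColorFrameToDepthCoords)
	// run on the GPU via the "CoordMapper" compute shader loaded from Resources by CreateCoordMapperShader().
	// The CPU side only uploads the camera intrinsics, the space-to-space matrix and the per-pixel depth
	// values into compute buffers, dispatches the kernel and reads the mapped coordinates back.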

	public Vector2 MapDepthPointToColorCoords(KinectInterop.SensorData sensorData, Vector2 depthPos, ushort depthVal)
	{
		Vector2 vPoint = Vector2.zero;

		if (_coordinateMapper != null && _depthCameraIntrinsics != null && _colorCameraIntrinsics != null && depthPos != Vector2.zero)
		{
			System.Numerics.Vector2 depthPoint = new System.Numerics.Vector2(depthPos.x, depthPos.y);
			System.Numerics.Vector3 depthSpace = _depthCameraIntrinsics.UnprojectFromFrame(depthPoint, (float)depthVal / 1000f);
			System.Numerics.Vector3 colorSpace = _coordinateMapper.MapDepthSpacePointToColor(depthSpace);
			System.Numerics.Vector2 colorPoint = _colorCameraIntrinsics.ProjectOntoFrame(colorSpace);

			vPoint = new Vector2(colorPoint.X, colorPoint.Y);
		}

		return vPoint;
	}

	public bool MapDepthFrameToColorCoords(KinectInterop.SensorData sensorData, ref Vector2[] vColorCoords)
	{
		bool bReadyToMap = //_saveLatestFrames ? (_latestColorFrame != null && _latestDepthFrame != null) :
			sensorData.depthImage != null && sensorData.colorImage != null;

		if (bReadyToMap)
		{
			if (_coordMapperShader == null || _colorPlaneCoordsBuf == null)
			{
				CreateCoordMapperShader(sensorData, false);
			}

			if (_coordMapperShader)
			{
				//long timeStamp = DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond;

				int depthImageLength = sensorData.depthImageWidth * sensorData.depthImageHeight;
				int[] depthDepthValues = new int[depthImageLength];

				for (int di = 0; di < depthImageLength; di++)
				{
					depthDepthValues[di] = sensorData.depthImage[di];
				}

				_depthDepthValuesBuf.SetData(depthDepthValues);

				_coordMapperShader.Dispatch(_depth2colorKernel, depthImageLength / 64, 1, 1);

				if (vColorCoords == null || vColorCoords.Length != depthImageLength)
				{
					vColorCoords = new Vector2[depthImageLength];
				}

				_colorPlaneCoordsBuf.GetData(vColorCoords);

				//long timeDuration = (DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond - timeStamp);
				//Debug.Log("depth2colorTask() took " + timeDuration + " ms");
			}

			_clearLatestFrames = true;
		}

		return true;
	}

	private bool CreateCoordMapperShader(KinectInterop.SensorData sensorData, bool bColor2Depth)
	{
		if (_depthCameraIntrinsics == null || _colorCameraIntrinsics == null || _coordinateMapper == null)
			return false;

		System.Numerics.Matrix4x4? matrix = !bColor2Depth ? _coordinateMapper.DepthToColorMatrix : _coordinateMapper.ColorToDepthMatrix;
		if (_coordMapperShader == null)
		{
			_coordMapperShader = matrix.HasValue ? Resources.Load("CoordMapper") as ComputeShader : null;
		}

		if (_coordMapperShader)
		{
			_depth2colorKernel = _coordMapperShader.FindKernel("MapDepthFrame2ColorFrame");
			_color2depthKernel = _coordMapperShader.FindKernel("MapColorSpace2DepthFrame");

			float[] depthFocalLength = new float[] { _depthCameraIntrinsics.FocalLengthX, _depthCameraIntrinsics.FocalLengthY };
			float[] depthPrincipalPoint = new float[] { _depthCameraIntrinsics.PrincipalPointX, _depthCameraIntrinsics.PrincipalPointY };
			float[] depthRadialDistortion = new float[] { _depthCameraIntrinsics.RadialDistortionSecondOrder, _depthCameraIntrinsics.RadialDistortionFourthOrder, _depthCameraIntrinsics.RadialDistortionSixthOrder };

			_coordMapperShader.SetFloats("depthFocalLength", depthFocalLength);
			_coordMapperShader.SetFloats("depthPrincipalPoint", depthPrincipalPoint);
			_coordMapperShader.SetFloats("depthRadialDistortion", depthRadialDistortion);

			float[] colorFocalLength = new float[] { _colorCameraIntrinsics.FocalLengthX, _colorCameraIntrinsics.FocalLengthY };
			float[] colorPrincipalPoint = new float[] { _colorCameraIntrinsics.PrincipalPointX, _colorCameraIntrinsics.PrincipalPointY };
			float[] colorRadialDistortion = new float[] { _colorCameraIntrinsics.RadialDistortionSecondOrder, _colorCameraIntrinsics.RadialDistortionFourthOrder, _colorCameraIntrinsics.RadialDistortionSixthOrder };

			_coordMapperShader.SetFloats("colorFocalLength", colorFocalLength);
			_coordMapperShader.SetFloats("colorPrincipalPoint", colorPrincipalPoint);
			_coordMapperShader.SetFloats("colorRadialDistortion", colorRadialDistortion);

			float[] space2spaceMat = new float[] {
				matrix.Value.M11, matrix.Value.M12, matrix.Value.M13, matrix.Value.M14,
				matrix.Value.M21, matrix.Value.M22, matrix.Value.M23, matrix.Value.M24,
				matrix.Value.M31, matrix.Value.M32, matrix.Value.M33, matrix.Value.M34,
				matrix.Value.M41, matrix.Value.M42, matrix.Value.M43, matrix.Value.M44
			};

			if (!bColor2Depth)
			{
				_coordMapperShader.SetFloats("depth2colorMat", space2spaceMat);
			}
			else
			{
				_coordMapperShader.SetFloats("color2depthMat", space2spaceMat);
			}

			// compute buffers
			int depthImageLength = sensorData.depthImageWidth * sensorData.depthImageHeight;

			if (_depthDepthValuesBuf == null)
			{
				_depthDepthValuesBuf = new ComputeBuffer(depthImageLength, sizeof(int));
				_coordMapperShader.SetBuffer(_depth2colorKernel, "depthDepthValues", _depthDepthValuesBuf);
			}

			if (!bColor2Depth)
			{
				_depthPlaneCoordsBuf = new ComputeBuffer(depthImageLength, 2 * sizeof(float));
				_colorPlaneCoordsBuf = new ComputeBuffer(depthImageLength, 2 * sizeof(float));

				// set plane coords
				Vector2[] depthPlaneCoords = new Vector2[depthImageLength];
				for (int dy = 0, di = 0; dy < sensorData.depthImageHeight; dy++)
				{
					for (int dx = 0; dx < sensorData.depthImageWidth; dx++)
					{
						depthPlaneCoords[di] = new Vector2(dx, dy);
						di++;
					}
				}

				_depthPlaneCoordsBuf.SetData(depthPlaneCoords);
				_coordMapperShader.SetBuffer(_depth2colorKernel, "depthPlaneCoords", _depthPlaneCoordsBuf);
				_coordMapperShader.SetBuffer(_depth2colorKernel, "colorPlaneCoords", _colorPlaneCoordsBuf);
			}
			else
			{
				int colorImageLength = sensorData.colorImageWidth * sensorData.colorImageHeight;

				_colorSpaceCoordsBuf = new ComputeBuffer(colorImageLength, 3 * sizeof(float));
				_colorDepthCoordsBuf = new ComputeBuffer(colorImageLength, 2 * sizeof(float));

				_coordMapperShader.SetBuffer(_color2depthKernel, "colorSpaceCoords", _colorSpaceCoordsBuf);
				_coordMapperShader.SetBuffer(_color2depthKernel, "colorDepthCoords", _colorDepthCoordsBuf);
			}
		}

		return (_coordMapperShader != null);
	}

	public bool MapColorFrameToDepthCoords(KinectInterop.SensorData sensorData, ref Vector2[] vDepthCoords)
	{
		if (_coordMapperShader == null || _colorDepthCoordsBuf == null)
		{
			CreateCoordMapperShader(sensorData, true);
		}

		bool bReadyToMap = _saveLatestFrames ? (_latestColorFrame != null && _latestDepthFrame != null && _latestBodyFrame != null) :
			sensorData.depthImage != null && sensorData.colorImage != null;

		if (bReadyToMap)
		{
			//long timeStamp = DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond;

			if (_coordMapperShader && _coordinateMapper2 != null &&
				_coordinateMapper2.MapColorFrameToDepthSpace(_latestColorFrame, _latestDepthFrame, ref _color2SpacePoints))
			{
				//long timeDuration = (DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond - timeStamp);
				//Debug.Log("mapColorFrameToDepthSpace() took " + timeDuration + " ms");

				int pointArrayLength = _color2SpacePoints.Length;

				//timeStamp = DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond;

				_colorSpaceCoordsBuf.SetData(_color2SpacePoints);
				//_coordMapperShader.SetBuffer(_color2depthKernel, "colorSpaceCoords", _colorSpaceCoordsBuf);

				_coordMapperShader.Dispatch(_color2depthKernel, pointArrayLength / 64, 1, 1);

				if (vDepthCoords == null || vDepthCoords.Length != pointArrayLength)
				{
					vDepthCoords = new Vector2[pointArrayLength];
				}

				_colorDepthCoordsBuf.GetData(vDepthCoords);

				//timeDuration = (DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond - timeStamp);
				//Debug.Log("color2DepthTask() took " + timeDuration + " ms");
			}

			_clearLatestFrames = true;
		}

		return true;
	}

	public int GetJointIndex (KinectInterop.JointType joint)
	{
		return (int)joint;
	}

	public KinectInterop.JointType GetParentJoint (KinectInterop.JointType joint)
	{
		switch (joint)
		{
			case KinectInterop.JointType.SpineBase:
				return KinectInterop.JointType.SpineBase;

			case KinectInterop.JointType.Neck:
				return KinectInterop.JointType.SpineShoulder;

			case KinectInterop.JointType.SpineShoulder:
				return KinectInterop.JointType.SpineMid;

			case KinectInterop.JointType.ShoulderLeft:
			case KinectInterop.JointType.ShoulderRight:
				return KinectInterop.JointType.SpineShoulder;

			case KinectInterop.JointType.HipLeft:
			case KinectInterop.JointType.HipRight:
				return KinectInterop.JointType.SpineBase;

			case KinectInterop.JointType.HandTipLeft:
				return KinectInterop.JointType.HandLeft;

			case KinectInterop.JointType.ThumbLeft:
				return KinectInterop.JointType.WristLeft;

			case KinectInterop.JointType.HandTipRight:
				return KinectInterop.JointType.HandRight;

			case KinectInterop.JointType.ThumbRight:
				return KinectInterop.JointType.WristRight;
		}

		return (KinectInterop.JointType)((int)joint - 1);
	}

	public KinectInterop.JointType GetNextJoint (KinectInterop.JointType joint)
	{
		switch (joint)
		{
			case KinectInterop.JointType.SpineBase:
				return KinectInterop.JointType.SpineMid;
			case KinectInterop.JointType.SpineMid:
				return KinectInterop.JointType.SpineShoulder;
			case KinectInterop.JointType.SpineShoulder:
				return KinectInterop.JointType.Neck;
			case KinectInterop.JointType.Neck:
				return KinectInterop.JointType.Head;

			case KinectInterop.JointType.ShoulderLeft:
				return KinectInterop.JointType.ElbowLeft;
			case KinectInterop.JointType.ElbowLeft:
				return KinectInterop.JointType.WristLeft;
			case KinectInterop.JointType.WristLeft:
				return KinectInterop.JointType.HandLeft;
			case KinectInterop.JointType.HandLeft:
				return KinectInterop.JointType.HandTipLeft;

			case KinectInterop.JointType.ShoulderRight:
				return KinectInterop.JointType.ElbowRight;
			case KinectInterop.JointType.ElbowRight:
				return KinectInterop.JointType.WristRight;
			case KinectInterop.JointType.WristRight:
				return KinectInterop.JointType.HandRight;
			case KinectInterop.JointType.HandRight:
				return KinectInterop.JointType.HandTipRight;

			case KinectInterop.JointType.HipLeft:
				return KinectInterop.JointType.KneeLeft;
			case KinectInterop.JointType.KneeLeft:
				return KinectInterop.JointType.AnkleLeft;
			case KinectInterop.JointType.AnkleLeft:
				return KinectInterop.JointType.FootLeft;

			case KinectInterop.JointType.HipRight:
				return KinectInterop.JointType.KneeRight;
			case KinectInterop.JointType.KneeRight:
				return KinectInterop.JointType.AnkleRight;
			case KinectInterop.JointType.AnkleRight:
				return KinectInterop.JointType.FootRight;
		}

		return joint; // in case of end joint - Head, HandTipLeft, HandTipRight, FootLeft, FootRight
	}

	public bool IsFaceTrackingAvailable (ref bool bNeedRestart)
	{
		return false;
	}

	public bool InitFaceTracking (bool bUseFaceModel, bool bDrawFaceRect)
	{
		return false;
	}

	public void FinishFaceTracking ()
	{
	}

	public bool UpdateFaceTracking ()
	{
		return false;
	}

	public bool IsFaceTrackingActive ()
	{
		return false;
	}

	public bool IsDrawFaceRect ()
	{
		return false;
	}

	public bool IsFaceTracked (long userId)
	{
		return false;
	}

	public bool GetFaceRect (long userId, ref Rect faceRect)
	{
		return false;
	}

	public void VisualizeFaceTrackerOnColorTex (Texture2D texColor)
	{
	}

	public bool GetHeadPosition (long userId, ref Vector3 headPos)
	{
		return false;
	}

	public bool GetHeadRotation (long userId, ref Quaternion headRot)
	{
		return false;
	}

	public bool GetAnimUnits (long userId, ref System.Collections.Generic.Dictionary<KinectInterop.FaceShapeAnimations, float> afAU)
	{
		return false;
	}

	public bool GetShapeUnits (long userId, ref System.Collections.Generic.Dictionary<KinectInterop.FaceShapeDeformations, float> afSU)
	{
		return false;
	}

	public int GetFaceModelVerticesCount (long userId)
	{
		return 0;
	}

	public bool GetFaceModelVertices (long userId, ref Vector3[] avVertices)
	{
		return false;
	}

	public int GetFaceModelTrianglesCount ()
	{
		return 0;
	}

	public bool GetFaceModelTriangles (bool bMirrored, ref int[] avTriangles)
	{
		return false;
	}

	public bool IsSpeechRecognitionAvailable (ref bool bNeedRestart)
	{
		return true;
	}

	public int InitSpeechRecognition (string sRecoCriteria, bool bUseKinect, bool bAdaptationOff)
	{
		speechRecognizer = new SpeechRecognizer();
		return 0;
	}

	public void FinishSpeechRecognition ()
	{
		if (speechRecognizer != null)
		{
			speechRecognizer.Dispose();
			speechRecognizer = null;
		}
	}

	public int UpdateSpeechRecognition ()
	{
		if (speechRecognizer != null)
		{
			if (speechRecognizeTask == null)
			{
				// UnityEngine.WSA.Application.InvokeOnUIThread(() =>
				// {
				speechRecognizeTask = RecognizeSpeechAsync();
				// }, true);
			}

			if (speechRecognizeTask != null)
			{
				// check for error
				if (speechRecognizeTask.IsFaulted)
				{
					Debug.LogError("RecognizeSpeechAsync() has faulted.");
					if (speechRecognizeTask.Exception != null)
						Debug.LogError(speechRecognizeTask.Exception);

					speechRecognizeTask = null;
				}
				else if (speechRecognizeTask.IsCanceled)
				{
					speechRecognizeTask = null;
				}
				else if (speechRecognizeTask.IsCompleted)
				{
					SpeechRecognitionResult result = speechRecognizeTask.Result;

					if (result.Status == SpeechRecognitionResultStatus.Success)
					{
						if (result.Confidence != SpeechRecognitionConfidence.Rejected)
						{
							//Debug.LogError("Phrase: " + result.Text + ", Confidence: " + result.Confidence.ToString() + ", RawConf: " + result.RawConfidence);

							float fConfidence = (float)result.RawConfidence; // (3f - (float)result.Confidence) / 3f;
							if (fConfidence >= requiredPhraseConfidence)
							{
								isPhraseRecognized = true;
								recognizedPhraseTag = result.SemanticInterpretation.Properties.ContainsKey("<ROOT>") ?
									result.SemanticInterpretation.Properties["<ROOT>"][0] : result.Text;
								recognizedPhraseConfidence = fConfidence;
							}
						}
					}
					//else
					//{
					//	Debug.LogError("Speech recognition failed: " + result.Status.ToString());
					//}

					speechRecognizeTask = null;
				}
			}
		}

		return 0;
	}

	private async Task<SpeechRecognitionResult> RecognizeSpeechAsync()
	{
		SpeechRecognitionResult result = null;

		if (speechRecognizer != null)
		{
			result = await speechRecognizer.RecognizeAsync();
		}

		return result;
	}


	public int LoadSpeechGrammar (string sFileName, short iLangCode, bool bDynamic)
	{
		Task<int> task = null;

		// UnityEngine.WSA.Application.InvokeOnUIThread(() =>
		// {
		task = LoadGrammarFileAsync(sFileName);
		// }, true);

		while (task != null && !task.IsCompleted && !task.IsFaulted)
		{
			task.Wait(100);
		}

		if (task.IsFaulted)
		{
			Debug.LogError("LoadGrammarFileAsync() has faulted.");
			if (task.Exception != null)
				Debug.LogError(task.Exception);

			return -1;
		}

		return 0;
	}

	private async Task<int> LoadGrammarFileAsync(string sFileName)
	{
		if (speechRecognizer != null)
		{
			string sUrl = "ms-appdata:///local/" + sFileName;
			var storageFile = await Windows.Storage.StorageFile.GetFileFromApplicationUriAsync(new Uri(sUrl));
			var grammarFile = new SpeechRecognitionGrammarFileConstraint(storageFile, sFileName);

			speechRecognizer.Constraints.Add(grammarFile);
			await speechRecognizer.CompileConstraintsAsync();
		}

		return 0;
	}

	public int AddGrammarPhrase (string sFromRule, string sToRule, string sPhrase, bool bClearRulePhrases, bool bCommitGrammar)
	{
		return -1;
	}

	public void SetSpeechConfidence (float fConfidence)
	{
		requiredPhraseConfidence = fConfidence;
	}

	public bool IsSpeechStarted ()
	{
		return speechRecognizer != null ? speechRecognizer.State == SpeechRecognizerState.SoundStarted : false;
	}

	public bool IsSpeechEnded ()
	{
		return speechRecognizer != null ? speechRecognizer.State == SpeechRecognizerState.SoundEnded : false;
	}

	public bool IsPhraseRecognized ()
	{
		return isPhraseRecognized;
	}

	public float GetPhraseConfidence ()
	{
		return recognizedPhraseConfidence;
	}

	public string GetRecognizedPhraseTag ()
	{
		return recognizedPhraseTag;
	}

	public void ClearRecognizedPhrase ()
	{
		isPhraseRecognized = false;
		recognizedPhraseTag = string.Empty;
		recognizedPhraseConfidence = 0f;
	}

	public bool IsBackgroundRemovalAvailable (ref bool bNeedRestart)
	{
		return true;
	}

	public bool InitBackgroundRemoval (KinectInterop.SensorData sensorData, bool isHiResPrefered)
	{
		_backgroundRemovalInited = KinectInterop.InitBackgroundRemoval(sensorData, isHiResPrefered);
		return _backgroundRemovalInited;
	}

	public void FinishBackgroundRemoval (KinectInterop.SensorData sensorData)
	{
		KinectInterop.FinishBackgroundRemoval(sensorData);
		_backgroundRemovalInited = false;
	}

	public bool UpdateBackgroundRemoval (KinectInterop.SensorData sensorData, bool isHiResPrefered, Color32 defaultColor, bool bAlphaTexOnly)
	{
		return KinectInterop.UpdateBackgroundRemoval(sensorData, isHiResPrefered, defaultColor, bAlphaTexOnly);
	}

	public bool IsBackgroundRemovalActive ()
	{
		return _backgroundRemovalInited;
	}

	public bool IsBRHiResSupported ()
	{
		return true;
	}

	public Rect GetForegroundFrameRect (KinectInterop.SensorData sensorData, bool isHiResPrefered)
	{
		return KinectInterop.GetForegroundFrameRect(sensorData, isHiResPrefered);
	}

	public int GetForegroundFrameLength (KinectInterop.SensorData sensorData, bool isHiResPrefered)
	{
		return KinectInterop.GetForegroundFrameLength(sensorData, isHiResPrefered);
	}

	public bool PollForegroundFrame (KinectInterop.SensorData sensorData, bool isHiResPrefered, Color32 defaultColor, bool bLimitedUsers, System.Collections.Generic.ICollection<int> alTrackedIndexes, ref byte[] foregroundImage)
	{
		return KinectInterop.PollForegroundFrame(sensorData, isHiResPrefered, defaultColor, bLimitedUsers, alTrackedIndexes, ref foregroundImage);
	}

}
#endif
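
Below is a brief, hypothetical usage sketch, not part of the file above. It assumes the KinectInterop types referenced by the interface and omits error handling and the body-frame parameters; in the surrounding asset a manager script normally drives DepthSensorInterface implementations, so the direct calls here are for illustration only.

	// illustrative only - a minimal caller, assuming the surrounding KinectScripts types
	var sensorInt = new Kinect2UwpInterface();

	bool needRestart = false;
	sensorInt.InitSensorInterface(false, ref needRestart);

	// request the frame sources needed by the application
	KinectInterop.FrameSource flags = KinectInterop.FrameSource.TypeColor |
	                                  KinectInterop.FrameSource.TypeDepth |
	                                  KinectInterop.FrameSource.TypeBody;

	KinectInterop.SensorData data = sensorInt.OpenDefaultSensor(flags, 0f, false);

	if (data != null)
	{
	    // per Unity frame (e.g. from MonoBehaviour.Update): drain the buffered frames
	    sensorInt.PollColorFrame(data);  // fills data.colorImage
	    sensorInt.PollDepthFrame(data);  // fills data.depthImage and data.bodyIndexImage
	    // PollBodyFrame() additionally needs a KinectInterop.BodyFrameData and a kinect-to-world matrix

	    sensorInt.CloseSensor(data);
	}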