// Assets/KinectScripts/KinectInterop.cs
using UnityEngine;
//using Windows.Kinect;

using System.Collections;
using System.Collections.Generic;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System;
using System.IO;
using System.Text;
#if !UNITY_WSA
using ICSharpCode.SharpZipLib.Zip;
#endif
//using OpenCvSharp;
using UnityEngine.SceneManagement;


/// <summary>
/// KinectInterop is a class containing utility and interop functions that call the proper sensor interface.
/// </summary>
public class KinectInterop
{
    // order of depth sensor interfaces
    //public static Type[] SensorInterfaceOrder = new Type[] {
    //    typeof(Kinect2Interface), typeof(Kinect1Interface), typeof(OpenNI2Interface)
    //};

    public static DepthSensorInterface[] SensorInterfaceOrder = new DepthSensorInterface[] {
#if UNITY_WSA_10_0 && NETFX_CORE
        new Kinect2UwpInterface()
#else
        new Kinect2Interface(), new Kinect1Interface()/**, new OpenNI2Interface()*/
#endif
    };

    // graphics shader level
    private static int graphicsShaderLevel = 0;


    /// <summary>
    /// Constants used by this class and other K2-components
    /// </summary>
    public static class Constants
    {
        public const int MaxBodyCount = 6;
        public const int MaxJointCount = 25;

        public const float MinTimeBetweenSameGestures = 0.0f;
        public const float PoseCompleteDuration = 1.0f;
        public const float ClickMaxDistance = 0.05f;
        public const float ClickStayDuration = 2.0f;
    }

    // Types of depth sensor platforms
    public enum DepthSensorPlatform : int
    {
        None = 0,
        KinectSDKv1 = 1,
        KinectSDKv2 = 2,
        OpenNIv2 = 3,
        RealSense = 4,
        KinectUWPv2 = 5,

        DummyK1 = 101,
        DummyK2 = 102
    }

    // Data structures for interfacing C# with the native wrappers

    [Flags]
    public enum FrameSource : uint
    {
        TypeNone = 0x0,
        TypeColor = 0x1,
        TypeInfrared = 0x2,
        TypeDepth = 0x8,
        TypeBodyIndex = 0x10,
        TypeBody = 0x20,
        TypeAudio = 0x40
    }
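
    // Illustrative sketch (not part of the original file): since FrameSource is a [Flags] enum,
    // the frame types are meant to be OR-ed together, e.g. when requesting streams via
    // OpenDefaultSensor() further below. The variable names here are hypothetical.
    //
    //   FrameSource dwFlags = FrameSource.TypeColor | FrameSource.TypeDepth |
    //                         FrameSource.TypeBodyIndex | FrameSource.TypeBody;
    //   bool bNeedsDepth = (dwFlags & FrameSource.TypeDepth) != FrameSource.TypeNone;  // true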

    public enum JointType : int
    {
        SpineBase = 0,
        SpineMid = 1,
        Neck = 2,
        Head = 3,
        ShoulderLeft = 4,
        ElbowLeft = 5,
        WristLeft = 6,
        HandLeft = 7,
        ShoulderRight = 8,
        ElbowRight = 9,
        WristRight = 10,
        HandRight = 11,
        HipLeft = 12,
        KneeLeft = 13,
        AnkleLeft = 14,
        FootLeft = 15,
        HipRight = 16,
        KneeRight = 17,
        AnkleRight = 18,
        FootRight = 19,
        SpineShoulder = 20,
        HandTipLeft = 21,
        ThumbLeft = 22,
        HandTipRight = 23,
        ThumbRight = 24
        //Count = 25
    }

    public static readonly Vector3[] JointBaseDir =
    {
        Vector3.zero,
        Vector3.up,
        Vector3.up,
        Vector3.up,
        Vector3.left,
        Vector3.left,
        Vector3.left,
        Vector3.left,
        Vector3.right,
        Vector3.right,
        Vector3.right,
        Vector3.right,
        Vector3.down,
        Vector3.down,
        Vector3.down,
        Vector3.forward,
        Vector3.down,
        Vector3.down,
        Vector3.down,
        Vector3.forward,
        Vector3.up,
        Vector3.left,
        Vector3.forward,
        Vector3.right,
        Vector3.forward
    };
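
    // Illustrative sketch (not part of the original file): JointBaseDir is indexed by the integer
    // value of JointType and gives the base direction of the bone ending at that joint.
    //
    //   Vector3 baseDir = KinectInterop.JointBaseDir[(int)KinectInterop.JointType.ElbowLeft];  // Vector3.left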

    public enum TrackingState
    {
        NotTracked = 0,
        Inferred = 1,
        Tracked = 2
    }

    public enum HandState
    {
        Unknown = 0,
        NotTracked = 1,
        Open = 2,
        Closed = 3,
        Lasso = 4
    }

    public enum TrackingConfidence
    {
        Low = 0,
        High = 1
    }

    //[Flags]
    //public enum ClippedEdges
    //{
    //    None = 0,
    //    Right = 1,
    //    Left = 2,
    //    Top = 4,
    //    Bottom = 8
    //}

    public enum FaceShapeAnimations : int
    {
        JawOpen = 0,
        LipPucker = 1,
        JawSlideRight = 2,
        LipStretcherRight = 3,
        LipStretcherLeft = 4,
        LipCornerPullerLeft = 5,
        LipCornerPullerRight = 6,
        LipCornerDepressorLeft = 7,
        LipCornerDepressorRight = 8,
        LeftcheekPuff = 9,
        RightcheekPuff = 10,
        LefteyeClosed = 11,
        RighteyeClosed = 12,
        RighteyebrowLowerer = 13,
        LefteyebrowLowerer = 14,
        LowerlipDepressorLeft = 15,
        LowerlipDepressorRight = 16,
    }

    public enum FaceShapeDeformations : int
    {
        PCA01 = 0,
        PCA02 = 1,
        PCA03 = 2,
        PCA04 = 3,
        PCA05 = 4,
        PCA06 = 5,
        PCA07 = 6,
        PCA08 = 7,
        PCA09 = 8,
        PCA10 = 9,
        Chin03 = 10,
        Forehead00 = 11,
        Cheeks02 = 12,
        Cheeks01 = 13,
        MouthBag01 = 14,
        MouthBag02 = 15,
        Eyes02 = 16,
        MouthBag03 = 17,
        Forehead04 = 18,
        Nose00 = 19,
        Nose01 = 20,
        Nose02 = 21,
        MouthBag06 = 22,
        MouthBag05 = 23,
        Cheeks00 = 24,
        Mask03 = 25,
        Eyes03 = 26,
        Nose03 = 27,
        Eyes08 = 28,
        MouthBag07 = 29,
        Eyes00 = 30,
        Nose04 = 31,
        Mask04 = 32,
        Chin04 = 33,
        Forehead05 = 34,
        Eyes06 = 35,
        Eyes11 = 36,
        Nose05 = 37,
        Mouth07 = 38,
        Cheeks08 = 39,
        Eyes09 = 40,
        Mask10 = 41,
        Mouth09 = 42,
        Nose07 = 43,
        Nose08 = 44,
        Cheeks07 = 45,
        Mask07 = 46,
        MouthBag09 = 47,
        Nose06 = 48,
        Chin02 = 49,
        Eyes07 = 50,
        Cheeks10 = 51,
        Rim20 = 52,
        Mask22 = 53,
        MouthBag15 = 54,
        Chin01 = 55,
        Cheeks04 = 56,
        Eyes17 = 57,
        Cheeks13 = 58,
        Mouth02 = 59,
        MouthBag12 = 60,
        Mask19 = 61,
        Mask20 = 62,
        Forehead06 = 63,
        Mouth13 = 64,
        Mask25 = 65,
        Chin05 = 66,
        Cheeks20 = 67,
        Nose09 = 68,
        Nose10 = 69,
        MouthBag27 = 70,
        Mouth11 = 71,
        Cheeks14 = 72,
        Eyes16 = 73,
        Mask29 = 74,
        Nose15 = 75,
        Cheeks11 = 76,
        Mouth16 = 77,
        Eyes19 = 78,
        Mouth17 = 79,
        MouthBag36 = 80,
        Mouth15 = 81,
        Cheeks25 = 82,
        Cheeks16 = 83,
        Cheeks18 = 84,
        Rim07 = 85,
        Nose13 = 86,
        Mouth18 = 87,
        Cheeks19 = 88,
        Rim21 = 89,
        Mouth22 = 90,
        Nose18 = 91,
        Nose16 = 92,
        Rim22 = 93,
    }


    /// <summary>
    /// Container for the sensor data, including color, depth, ir and body frames.
    /// </summary>
    public class SensorData
    {
        public DepthSensorInterface sensorInterface;
        public DepthSensorPlatform sensorIntPlatform;

        public int bodyCount;
        public int jointCount;

        public float depthCameraOffset;
        public float depthCameraFOV;
        public float colorCameraFOV;
        public float faceOverlayOffset;

        public int colorImageWidth;
        public int colorImageHeight;

        public byte[] colorImage;
        public long lastColorFrameTime = 0;

        public int depthImageWidth;
        public int depthImageHeight;

        public ushort[] depthImage;
        public long lastDepthFrameTime = 0;

        public ushort[] infraredImage;
        public long lastInfraredFrameTime = 0;

        public byte[] bodyIndexImage;
        public long lastBodyIndexFrameTime = 0;

        public byte selectedBodyIndex = 255;

        public bool hintHeightAngle = false;
        public Quaternion sensorRotDetected = Quaternion.identity;
        public float sensorHgtDetected = 0f;

        public RenderTexture bodyIndexTexture;
        public Material bodyIndexMaterial;
        public ComputeBuffer bodyIndexBuffer;

        public float[] bodyIndexBufferData = null;
        public bool bodyIndexBufferReady = false;
        public object bodyIndexBufferLock = new object();

        public RenderTexture depthImageTexture;
        public Material depthImageMaterial;
        public ComputeBuffer depthImageBuffer;
        public ComputeBuffer depthHistBuffer;

        public float[] depthImageBufferData = null;
        public int[] depthHistBufferData = null;
        public float[] equalHistBufferData = null;
        public int depthHistTotalPoints = 0;
        public int firstUserIndex = -1;

        public bool depthImageBufferReady = false;
        public object depthImageBufferLock = new object();
        public bool depthCoordsBufferReady = false;
        public object depthCoordsBufferLock = new object();
        public bool newDepthImage = false;

        public Texture2D colorImageTexture;

        public bool colorImageBufferReady = false;
        public object colorImageBufferLock = new object();
        public bool newColorImage = false;

        public RenderTexture depth2ColorTexture;
        public Material depth2ColorMaterial;
        public ComputeBuffer depth2ColorBuffer;
        public Vector2[] depth2ColorCoords;

        public Vector3[] depth2SpaceCoords;
        public bool spaceCoordsBufferReady = false;
        public object spaceCoordsBufferLock = new object();

        public bool backgroundRemovalInited = false;
        public bool backgroundRemovalHiRes = false;
        public bool invertAlphaColorMask = false;

        public RenderTexture color2DepthTexture;
        public Material color2DepthMaterial;
        public ComputeBuffer color2DepthBuffer;
        public Vector2[] color2DepthCoords;

        public RenderTexture alphaBodyTexture;
        public Material alphaBodyMaterial;
        public Material erodeBodyMaterial, dilateBodyMaterial, blurBodyMaterial;

        public int erodeIterations;
        public int dilateIterations;

        public RenderTexture colorBackgroundTexture;
        public Material colorBackgroundMaterial;

        public bool newInfraredImage = false;

        public bool bodyFrameReady = false;
        public object bodyFrameLock = new object();
        public bool newBodyFrame = false;

        public bool isPlayModeEnabled;
        public string playModeData;
        public string playModeHandData;
    }
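
    // Illustrative sketch (not part of the original file): the raw frames in SensorData are flat,
    // row-major arrays, so a pixel at (x, y) is addressed as y * width + x (the same indexing the
    // mapping helpers below use). Variable names are hypothetical.
    //
    //   int index = y * sensorData.depthImageWidth + x;
    //   ushort depthInMm = sensorData.depthImage[index];    // depth value in millimeters
    //   byte userIndex = sensorData.bodyIndexImage[index];   // 255 means "no user at this pixel"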

    /// <summary>
    /// Parameters used for smoothing of the body-joint positions between frames.
    /// </summary>
    public struct SmoothParameters
    {
        public float smoothing;
        public float correction;
        public float prediction;
        public float jitterRadius;
        public float maxDeviationRadius;
    }
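
    // Illustrative sketch (not part of the original file): typical filter settings in the spirit of
    // the Kinect SDK joint-filter defaults. The concrete numbers are only an example, not values
    // taken from this project.
    //
    //   SmoothParameters smoothParameters = new SmoothParameters();
    //   smoothParameters.smoothing = 0.5f;            // higher = smoother, but more latency
    //   smoothParameters.correction = 0.5f;
    //   smoothParameters.prediction = 0.5f;
    //   smoothParameters.jitterRadius = 0.05f;        // meters
    //   smoothParameters.maxDeviationRadius = 0.04f;  // meters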

    /// <summary>
    /// Container for the body-joint data.
    /// </summary>
    public struct JointData
    {
        // parameters filled in by the sensor interface
        //public JointType jointType;
        public TrackingState trackingState;
        public Vector3 kinectPos;
        public Vector3 position;
        public Quaternion orientation;  // deprecated

        public Vector3 posPrev;
        public Vector3 posRel;
        public Vector3 posVel;

        // KM calculated parameters
        public Vector3 direction;
        public Quaternion normalRotation;
        public Quaternion mirroredRotation;

        // Constraint parameters
        public float lastAngle;
    }

    /// <summary>
    /// Container for the body data.
    /// </summary>
    public struct BodyData
    {
        // parameters filled in by the sensor interface
        public Int64 liTrackingID;
        public Vector3 position;
        public Quaternion orientation;  // deprecated

        public JointData[] joint;

        // KM calculated parameters
        public Quaternion normalRotation;
        public Quaternion mirroredRotation;

        public Vector3 hipsDirection;
        public Vector3 shouldersDirection;
        public float bodyTurnAngle;
        //public float bodyFullAngle;
        public bool isTurnedAround;
        public float turnAroundFactor;

        public Quaternion leftHandOrientation;
        public Quaternion rightHandOrientation;

        public Quaternion headOrientation;

        // public Vector3 leftArmDirection;
        // public Vector3 leftThumbForward;
        // public Vector3 leftThumbDirection;
        // //public float leftThumbAngle;
        //
        // public Vector3 rightArmDirection;
        // public Vector3 rightThumbForward;
        // public Vector3 rightThumbDirection;
        // //public float rightThumbAngle;

        //public Vector3 leftLegDirection;
        //public Vector3 leftFootDirection;
        //public Vector3 rightLegDirection;
        //public Vector3 rightFootDirection;

        public HandState leftHandState;
        public TrackingConfidence leftHandConfidence;
        public HandState rightHandState;
        public TrackingConfidence rightHandConfidence;

        public uint dwClippedEdges;
        public short bIsTracked;
        public short bIsRestricted;
    }

    /// <summary>
    /// Container for the body frame data.
    /// </summary>
    public struct BodyFrameData
    {
        public Int64 liRelativeTime, liPreviousTime;
        [MarshalAsAttribute(UnmanagedType.ByValArray, SizeConst = 6, ArraySubType = UnmanagedType.Struct)]
        public BodyData[] bodyData;
        //public UnityEngine.Vector4 floorClipPlane;
        public bool bTurnAnalisys;

        public BodyFrameData(int bodyCount, int jointCount)
        {
            liRelativeTime = liPreviousTime = 0;
            //floorClipPlane = UnityEngine.Vector4.zero;
            bTurnAnalisys = false;

            bodyData = new BodyData[bodyCount];

            for(int i = 0; i < bodyCount; i++)
            {
                bodyData[i].joint = new JointData[jointCount];

                bodyData[i].leftHandOrientation = Quaternion.identity;
                bodyData[i].rightHandOrientation = Quaternion.identity;
                bodyData[i].headOrientation = Quaternion.identity;
            }
        }
    }
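
    // Illustrative sketch (not part of the original file): a body frame is typically allocated once,
    // sized from the sensor data, and then refilled by PollBodyFrame() or SetBodyFrameFromCsv() below.
    //
    //   BodyFrameData bodyFrame = new BodyFrameData(sensorData.bodyCount, sensorData.jointCount);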
521 |
|
522 |
|
523 |
// initializes the available sensor interfaces |
524 |
public static List<DepthSensorInterface> InitSensorInterfaces(bool bOnceRestarted, ref bool bNeedRestart) |
525 |
{ |
526 |
List<DepthSensorInterface> listInterfaces = new List<DepthSensorInterface>(); |
527 |
|
528 |
//var typeInterface = typeof(DepthSensorInterface); |
529 |
|
530 |
for(int pass = 0; pass <= 1; pass++) |
531 |
{ |
532 |
bool bCopyLibs = (pass != 0); |
533 |
|
534 |
//foreach(Type type in SensorInterfaceOrder) |
535 |
for(int i = 0; i < SensorInterfaceOrder.Length; i++) |
536 |
{ |
537 |
DepthSensorInterface sensorInt = SensorInterfaceOrder[i]; |
538 |
|
539 |
//if(typeInterface.IsAssignableFrom(type) && type != typeInterface) |
540 |
if(sensorInt != null) |
541 |
{ |
542 |
//DepthSensorInterface sensorInt = null; |
543 |
|
544 |
try |
545 |
{ |
546 |
//sensorInt = (DepthSensorInterface)Activator.CreateInstance(type); |
547 |
|
548 |
bool bIntNeedRestart = false; |
549 |
if(sensorInt.InitSensorInterface(bCopyLibs, ref bIntNeedRestart)) |
550 |
{ |
551 |
bNeedRestart |= bIntNeedRestart; |
552 |
} |
553 |
else |
554 |
{ |
555 |
sensorInt.FreeSensorInterface(bCopyLibs); |
556 |
sensorInt = null; |
557 |
|
558 |
continue; |
559 |
} |
560 |
|
561 |
if(!bNeedRestart && !sensorInt.IsSensorAvailable()) |
562 |
{ |
563 |
sensorInt.FreeSensorInterface(false); |
564 |
sensorInt = null; |
565 |
} |
566 |
} |
567 |
catch (Exception /**ex*/) |
568 |
{ |
569 |
//Debug.Log(ex); |
570 |
|
571 |
if(sensorInt != null) |
572 |
{ |
573 |
try |
574 |
{ |
575 |
sensorInt.FreeSensorInterface(bCopyLibs); |
576 |
} |
577 |
catch (Exception) |
578 |
{ |
579 |
// do nothing |
580 |
} |
581 |
finally |
582 |
{ |
583 |
sensorInt = null; |
584 |
} |
585 |
} |
586 |
} |
587 |
|
588 |
if(sensorInt != null) |
589 |
{ |
590 |
listInterfaces.Add(sensorInt); |
591 |
} |
592 |
} |
593 |
} |
594 |
|
595 |
if(listInterfaces.Count > 0) |
596 |
{ |
597 |
// we found working interface(s), don't go any further |
598 |
break; |
599 |
} |
600 |
|
601 |
if(bOnceRestarted) |
602 |
{ |
603 |
// we have restarted once, don't do it again |
604 |
break; |
605 |
} |
606 |
} |
607 |
|
608 |
return listInterfaces; |
609 |
} |
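
    // Illustrative sketch (not part of the original file): a start-up sequence a caller (e.g. a
    // manager component) might use: initialize the available interfaces, then open the first working
    // sensor. Variable names and the chosen flags are hypothetical.
    //
    //   bool bNeedRestart = false;
    //   List<DepthSensorInterface> sensorInterfaces = KinectInterop.InitSensorInterfaces(false, ref bNeedRestart);
    //
    //   if(!bNeedRestart && sensorInterfaces.Count > 0)
    //   {
    //       SensorData sensorData = KinectInterop.OpenDefaultSensor(sensorInterfaces,
    //           FrameSource.TypeDepth | FrameSource.TypeBody, 0f, false,
    //           KinectManager.UserMapType.RawUserDepth, null);
    //   }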
610 |
|
611 |
// opens the default sensor and needed readers |
612 |
public static SensorData OpenDefaultSensor(List<DepthSensorInterface> listInterfaces, FrameSource dwFlags, float sensorAngle, bool bUseMultiSource, |
613 |
KinectManager.UserMapType userMapType, BackgroundRemovalManager brManager) |
614 |
{ |
615 |
SensorData sensorData = null; |
616 |
if(listInterfaces == null) |
617 |
return sensorData; |
618 |
|
619 |
foreach(DepthSensorInterface sensorInt in listInterfaces) |
620 |
{ |
621 |
try |
622 |
{ |
623 |
if(sensorData == null) |
624 |
{ |
625 |
sensorData = sensorInt.OpenDefaultSensor(dwFlags, sensorAngle, bUseMultiSource); |
626 |
|
627 |
if(sensorData != null) |
628 |
{ |
629 |
sensorData.sensorInterface = sensorInt; |
630 |
sensorData.sensorIntPlatform = sensorInt.GetSensorPlatform(); |
631 |
Debug.Log("Interface used: " + sensorInt.GetType().Name); |
632 |
|
633 |
Debug.Log("Shader level: " + SystemInfo.graphicsShaderLevel); |
634 |
if(sensorData.bodyIndexImage != null && IsDirectX11Available()) |
635 |
{ |
636 |
Shader bodyIndexShader = Shader.Find("Kinect/BodyShader"); |
637 |
|
638 |
if(bodyIndexShader != null) |
639 |
{ |
640 |
if(sensorData.bodyIndexTexture == null || sensorData.bodyIndexTexture.width != sensorData.depthImageWidth || sensorData.bodyIndexTexture.height != sensorData.depthImageHeight) |
641 |
{ |
642 |
sensorData.bodyIndexTexture = new RenderTexture(sensorData.depthImageWidth, sensorData.depthImageHeight, 0); |
643 |
sensorData.bodyIndexTexture.wrapMode = TextureWrapMode.Clamp; |
644 |
sensorData.bodyIndexTexture.filterMode = FilterMode.Point; |
645 |
//Debug.Log(sensorData.bodyIndexTexture.format); |
646 |
} |
647 |
|
648 |
sensorData.bodyIndexMaterial = new Material(bodyIndexShader); |
649 |
|
650 |
sensorData.bodyIndexMaterial.SetFloat("_TexResX", (float)sensorData.depthImageWidth); |
651 |
sensorData.bodyIndexMaterial.SetFloat("_TexResY", (float)sensorData.depthImageHeight); |
652 |
|
653 |
sensorData.bodyIndexBuffer = new ComputeBuffer(sensorData.bodyIndexImage.Length, sizeof(float)); |
654 |
sensorData.bodyIndexMaterial.SetBuffer("_BodyIndexBuffer", sensorData.bodyIndexBuffer); |
655 |
} |
656 |
} |
657 |
|
658 |
if(sensorData.depthImage != null && IsDirectX11Available() && |
659 |
userMapType == KinectManager.UserMapType.UserTexture) |
660 |
{ |
661 |
Shader depthImageShader = Shader.Find("Kinect/DepthShader"); |
662 |
|
663 |
if(depthImageShader != null) |
664 |
{ |
665 |
if (sensorData.depthImageTexture == null || sensorData.depthImageTexture.width != sensorData.depthImageWidth || sensorData.depthImageTexture.height != sensorData.depthImageHeight) |
666 |
{ |
667 |
sensorData.depthImageTexture = new RenderTexture(sensorData.depthImageWidth, sensorData.depthImageHeight, 0); |
668 |
sensorData.depthImageTexture.wrapMode = TextureWrapMode.Clamp; |
669 |
sensorData.depthImageTexture.filterMode = FilterMode.Point; |
670 |
} |
671 |
|
672 |
sensorData.depthImageMaterial = new Material(depthImageShader); |
673 |
|
674 |
sensorData.depthImageMaterial.SetTexture("_MainTex", sensorData.bodyIndexTexture); |
675 |
|
676 |
sensorData.depthImageMaterial.SetFloat("_TexResX", (float)sensorData.depthImageWidth); |
677 |
sensorData.depthImageMaterial.SetFloat("_TexResY", (float)sensorData.depthImageHeight); |
678 |
|
679 |
sensorData.depthImageBuffer = new ComputeBuffer(sensorData.depthImage.Length, sizeof(float)); |
680 |
sensorData.depthImageMaterial.SetBuffer("_DepthBuffer", sensorData.depthImageBuffer); |
681 |
|
682 |
sensorData.depthHistBuffer = new ComputeBuffer(5001, sizeof(float)); |
683 |
sensorData.depthImageMaterial.SetBuffer("_HistBuffer", sensorData.depthHistBuffer); |
684 |
|
685 |
// use body index buffer to overcome the linear color correction |
686 |
sensorData.depthImageMaterial.SetBuffer("_BodyIndexBuffer", sensorData.bodyIndexBuffer); |
687 |
} |
688 |
} |
689 |
|
690 |
if(sensorData.colorImage != null) |
691 |
{ |
692 |
if (sensorData.colorImageTexture == null || sensorData.colorImageTexture.width != sensorData.colorImageWidth || sensorData.colorImageTexture.height != sensorData.colorImageHeight) |
693 |
{ |
694 |
sensorData.colorImageTexture = new Texture2D(sensorData.colorImageWidth, sensorData.colorImageHeight, TextureFormat.RGBA32, false); |
695 |
} |
696 |
} |
697 |
|
698 |
// check if background removal requires cut-out image |
699 |
bool bBrRequiresCutOut = brManager && (!brManager.colorCameraResolution || !sensorInt.IsBRHiResSupported()); |
700 |
|
701 |
if(sensorData.bodyIndexImage != null && sensorData.colorImage != null && IsDirectX11Available() && |
702 |
(userMapType == KinectManager.UserMapType.CutOutTexture || bBrRequiresCutOut)) |
703 |
{ |
704 |
Shader depth2ColorShader = Shader.Find("Kinect/Depth2ColorShader"); |
705 |
|
706 |
if(depth2ColorShader != null) |
707 |
{ |
708 |
if (sensorData.depth2ColorTexture == null || sensorData.depth2ColorTexture.width != sensorData.depthImageWidth || sensorData.depth2ColorTexture.height != sensorData.depthImageHeight) |
709 |
{ |
710 |
sensorData.depth2ColorTexture = new RenderTexture(sensorData.depthImageWidth, sensorData.depthImageHeight, 0); |
711 |
sensorData.depth2ColorTexture.wrapMode = TextureWrapMode.Clamp; |
712 |
sensorData.depth2ColorTexture.filterMode = FilterMode.Point; |
713 |
} |
714 |
|
715 |
sensorData.depth2ColorMaterial = new Material(depth2ColorShader); |
716 |
|
717 |
sensorData.depth2ColorMaterial.SetFloat("_ColorResX", (float)sensorData.colorImageWidth); |
718 |
sensorData.depth2ColorMaterial.SetFloat("_ColorResY", (float)sensorData.colorImageHeight); |
719 |
sensorData.depth2ColorMaterial.SetFloat("_DepthResX", (float)sensorData.depthImageWidth); |
720 |
sensorData.depth2ColorMaterial.SetFloat("_DepthResY", (float)sensorData.depthImageHeight); |
721 |
|
722 |
sensorData.depth2ColorBuffer = new ComputeBuffer(sensorData.depthImage.Length, sizeof(float) * 2); |
723 |
sensorData.depth2ColorMaterial.SetBuffer("_ColorCoords", sensorData.depth2ColorBuffer); |
724 |
|
725 |
sensorData.depth2ColorCoords = new Vector2[sensorData.depthImage.Length]; |
726 |
} |
727 |
} |
728 |
|
729 |
// if(sensorData.bodyIndexImage != null && sensorData.colorImage != null && IsDirectX11Available() && |
730 |
// (userMapType == KinectManager.UserMapType.CutOutTexture || bBrRequiresCutOut)) |
731 |
// { |
732 |
// sensorData.depth2ColorCoords = new Vector2[sensorData.depthImage.Length]; |
733 |
// } |
734 |
|
735 |
} |
736 |
} |
737 |
else |
738 |
{ |
739 |
sensorInt.FreeSensorInterface(false); |
740 |
} |
741 |
} |
742 |
catch (Exception ex) |
743 |
{ |
744 |
Debug.LogError("Initialization of the sensor failed."); |
745 |
Debug.LogError(ex.ToString()); |
746 |
|
747 |
try |
748 |
{ |
749 |
sensorInt.FreeSensorInterface(false); |
750 |
} |
751 |
catch (Exception) |
752 |
{ |
753 |
// do nothing |
754 |
} |
755 |
} |
756 |
} |
757 |
|
758 |
return sensorData; |
759 |
} |
760 |
|
761 |
// closes opened readers and closes the sensor |
762 |
public static void CloseSensor(SensorData sensorData) |
763 |
{ |
764 |
FinishBackgroundRemoval(sensorData); |
765 |
//FinishColorBackground(sensorData); |
766 |
|
767 |
if(sensorData != null && sensorData.sensorInterface != null) |
768 |
{ |
769 |
sensorData.sensorInterface.CloseSensor(sensorData); |
770 |
} |
771 |
|
772 |
if(sensorData.bodyIndexBuffer != null) |
773 |
{ |
774 |
sensorData.bodyIndexBuffer.Release(); |
775 |
sensorData.bodyIndexBuffer = null; |
776 |
} |
777 |
|
778 |
if(sensorData.depthImageBuffer != null) |
779 |
{ |
780 |
sensorData.depthImageBuffer.Release(); |
781 |
sensorData.depthImageBuffer = null; |
782 |
} |
783 |
|
784 |
if(sensorData.depthHistBuffer != null) |
785 |
{ |
786 |
sensorData.depthHistBuffer.Release(); |
787 |
sensorData.depthHistBuffer = null; |
788 |
} |
789 |
|
790 |
if(sensorData.depth2ColorBuffer != null) |
791 |
{ |
792 |
sensorData.depth2ColorBuffer.Release(); |
793 |
sensorData.depth2ColorBuffer = null; |
794 |
} |
795 |
|
796 |
//if (sensorData.depth2ColorTexture != null) |
797 |
//{ |
798 |
// sensorData.depth2ColorTexture.Release(); |
799 |
// sensorData.depth2ColorTexture = null; |
800 |
//} |
801 |
|
802 |
//sensorData.depth2ColorMaterial = null; |
803 |
//sensorData.depth2ColorCoords = null; |
804 |
} |
805 |
|
806 |
// invoked periodically to update sensor data, if needed |
807 |
public static bool UpdateSensorData(SensorData sensorData) |
808 |
{ |
809 |
bool bResult = false; |
810 |
|
811 |
if(sensorData.sensorInterface != null) |
812 |
{ |
813 |
bResult = sensorData.sensorInterface.UpdateSensorData(sensorData); |
814 |
} |
815 |
|
816 |
return bResult; |
817 |
} |
818 |
|
819 |
// returns the mirror joint of the given joint |
820 |
public static JointType GetMirrorJoint(JointType joint) |
821 |
{ |
822 |
switch(joint) |
823 |
{ |
824 |
case JointType.ShoulderLeft: |
825 |
return JointType.ShoulderRight; |
826 |
case JointType.ElbowLeft: |
827 |
return JointType.ElbowRight; |
828 |
case JointType.WristLeft: |
829 |
return JointType.WristRight; |
830 |
case JointType.HandLeft: |
831 |
return JointType.HandRight; |
832 |
|
833 |
case JointType.ShoulderRight: |
834 |
return JointType.ShoulderLeft; |
835 |
case JointType.ElbowRight: |
836 |
return JointType.ElbowLeft; |
837 |
case JointType.WristRight: |
838 |
return JointType.WristLeft; |
839 |
case JointType.HandRight: |
840 |
return JointType.HandLeft; |
841 |
|
842 |
case JointType.HipLeft: |
843 |
return JointType.HipRight; |
844 |
case JointType.KneeLeft: |
845 |
return JointType.KneeRight; |
846 |
case JointType.AnkleLeft: |
847 |
return JointType.AnkleRight; |
848 |
case JointType.FootLeft: |
849 |
return JointType.FootRight; |
850 |
|
851 |
case JointType.HipRight: |
852 |
return JointType.HipLeft; |
853 |
case JointType.KneeRight: |
854 |
return JointType.KneeLeft; |
855 |
case JointType.AnkleRight: |
856 |
return JointType.AnkleLeft; |
857 |
case JointType.FootRight: |
858 |
return JointType.FootLeft; |
859 |
|
860 |
case JointType.HandTipLeft: |
861 |
return JointType.HandTipRight; |
862 |
case JointType.ThumbLeft: |
863 |
return JointType.ThumbRight; |
864 |
|
865 |
case JointType.HandTipRight: |
866 |
return JointType.HandTipLeft; |
867 |
case JointType.ThumbRight: |
868 |
return JointType.ThumbLeft; |
869 |
} |
870 |
|
871 |
return joint; |
872 |
} |
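
    // Illustrative sketch (not part of the original file): mirroring swaps left/right joints and
    // leaves the spine and head joints unchanged.
    //
    //   KinectInterop.JointType mirrored = KinectInterop.GetMirrorJoint(KinectInterop.JointType.HandLeft);  // HandRight
    //   KinectInterop.JointType same = KinectInterop.GetMirrorJoint(KinectInterop.JointType.Head);          // Head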
873 |
|
874 |
// gets new multi source frame |
875 |
public static bool GetMultiSourceFrame(SensorData sensorData) |
876 |
{ |
877 |
bool bResult = false; |
878 |
|
879 |
if(sensorData.sensorInterface != null) |
880 |
{ |
881 |
bResult = sensorData.sensorInterface.GetMultiSourceFrame(sensorData); |
882 |
} |
883 |
|
884 |
return bResult; |
885 |
} |
886 |
|
887 |
// frees last multi source frame |
888 |
public static void FreeMultiSourceFrame(SensorData sensorData) |
889 |
{ |
890 |
if(sensorData.sensorInterface != null) |
891 |
{ |
892 |
sensorData.sensorInterface.FreeMultiSourceFrame(sensorData); |
893 |
} |
894 |
} |
895 |
|
896 |
// gets matrix values as csv line |
897 |
public static string GetMatrixAsCsv(ref Matrix4x4 mat, char delimiter) |
898 |
{ |
899 |
// create the output string |
900 |
StringBuilder sbBuf = new StringBuilder(); |
901 |
//const char delimiter = ','; |
902 |
|
903 |
sbBuf.Append("km").Append(delimiter); |
904 |
|
905 |
for(int i = 0; i < 4; i++) |
906 |
{ |
907 |
Vector4 vRow = mat.GetRow(i); |
908 |
|
909 |
sbBuf.AppendFormat("{0:F3}", vRow.x).Append(delimiter); |
910 |
sbBuf.AppendFormat("{0:F3}", vRow.y).Append(delimiter); |
911 |
sbBuf.AppendFormat("{0:F3}", vRow.z).Append(delimiter); |
912 |
sbBuf.AppendFormat("{0:F3}", vRow.w).Append(delimiter); |
913 |
} |
914 |
|
915 |
// remove the last delimiter |
916 |
if(sbBuf.Length > 0 && sbBuf[sbBuf.Length - 1] == delimiter) |
917 |
{ |
918 |
sbBuf.Remove(sbBuf.Length - 1, 1); |
919 |
} |
920 |
|
921 |
return sbBuf.ToString(); |
922 |
} |
923 |
|
924 |
// sets matrix values from a csv line |
925 |
public static bool SetMatrixFromCsv(string sCsvLine, ref Matrix4x4 mat) |
926 |
{ |
927 |
if(sCsvLine.Length == 0) |
928 |
return false; |
929 |
|
930 |
// split the csv line in parts |
931 |
char[] delimiters = { ',' }; |
932 |
string[] alCsvParts = sCsvLine.Split(delimiters); |
933 |
|
934 |
if(alCsvParts.Length < 17) |
935 |
return false; |
936 |
if(alCsvParts[0] != "km") |
937 |
return false; |
938 |
|
939 |
int iIndex = 1; |
940 |
for(int i = 0; i < 4; i++) |
941 |
{ |
942 |
float x = 0f, y = 0f, z = 0f, w = 0f; |
943 |
|
944 |
float.TryParse(alCsvParts[iIndex], out x); |
945 |
float.TryParse(alCsvParts[iIndex + 1], out y); |
946 |
float.TryParse(alCsvParts[iIndex + 2], out z); |
947 |
float.TryParse(alCsvParts[iIndex + 3], out w); |
948 |
iIndex += 4; |
949 |
|
950 |
Vector4 vRow = new Vector4(x, y, z, w); |
951 |
mat.SetRow(i, vRow); |
952 |
} |
953 |
|
954 |
return true; |
955 |
} |
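
    // Illustrative sketch (not part of the original file): GetMatrixAsCsv/SetMatrixFromCsv are
    // symmetric, so a matrix can be round-tripped through its "km"-prefixed csv line. Note that
    // SetMatrixFromCsv always splits on ',', so ',' should be used as the delimiter when saving.
    //
    //   Matrix4x4 kinectToWorld = Matrix4x4.identity;
    //   string sLine = KinectInterop.GetMatrixAsCsv(ref kinectToWorld, ',');  // "km,1.000,0.000,..."
    //
    //   Matrix4x4 restored = Matrix4x4.zero;
    //   bool bOk = KinectInterop.SetMatrixFromCsv(sLine, ref restored);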
956 |
|
957 |
// converts current hand data to a single csv line. returns empty string if there is no new data |
958 |
public static string GetHandsDataAsCsv(SensorData sensorData, ref BodyFrameData bodyFrame, ref long liRelTime, char delimiter) |
959 |
{ |
960 |
// check for invalid sensor data and if the frame is still the same |
961 |
if(sensorData == null) |
962 |
return string.Empty; |
963 |
// if(bodyFrame.liRelativeTime == liRelTime) |
964 |
// return string.Empty; |
965 |
|
966 |
// create the output string |
967 |
StringBuilder sbBuf = new StringBuilder(); |
968 |
//const char delimiter = ','; |
969 |
|
970 |
sbBuf.Append("kh").Append(delimiter); |
971 |
sbBuf.Append(bodyFrame.liRelativeTime).Append(delimiter); |
972 |
sbBuf.Append(sensorData.bodyCount).Append(delimiter); |
973 |
|
974 |
// add information for all bodies |
975 |
for(int i = 0; i < sensorData.bodyCount; i++) |
976 |
{ |
977 |
sbBuf.Append(bodyFrame.bodyData[i].bIsTracked).Append(delimiter); |
978 |
|
979 |
if(bodyFrame.bodyData[i].bIsTracked != 0) |
980 |
{ |
981 |
// add information for the tracked body - body-id and hand states |
982 |
sbBuf.Append(bodyFrame.bodyData[i].liTrackingID).Append(delimiter); |
983 |
|
984 |
sbBuf.AppendFormat("{0}", (int)bodyFrame.bodyData[i].leftHandState).Append(delimiter); |
985 |
sbBuf.AppendFormat("{0}", (int)bodyFrame.bodyData[i].leftHandConfidence).Append(delimiter); |
986 |
|
987 |
sbBuf.AppendFormat("{0}", (int)bodyFrame.bodyData[i].rightHandState).Append(delimiter); |
988 |
sbBuf.AppendFormat("{0}", (int)bodyFrame.bodyData[i].rightHandConfidence).Append(delimiter); |
989 |
} |
990 |
} |
991 |
|
992 |
// remove the last delimiter |
993 |
if(sbBuf.Length > 0 && sbBuf[sbBuf.Length - 1] == delimiter) |
994 |
{ |
995 |
sbBuf.Remove(sbBuf.Length - 1, 1); |
996 |
} |
997 |
|
998 |
return sbBuf.ToString(); |
999 |
} |
1000 |
|
1001 |
// sets current hand data from the given csv line. returns true on success, false otherwise |
1002 |
public static bool SetHandsDataFromCsv(string sCsvLine, SensorData sensorData, ref BodyFrameData bodyFrame) |
1003 |
{ |
1004 |
// check for invalid sensor data and for same frame time |
1005 |
if(sensorData == null) |
1006 |
return false; |
1007 |
if(sCsvLine.Length == 0) |
1008 |
return false; |
1009 |
|
1010 |
// split the csv line in parts |
1011 |
char[] delimiters = { ',' }; |
1012 |
string[] alCsvParts = sCsvLine.Split(delimiters); |
1013 |
|
1014 |
if(alCsvParts.Length < 3) |
1015 |
return false; |
1016 |
|
1017 |
// check the id & body count |
1018 |
int bodyCount = 0; |
1019 |
int.TryParse(alCsvParts[2], out bodyCount); |
1020 |
|
1021 |
long liRelTime = 0; |
1022 |
long.TryParse(alCsvParts[1], out liRelTime); |
1023 |
|
1024 |
if(alCsvParts[0] != "kh" || bodyCount == 0 || liRelTime == 0) |
1025 |
return false; |
1026 |
|
1027 |
// check rel time |
1028 |
if(bodyFrame.liRelativeTime > liRelTime) |
1029 |
return false; |
1030 |
|
1031 |
int iIndex = 3; |
1032 |
for(int i = 0; i < bodyCount; i++) |
1033 |
{ |
1034 |
if(alCsvParts.Length < (iIndex + 1)) |
1035 |
return false; |
1036 |
|
1037 |
// update the tracked-flag and body id |
1038 |
short bIsTracked = 0; |
1039 |
long liTrackingID = 0; |
1040 |
|
1041 |
short.TryParse(alCsvParts[iIndex], out bIsTracked); |
1042 |
iIndex++; |
1043 |
|
1044 |
if(bIsTracked != 0 && alCsvParts.Length >= (iIndex + 5)) |
1045 |
{ |
1046 |
long.TryParse(alCsvParts[iIndex], out liTrackingID); |
1047 |
iIndex++; |
1048 |
|
1049 |
int lhState = 0, lhConf = 0, rhState = 0, rhConf = 0; |
1050 |
|
1051 |
int.TryParse(alCsvParts[iIndex++], out lhState); |
1052 |
int.TryParse(alCsvParts[iIndex++], out lhConf); |
1053 |
|
1054 |
int.TryParse(alCsvParts[iIndex++], out rhState); |
1055 |
int.TryParse(alCsvParts[iIndex++], out rhConf); |
1056 |
|
1057 |
if(i < sensorData.bodyCount && bodyFrame.bodyData[i].bIsTracked != 0 && |
1058 |
bodyFrame.bodyData[i].liTrackingID == liTrackingID) |
1059 |
{ |
1060 |
bodyFrame.bodyData[i].leftHandState = (HandState)lhState; |
1061 |
bodyFrame.bodyData[i].leftHandConfidence = (TrackingConfidence)lhConf; |
1062 |
|
1063 |
bodyFrame.bodyData[i].rightHandState = (HandState)rhState; |
1064 |
bodyFrame.bodyData[i].rightHandConfidence = (TrackingConfidence)rhConf; |
1065 |
} |
1066 |
} |
1067 |
} |
1068 |
|
1069 |
return true; |
1070 |
} |
1071 |
|
1072 |
// converts current body frame to a single csv line. returns empty string if there is no new data |
1073 |
public static string GetBodyFrameAsCsv(SensorData sensorData, ref BodyFrameData bodyFrame, ref long liRelTime, ref float fUnityTime, char delimiter) |
1074 |
{ |
1075 |
// check for invalid sensor data and if the frame is still the same |
1076 |
if(sensorData == null) |
1077 |
return string.Empty; |
1078 |
if(bodyFrame.liRelativeTime == liRelTime) |
1079 |
return string.Empty; |
1080 |
|
1081 |
// create the output string |
1082 |
StringBuilder sbBuf = new StringBuilder(); |
1083 |
//const char delimiter = ','; |
1084 |
|
1085 |
sbBuf.Append("kb").Append(delimiter); |
1086 |
sbBuf.Append(bodyFrame.liRelativeTime).Append(delimiter); |
1087 |
|
1088 |
liRelTime = bodyFrame.liRelativeTime; |
1089 |
fUnityTime = Time.time; |
1090 |
|
1091 |
sbBuf.Append(sensorData.bodyCount).Append(delimiter); |
1092 |
sbBuf.Append(sensorData.jointCount).Append(delimiter); |
1093 |
|
1094 |
// add information for all bodies |
1095 |
for(int i = 0; i < sensorData.bodyCount; i++) |
1096 |
{ |
1097 |
sbBuf.Append(bodyFrame.bodyData[i].bIsTracked).Append(delimiter); |
1098 |
|
1099 |
if(bodyFrame.bodyData[i].bIsTracked != 0) |
1100 |
{ |
1101 |
// add information for the tracked body - body-id and joints |
1102 |
sbBuf.Append(bodyFrame.bodyData[i].liTrackingID).Append(delimiter); |
1103 |
|
1104 |
for(int j = 0; j < sensorData.jointCount; j++) |
1105 |
{ |
1106 |
KinectInterop.JointData jointData = bodyFrame.bodyData[i].joint[j]; |
1107 |
|
1108 |
sbBuf.Append((int)jointData.trackingState).Append(delimiter); |
1109 |
|
1110 |
if(jointData.trackingState != TrackingState.NotTracked) |
1111 |
{ |
1112 |
sbBuf.AppendFormat("{0:F3}", jointData.kinectPos.x).Append(delimiter); |
1113 |
sbBuf.AppendFormat("{0:F3}", jointData.kinectPos.y).Append(delimiter); |
1114 |
sbBuf.AppendFormat("{0:F3}", jointData.kinectPos.z).Append(delimiter); |
1115 |
} |
1116 |
} |
1117 |
} |
1118 |
} |
1119 |
|
1120 |
// remove the last delimiter |
1121 |
if(sbBuf.Length > 0 && sbBuf[sbBuf.Length - 1] == delimiter) |
1122 |
{ |
1123 |
sbBuf.Remove(sbBuf.Length - 1, 1); |
1124 |
} |
1125 |
|
1126 |
return sbBuf.ToString(); |
1127 |
} |
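
    // Illustrative sketch (not part of the original file): the layout of the csv line produced above
    // (and parsed back by SetBodyFrameFromCsv below), with ',' as delimiter:
    //
    //   kb,<relativeTime>,<bodyCount>,<jointCount>,
    //      { <bIsTracked>, [ <trackingID>, { <jointTrackingState>, [ <x>,<y>,<z> ] } x jointCount ] } x bodyCount
    //
    // The x/y/z triple is written only for joints whose tracking state is not NotTracked, and the
    // per-body block after <bIsTracked> is written only for tracked bodies.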
1128 |
|
1129 |
// sets current body frame from the given csv line. returns true on success, false otherwise |
1130 |
public static bool SetBodyFrameFromCsv(string sCsvLine, SensorData sensorData, ref BodyFrameData bodyFrame, ref Matrix4x4 kinectToWorld) |
1131 |
{ |
1132 |
// check for invalid sensor data and for same frame time |
1133 |
if(sensorData == null) |
1134 |
return false; |
1135 |
if(sCsvLine.Length == 0) |
1136 |
return false; |
1137 |
|
1138 |
// split the csv line in parts |
1139 |
char[] delimiters = { ',' }; |
1140 |
string[] alCsvParts = sCsvLine.Split(delimiters); |
1141 |
|
1142 |
if(alCsvParts.Length < 4) |
1143 |
return false; |
1144 |
|
1145 |
//// wait for buffer release |
1146 |
//while(sensorData.bodyFrameReady) |
1147 |
//{ |
1148 |
// Sleep(1); |
1149 |
//} |
1150 |
|
1151 |
// check the id, body count & joint count |
1152 |
int bodyCount = 0, jointCount = 0; |
1153 |
int.TryParse(alCsvParts[2], out bodyCount); |
1154 |
int.TryParse(alCsvParts[3], out jointCount); |
1155 |
|
1156 |
long liRelTime = 0; |
1157 |
long.TryParse(alCsvParts[1], out liRelTime); |
1158 |
|
1159 |
if(alCsvParts[0] != "kb" || bodyCount == 0 || jointCount == 0 || liRelTime == 0) |
1160 |
return false; |
1161 |
// if(bodyCount != sensorData.bodyCount || jointCount != sensorData.jointCount) |
1162 |
// return false; |
1163 |
|
1164 |
if (bodyCount != sensorData.bodyCount) |
1165 |
{ |
1166 |
// set the other bodies as not tracked |
1167 |
for (int i = bodyCount; i < sensorData.bodyCount; i++) |
1168 |
{ |
1169 |
bodyFrame.bodyData[i].bIsTracked = 0; |
1170 |
} |
1171 |
} |
1172 |
|
1173 |
// update body frame data |
1174 |
bodyFrame.liPreviousTime = bodyFrame.liRelativeTime; |
1175 |
bodyFrame.liRelativeTime = liRelTime; |
1176 |
|
1177 |
int iIndex = 4; |
1178 |
for(int i = 0; i < bodyCount; i++) |
1179 |
{ |
1180 |
if(alCsvParts.Length < (iIndex + 1)) |
1181 |
return false; |
1182 |
|
1183 |
// update the tracked-flag and body id |
1184 |
short bIsTracked = 0; |
1185 |
long liTrackingID = 0; |
1186 |
|
1187 |
short.TryParse(alCsvParts[iIndex], out bIsTracked); |
1188 |
iIndex++; |
1189 |
|
1190 |
if(bIsTracked != 0 && alCsvParts.Length >= (iIndex + 1)) |
1191 |
{ |
1192 |
long.TryParse(alCsvParts[iIndex], out liTrackingID); |
1193 |
iIndex++; |
1194 |
|
1195 |
if(liTrackingID == 0) |
1196 |
{ |
1197 |
bIsTracked = 0; |
1198 |
} |
1199 |
} |
1200 |
|
1201 |
bodyFrame.bodyData[i].bIsTracked = bIsTracked; |
1202 |
bodyFrame.bodyData[i].liTrackingID = liTrackingID; |
1203 |
|
1204 |
if(bIsTracked != 0) |
1205 |
{ |
1206 |
if (jointCount != sensorData.jointCount) |
1207 |
{ |
1208 |
// set the other joints as not tracked |
1209 |
for (int j = jointCount; j < sensorData.jointCount; j++) |
1210 |
{ |
1211 |
bodyFrame.bodyData [i].joint [j].trackingState = TrackingState.NotTracked; |
1212 |
} |
1213 |
} |
1214 |
|
1215 |
// update joints' data |
1216 |
for(int j = 0; j < jointCount; j++) |
1217 |
{ |
1218 |
KinectInterop.JointData jointData = bodyFrame.bodyData[i].joint[j]; |
1219 |
int iTrackingState = 0; |
1220 |
|
1221 |
if(alCsvParts.Length >= (iIndex + 1)) |
1222 |
{ |
1223 |
int.TryParse(alCsvParts[iIndex], out iTrackingState); |
1224 |
iIndex++; |
1225 |
|
1226 |
jointData.trackingState = (KinectInterop.TrackingState)iTrackingState; |
1227 |
|
1228 |
if(iTrackingState != (int)TrackingState.NotTracked && alCsvParts.Length >= (iIndex + 3)) |
1229 |
{ |
1230 |
float x = 0f, y = 0f, z = 0f; |
1231 |
|
1232 |
float.TryParse(alCsvParts[iIndex], out x); |
1233 |
float.TryParse(alCsvParts[iIndex + 1], out y); |
1234 |
float.TryParse(alCsvParts[iIndex + 2], out z); |
1235 |
iIndex += 3; |
1236 |
|
1237 |
jointData.kinectPos = new Vector3(x, y, z); |
1238 |
} |
1239 |
else |
1240 |
{ |
1241 |
jointData.kinectPos = Vector3.zero; |
1242 |
} |
1243 |
|
1244 |
jointData.position = kinectToWorld.MultiplyPoint3x4(jointData.kinectPos); |
1245 |
jointData.orientation = Quaternion.identity; |
1246 |
|
1247 |
if(j == 0) |
1248 |
{ |
1249 |
// set body position |
1250 |
bodyFrame.bodyData[i].position = jointData.position; |
1251 |
bodyFrame.bodyData[i].orientation = jointData.orientation; |
1252 |
} |
1253 |
} |
1254 |
|
1255 |
bodyFrame.bodyData[i].joint[j] = jointData; |
1256 |
} |
1257 |
} |
1258 |
} |
1259 |
|
1260 |
// calculate bone directions |
1261 |
CalcBodyFrameBoneDirs(sensorData, ref bodyFrame); |
1262 |
|
1263 |
// frame is ready |
1264 |
lock(sensorData.bodyFrameLock) |
1265 |
{ |
1266 |
sensorData.bodyFrameReady = true; |
1267 |
} |
1268 |
|
1269 |
return true; |
1270 |
} |
1271 |
|
1272 |
// Polls for new skeleton data |
1273 |
public static bool PollBodyFrame(SensorData sensorData, ref BodyFrameData bodyFrame, ref Matrix4x4 kinectToWorld, bool bIgnoreJointZ) |
1274 |
{ |
1275 |
bool bNewFrame = false; |
1276 |
|
1277 |
if(sensorData.sensorInterface != null) |
1278 |
{ |
1279 |
//// wait for buffer release |
1280 |
//while(sensorData.bodyFrameReady) |
1281 |
//{ |
1282 |
// Sleep(1); |
1283 |
//} |
1284 |
|
1285 |
bNewFrame = sensorData.sensorInterface.PollBodyFrame(sensorData, ref bodyFrame, ref kinectToWorld, bIgnoreJointZ); |
1286 |
|
1287 |
if(bNewFrame) |
1288 |
{ |
1289 |
if(bodyFrame.bTurnAnalisys && bodyFrame.liPreviousTime > 0) |
1290 |
{ |
1291 |
CalcBodyFrameJointVels(sensorData, ref bodyFrame); |
1292 |
} |
1293 |
|
1294 |
CalcBodyFrameBoneDirs(sensorData, ref bodyFrame); |
1295 |
|
1296 |
// frame is ready |
1297 |
lock(sensorData.bodyFrameLock) |
1298 |
{ |
1299 |
sensorData.bodyFrameReady = true; |
1300 |
} |
1301 |
} |
1302 |
} |
1303 |
|
1304 |
return bNewFrame; |
1305 |
} |
1306 |
|
1307 |
// calculates joint velocities in a body frame |
1308 |
private static void CalcBodyFrameJointVels(SensorData sensorData, ref BodyFrameData bodyFrame) |
1309 |
{ |
1310 |
// calculate the inter-frame time |
1311 |
float frameTime = (float)(bodyFrame.liRelativeTime - bodyFrame.liPreviousTime) / 100000000000f; |
1312 |
|
1313 |
for(int i = 0; i < sensorData.bodyCount; i++) |
1314 |
{ |
1315 |
if(bodyFrame.bodyData[i].bIsTracked != 0) |
1316 |
{ |
1317 |
for(int j = 0; j < sensorData.jointCount; j++) |
1318 |
{ |
1319 |
KinectInterop.JointData jointData = bodyFrame.bodyData[i].joint[j]; |
1320 |
|
1321 |
int p = (int)sensorData.sensorInterface.GetParentJoint((KinectInterop.JointType)j); |
1322 |
Vector3 parentPos = bodyFrame.bodyData[i].joint[p].position; |
1323 |
|
1324 |
jointData.posRel = jointData.position - parentPos; |
1325 |
jointData.posVel = frameTime > 0f ? (jointData.position - jointData.posPrev) / frameTime : Vector3.zero; |
1326 |
jointData.posPrev = jointData.position; |
1327 |
|
1328 |
bodyFrame.bodyData[i].joint[j] = jointData; |
1329 |
} |
1330 |
} |
1331 |
} |
1332 |
|
1333 |
} |
1334 |
|
1335 |
// Calculates all valid bone directions in a body frame |
1336 |
private static void CalcBodyFrameBoneDirs(SensorData sensorData, ref BodyFrameData bodyFrame) |
1337 |
{ |
1338 |
for(int i = 0; i < sensorData.bodyCount; i++) |
1339 |
{ |
1340 |
if(bodyFrame.bodyData[i].bIsTracked != 0) |
1341 |
{ |
1342 |
for(int j = 0; j < sensorData.jointCount; j++) |
1343 |
{ |
1344 |
if(j == 0) |
1345 |
{ |
1346 |
bodyFrame.bodyData[i].joint[j].direction = Vector3.zero; |
1347 |
} |
1348 |
else |
1349 |
{ |
1350 |
int jParent = (int)sensorData.sensorInterface.GetParentJoint((KinectInterop.JointType)j); |
1351 |
|
1352 |
if(bodyFrame.bodyData[i].joint[j].trackingState != TrackingState.NotTracked && |
1353 |
bodyFrame.bodyData[i].joint[jParent].trackingState != TrackingState.NotTracked) |
1354 |
{ |
1355 |
bodyFrame.bodyData[i].joint[j].direction = |
1356 |
bodyFrame.bodyData[i].joint[j].position - bodyFrame.bodyData[i].joint[jParent].position; |
1357 |
} |
1358 |
} |
1359 |
} |
1360 |
} |
1361 |
} |
1362 |
|
1363 |
} |
1364 |
|
1365 |
// Recalculates bone directions for the given body |
1366 |
public static void RecalcBoneDirs(SensorData sensorData, ref BodyData bodyData) |
1367 |
{ |
1368 |
for(int j = 0; j < bodyData.joint.Length; j++) |
1369 |
{ |
1370 |
if(j == 0) |
1371 |
{ |
1372 |
bodyData.joint[j].direction = Vector3.zero; |
1373 |
} |
1374 |
else |
1375 |
{ |
1376 |
int jParent = (int)sensorData.sensorInterface.GetParentJoint((KinectInterop.JointType)j); |
1377 |
|
1378 |
if(bodyData.joint[j].trackingState != TrackingState.NotTracked && |
1379 |
bodyData.joint[jParent].trackingState != TrackingState.NotTracked) |
1380 |
{ |
1381 |
bodyData.joint[j].direction = bodyData.joint[j].position - bodyData.joint[jParent].position; |
1382 |
} |
1383 |
} |
1384 |
} |
1385 |
} |
1386 |
|
1387 |
// Polls for new color frame data |
1388 |
public static bool PollColorFrame(SensorData sensorData) |
1389 |
{ |
1390 |
bool bNewFrame = false; |
1391 |
|
1392 |
if(sensorData.sensorInterface != null && !sensorData.isPlayModeEnabled) |
1393 |
{ |
1394 |
//// wait for buffer release |
1395 |
//while(sensorData.colorImageBufferReady) |
1396 |
//{ |
1397 |
// Sleep(1); |
1398 |
//} |
1399 |
|
1400 |
bNewFrame = sensorData.sensorInterface.PollColorFrame(sensorData); |
1401 |
|
1402 |
if(bNewFrame) |
1403 |
{ |
1404 |
// buffer is ready |
1405 |
lock(sensorData.colorImageBufferLock) |
1406 |
{ |
1407 |
sensorData.colorImageBufferReady = true; |
1408 |
} |
1409 |
} |
1410 |
} |
1411 |
|
1412 |
return bNewFrame; |
1413 |
} |
1414 |
|
1415 |
// Renders color texture |
1416 |
public static void RenderColorTexture(SensorData sensorData) |
1417 |
{ |
1418 |
if(sensorData.colorImageBufferReady && sensorData.colorImageTexture) |
1419 |
{ |
1420 |
sensorData.colorImageTexture.LoadRawTextureData(sensorData.colorImage); |
1421 |
sensorData.colorImageTexture.Apply(); |
1422 |
|
1423 |
// buffer is released |
1424 |
lock(sensorData.colorImageBufferLock) |
1425 |
{ |
1426 |
sensorData.colorImageBufferReady = false; |
1427 |
} |
1428 |
} |
1429 |
} |
1430 |
|
1431 |
// Polls for new depth frame data |
1432 |
public static bool PollDepthFrame(SensorData sensorData, KinectManager.UserMapType userMapType, |
1433 |
bool bLimitedUsers, ICollection<int> alTrackedIndexes) |
1434 |
{ |
1435 |
bool bNewFrame = false; |
1436 |
|
1437 |
if(sensorData.sensorInterface != null && !sensorData.isPlayModeEnabled) |
1438 |
{ |
1439 |
//// wait for buffer releases |
1440 |
//while(sensorData.bodyIndexBufferReady || sensorData.depthImageBufferReady) |
1441 |
//{ |
1442 |
// Sleep(1); |
1443 |
//} |
1444 |
|
1445 |
bNewFrame = sensorData.sensorInterface.PollDepthFrame(sensorData); |
1446 |
|
1447 |
if(bNewFrame) |
1448 |
{ |
1449 |
if(userMapType != KinectManager.UserMapType.RawUserDepth && sensorData.bodyIndexBuffer != null) |
1450 |
{ |
1451 |
byte btSelBI = sensorData.selectedBodyIndex; |
1452 |
int iBodyIndexLength = sensorData.bodyIndexImage.Length; |
1453 |
|
1454 |
// convert the body indices to string |
1455 |
string sTrackedIndices = string.Empty; |
1456 |
|
1457 |
if (bLimitedUsers) |
1458 |
{ |
1459 |
foreach(int bodyIndex in alTrackedIndexes) |
1460 |
{ |
1461 |
sTrackedIndices += (char)(0x30 + bodyIndex); |
1462 |
} |
1463 |
|
1464 |
//Debug.Log ("Tracked indices: " + sTrackedIndices); |
1465 |
} |
1466 |
|
1467 |
// create body index texture |
1468 |
if(sensorData.bodyIndexBufferData == null) |
1469 |
{ |
1470 |
sensorData.bodyIndexBufferData = new float[iBodyIndexLength]; |
1471 |
} |
1472 |
|
1473 |
for (int i = 0; i < iBodyIndexLength; i++) |
1474 |
{ |
1475 |
byte btBufBI = sensorData.bodyIndexImage[i]; |
1476 |
|
1477 |
bool bUserTracked = btSelBI != 255 ? btSelBI == btBufBI : |
1478 |
//(bLimitedUsers ? alTrackedIndexes.Contains((int)btBufBI) : btBufBI != 255); |
1479 |
(bLimitedUsers ? sTrackedIndices.IndexOf((char)(0x30 + btBufBI)) >= 0 : btBufBI != 255); |
1480 |
|
1481 |
if(bUserTracked) |
1482 |
{ |
1483 |
sensorData.bodyIndexBufferData[i] = (float)btBufBI; |
1484 |
} |
1485 |
else |
1486 |
{ |
1487 |
sensorData.bodyIndexBufferData[i] = 255f; |
1488 |
} |
1489 |
} |
1490 |
|
1491 |
// buffer is ready |
1492 |
lock(sensorData.bodyIndexBufferLock) |
1493 |
{ |
1494 |
sensorData.bodyIndexBufferReady = true; |
1495 |
} |
1496 |
} |
1497 |
|
1498 |
if(sensorData.depthImageBuffer != null && sensorData.depthHistBuffer != null && |
1499 |
userMapType == KinectManager.UserMapType.UserTexture) |
1500 |
{ |
1501 |
// create depth texture |
1502 |
if(sensorData.depthImageBufferData == null) |
1503 |
{ |
1504 |
sensorData.depthImageBufferData = new float[sensorData.depthImage.Length]; |
1505 |
sensorData.depthHistBufferData = new int[5001]; |
1506 |
sensorData.equalHistBufferData = new float[sensorData.depthHistBufferData.Length]; |
1507 |
} |
1508 |
|
1509 |
Array.Clear(sensorData.depthHistBufferData, 0, sensorData.depthHistBufferData.Length); |
1510 |
Array.Clear(sensorData.equalHistBufferData, 0, sensorData.equalHistBufferData.Length); |
1511 |
sensorData.depthHistTotalPoints = 0; |
1512 |
|
1513 |
for (int i = 0; i < sensorData.depthImage.Length; i++) |
1514 |
{ |
1515 |
int depth = sensorData.depthImage[i] < 5000 ? (int)sensorData.depthImage[i] : 5000; |
1516 |
sensorData.depthImageBufferData[i] = (float)depth; |
1517 |
|
1518 |
if(sensorData.bodyIndexImage[i] != 255) |
1519 |
{ |
1520 |
sensorData.depthHistBufferData[depth]++; |
1521 |
sensorData.depthHistTotalPoints++; |
1522 |
} |
1523 |
} |
1524 |
|
1525 |
sensorData.equalHistBufferData[0] = (float)sensorData.depthHistBufferData[0]; |
1526 |
for(int i = 1; i < sensorData.depthHistBufferData.Length; i++) |
1527 |
{ |
1528 |
sensorData.equalHistBufferData[i] = sensorData.equalHistBufferData[i - 1] + (float)sensorData.depthHistBufferData[i]; |
1529 |
} |
1530 |
|
1531 |
// buffer is ready |
1532 |
lock(sensorData.depthImageBufferLock) |
1533 |
{ |
1534 |
sensorData.depthImageBufferReady = true; |
1535 |
} |
1536 |
} |
1537 |
|
1538 |
if(sensorData.color2DepthCoords != null) |
1539 |
{ |
1540 |
//// wait for buffer release |
1541 |
//while(sensorData.depthCoordsBufferReady) |
1542 |
//{ |
1543 |
// Sleep(1); |
1544 |
//} |
1545 |
|
1546 |
if(!MapColorFrameToDepthCoords(sensorData, ref sensorData.color2DepthCoords)) |
1547 |
{ |
1548 |
sensorData.color2DepthCoords = null; |
1549 |
} |
1550 |
|
1551 |
// buffer is ready |
1552 |
lock(sensorData.depthCoordsBufferLock) |
1553 |
{ |
1554 |
sensorData.depthCoordsBufferReady = (sensorData.color2DepthCoords != null); |
1555 |
} |
1556 |
} |
1557 |
else if(sensorData.depth2ColorCoords != null && (userMapType == KinectManager.UserMapType.CutOutTexture || |
1558 |
(sensorData.sensorInterface.IsBackgroundRemovalActive() && |
1559 |
sensorData.sensorInterface.GetSensorPlatform() != KinectInterop.DepthSensorPlatform.KinectSDKv1))) |
1560 |
{ |
1561 |
//// wait for buffer release |
1562 |
//while(sensorData.depthCoordsBufferReady) |
1563 |
//{ |
1564 |
// Sleep(1); |
1565 |
//} |
1566 |
|
1567 |
if(!MapDepthFrameToColorCoords(sensorData, ref sensorData.depth2ColorCoords)) |
1568 |
{ |
1569 |
sensorData.depth2ColorCoords = null; |
1570 |
} |
1571 |
|
1572 |
// buffer is ready |
1573 |
lock(sensorData.depthCoordsBufferLock) |
1574 |
{ |
1575 |
sensorData.depthCoordsBufferReady = (sensorData.depth2ColorCoords != null); |
1576 |
} |
1577 |
} |
1578 |
|
1579 |
if(sensorData.depth2SpaceCoords != null) |
1580 |
{ |
1581 |
//// wait for buffer release |
1582 |
//while(sensorData.spaceCoordsBufferReady) |
1583 |
//{ |
1584 |
// Sleep(1); |
1585 |
//} |
1586 |
|
1587 |
if(!MapDepthFrameToSpaceCoords(sensorData, ref sensorData.depth2SpaceCoords)) |
1588 |
{ |
1589 |
sensorData.depth2SpaceCoords = null; |
1590 |
} |
1591 |
|
1592 |
// buffer is ready |
1593 |
lock(sensorData.spaceCoordsBufferLock) |
1594 |
{ |
1595 |
sensorData.spaceCoordsBufferReady = (sensorData.depth2SpaceCoords != null); |
1596 |
} |
1597 |
} |
1598 |
|
1599 |
} |
1600 |
} |
1601 |
|
1602 |
return bNewFrame; |
1603 |
} |
1604 |
|
1605 |
// Renders body-index texture |
1606 |
public static void RenderBodyIndexTexture(SensorData sensorData, KinectManager.UserMapType userMapType) |
1607 |
{ |
1608 |
// check if buffer is ready |
1609 |
if(sensorData.bodyIndexBufferReady) |
1610 |
{ |
1611 |
sensorData.bodyIndexBuffer.SetData(sensorData.bodyIndexBufferData); |
1612 |
Graphics.Blit(null, sensorData.bodyIndexTexture, sensorData.bodyIndexMaterial); |
1613 |
|
1614 |
if(userMapType != KinectManager.UserMapType.UserTexture || !sensorData.depthImageBufferReady) |
1615 |
{ |
1616 |
// buffer is released |
1617 |
lock(sensorData.bodyIndexBufferLock) |
1618 |
{ |
1619 |
sensorData.bodyIndexBufferReady = false; |
1620 |
} |
1621 |
} |
1622 |
} |
1623 |
} |
1624 |
|
1625 |
// Renders depth image texture |
1626 |
public static void RenderDepthImageTexture(SensorData sensorData) |
1627 |
{ |
1628 |
if(sensorData.depthImageBufferReady) |
1629 |
{ |
1630 |
sensorData.depthImageMaterial.SetFloat("_TotalPoints", (float)sensorData.depthHistTotalPoints); |
1631 |
sensorData.depthImageMaterial.SetInt("_FirstUserIndex", sensorData.firstUserIndex); |
1632 |
sensorData.depthImageBuffer.SetData(sensorData.depthImageBufferData); |
1633 |
sensorData.depthHistBuffer.SetData(sensorData.equalHistBufferData); |
1634 |
|
1635 |
Graphics.Blit(sensorData.bodyIndexTexture, sensorData.depthImageTexture, sensorData.depthImageMaterial); |
1636 |
|
1637 |
// release the buffers for the next poll |
1638 |
lock(sensorData.depthImageBufferLock) |
1639 |
{ |
1640 |
sensorData.depthImageBufferReady = false; |
1641 |
} |
1642 |
|
1643 |
lock(sensorData.bodyIndexBufferLock) |
1644 |
{ |
1645 |
sensorData.bodyIndexBufferReady = false; |
1646 |
} |
1647 |
} |
1648 |
} |
1649 |
|
1650 |
// renders depth2color texture |
1651 |
public static bool RenderDepth2ColorTex(SensorData sensorData) |
1652 |
{ |
1653 |
if(sensorData.depth2ColorMaterial != null && sensorData.depth2ColorCoords != null && sensorData.depthCoordsBufferReady) |
1654 |
{ |
1655 |
sensorData.depth2ColorBuffer.SetData(sensorData.depth2ColorCoords); |
1656 |
|
1657 |
sensorData.depth2ColorMaterial.SetTexture("_BodyTex", sensorData.bodyIndexTexture); |
1658 |
sensorData.depth2ColorMaterial.SetTexture("_ColorTex", sensorData.colorImageTexture); |
1659 |
|
1660 |
Graphics.Blit(null, sensorData.depth2ColorTexture, sensorData.depth2ColorMaterial); |
1661 |
|
1662 |
// buffer is released |
1663 |
lock(sensorData.depthCoordsBufferLock) |
1664 |
{ |
1665 |
sensorData.depthCoordsBufferReady = false; |
1666 |
} |
1667 |
|
1668 |
return true; |
1669 |
} |
1670 |
|
1671 |
return false; |
1672 |
} |
1673 |
|
1674 |
// Polls for new infrared frame data |
1675 |
public static bool PollInfraredFrame(SensorData sensorData) |
1676 |
{ |
1677 |
bool bNewFrame = false; |
1678 |
|
1679 |
if(sensorData.sensorInterface != null && !sensorData.isPlayModeEnabled) |
1680 |
{ |
1681 |
bNewFrame = sensorData.sensorInterface.PollInfraredFrame(sensorData); |
1682 |
} |
1683 |
|
1684 |
return bNewFrame; |
1685 |
} |
1686 |
|
1687 |
// returns depth frame coordinates for the given 3d Kinect-space point |
1688 |
public static Vector2 MapSpacePointToDepthCoords(SensorData sensorData, Vector3 kinectPos) |
1689 |
{ |
1690 |
Vector2 vPoint = Vector2.zero; |
1691 |
|
1692 |
if(sensorData.sensorInterface != null) |
1693 |
{ |
1694 |
vPoint = sensorData.sensorInterface.MapSpacePointToDepthCoords(sensorData, kinectPos); |
1695 |
} |
1696 |
|
1697 |
return vPoint; |
1698 |
} |
1699 |
|
1700 |
// returns 3d coordinates for the given depth-map point |
1701 |
public static Vector3 MapDepthPointToSpaceCoords(SensorData sensorData, Vector2 depthPos, ushort depthVal) |
1702 |
{ |
1703 |
Vector3 vPoint = Vector3.zero; |
1704 |
|
1705 |
if (sensorData.depth2SpaceCoords != null) |
1706 |
{ |
1707 |
int pIndex = (int)depthPos.y * sensorData.depthImageWidth + (int)depthPos.x; |
1708 |
if (pIndex >= 0 && pIndex < sensorData.depth2SpaceCoords.Length) |
1709 |
return sensorData.depth2SpaceCoords[pIndex]; |
1710 |
} |
1711 |
|
1712 |
if(sensorData.sensorInterface != null) |
1713 |
{ |
1714 |
vPoint = sensorData.sensorInterface.MapDepthPointToSpaceCoords(sensorData, depthPos, depthVal); |
1715 |
} |
1716 |
|
1717 |
return vPoint; |
1718 |
} |
1719 |
|
1720 |
// estimates space coordinates for the current depth frame |
1721 |
public static bool MapDepthFrameToSpaceCoords(SensorData sensorData, ref Vector3[] vSpaceCoords) |
1722 |
{ |
1723 |
bool bResult = false; |
1724 |
|
1725 |
if(sensorData.sensorInterface != null) |
1726 |
{ |
1727 |
bResult = sensorData.sensorInterface.MapDepthFrameToSpaceCoords(sensorData, ref vSpaceCoords); |
1728 |
} |
1729 |
|
1730 |
return bResult; |
1731 |
} |
1732 |
|
1733 |
// returns color-map coordinates for the given depth point |
1734 |
public static Vector2 MapDepthPointToColorCoords(SensorData sensorData, Vector2 depthPos, ushort depthVal) |
1735 |
{ |
1736 |
Vector2 vPoint = Vector2.zero; |
1737 |
|
1738 |
if (sensorData.depth2ColorCoords != null) |
1739 |
{ |
1740 |
int pIndex = (int)depthPos.y * sensorData.depthImageWidth + (int)depthPos.x; |
1741 |
if (pIndex >= 0 && pIndex < sensorData.depth2ColorCoords.Length) |
1742 |
return sensorData.depth2ColorCoords[pIndex]; |
1743 |
} |
1744 |
|
1745 |
if(sensorData.sensorInterface != null) |
1746 |
{ |
1747 |
vPoint = sensorData.sensorInterface.MapDepthPointToColorCoords(sensorData, depthPos, depthVal); |
1748 |
} |
1749 |
|
1750 |
return vPoint; |
1751 |
} |
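// Example sketch: mapping a depth pixel to its color-image coordinates, e.g. for overlays.
// The infinity check mirrors the convention used elsewhere in this class for unmappable pixels.
//
//   Vector2 depthPos = new Vector2(256f, 212f);
//   ushort depthVal = sensorData.depthImage[(int)depthPos.y * sensorData.depthImageWidth + (int)depthPos.x];
//   Vector2 colorPos = KinectInterop.MapDepthPointToColorCoords(sensorData, depthPos, depthVal);
//   if (!float.IsInfinity(colorPos.x) && !float.IsInfinity(colorPos.y))
//   {
//       // colorPos is a valid pixel coordinate within the color image
//   }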
1752 |
|
1753 |
// estimates color-map coordinates for the current depth frame |
1754 |
public static bool MapDepthFrameToColorCoords(SensorData sensorData, ref Vector2[] vColorCoords) |
1755 |
{ |
1756 |
bool bResult = false; |
1757 |
|
1758 |
if(sensorData.sensorInterface != null) |
1759 |
{ |
1760 |
bResult = sensorData.sensorInterface.MapDepthFrameToColorCoords(sensorData, ref vColorCoords); |
1761 |
} |
1762 |
|
1763 |
return bResult; |
1764 |
} |
1765 |
|
1766 |
// estimates depth-map coordinates for the current color frame |
1767 |
public static bool MapColorFrameToDepthCoords(SensorData sensorData, ref Vector2[] vDepthCoords) |
1768 |
{ |
1769 |
bool bResult = false; |
1770 |
|
1771 |
if(sensorData.sensorInterface != null) |
1772 |
{ |
1773 |
bResult = sensorData.sensorInterface.MapColorFrameToDepthCoords(sensorData, ref vDepthCoords); |
1774 |
} |
1775 |
|
1776 |
return bResult; |
1777 |
} |
1778 |
|
1779 |
// estimates depth-map coordinates for the given color coords |
1780 |
public static Vector2 MapColorPointToDepthCoords(SensorData sensorData, Vector2 colorPos, bool bReadDepthCoordsIfNeeded) |
1781 |
{ |
1782 |
Vector2 vPoint = Vector2.zero; |
1783 |
|
1784 |
if(sensorData.sensorInterface != null && !float.IsInfinity(colorPos.x) && !float.IsInfinity(colorPos.y)) |
1785 |
{ |
1786 |
int cIndex = (int)colorPos.y * sensorData.colorImageWidth + (int)colorPos.x; |
1787 |
|
1788 |
if(sensorData.color2DepthCoords != null) |
1789 |
{ |
1790 |
if (cIndex >= 0 && cIndex < sensorData.color2DepthCoords.Length) |
1791 |
{ |
1792 |
vPoint = sensorData.color2DepthCoords[cIndex]; |
1793 |
} |
1794 |
} |
1795 |
else if(bReadDepthCoordsIfNeeded) |
1796 |
{ |
1797 |
Vector2[] vDepthCoords = new Vector2[sensorData.colorImageWidth * sensorData.colorImageHeight]; |
1798 |
|
1799 |
if(MapColorFrameToDepthCoords(sensorData, ref vDepthCoords)) |
1800 |
{ |
1801 |
if (cIndex >= 0 && cIndex < vDepthCoords.Length) |
1802 |
{ |
1803 |
vPoint = vDepthCoords[cIndex]; |
1804 |
} |
1805 |
} |
1806 |
|
1807 |
vDepthCoords = null; |
1808 |
} |
1809 |
} |
1810 |
|
1811 |
return vPoint; |
1812 |
} |
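// Example sketch: looking up the depth pixel behind a color pixel. Passing true for
// bReadDepthCoordsIfNeeded lets the method compute the full color-to-depth mapping on demand,
// which is relatively expensive, so results are best cached by the caller.
//
//   Vector2 colorPos = new Vector2(960f, 540f);   // e.g. the center of a 1920x1080 color frame
//   Vector2 depthPos = KinectInterop.MapColorPointToDepthCoords(sensorData, colorPos, true);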
1813 |
|
1814 |
// draws a rectangle on texture-2d |
1815 |
public static void DrawRect(Texture2D a_Texture, Rect a_rect, Color a_Color) |
1816 |
{ |
1817 |
Vector2 pt1, pt2; |
1818 |
|
1819 |
// bottom |
1820 |
pt1.x = a_rect.x; pt1.y = a_rect.y; |
1821 |
pt2.x = a_rect.x + a_rect.width - 1; pt2.y = pt1.y; |
1822 |
DrawLine(a_Texture, (int)pt1.x, (int)pt1.y, (int)pt2.x, (int)pt2.y, a_Color); |
1823 |
|
1824 |
// right |
1825 |
pt1.x = pt2.x; pt1.y = pt2.y; |
1826 |
pt2.x = pt1.x; pt2.y = a_rect.y + a_rect.height - 1; |
1827 |
DrawLine(a_Texture, (int)pt1.x, (int)pt1.y, (int)pt2.x, (int)pt2.y, a_Color); |
1828 |
|
1829 |
// top |
1830 |
pt1.x = pt2.x; pt1.y = pt2.y; |
1831 |
pt2.x = a_rect.x; pt2.y = pt1.y; |
1832 |
DrawLine(a_Texture, (int)pt1.x, (int)pt1.y, (int)pt2.x, (int)pt2.y, a_Color); |
1833 |
|
1834 |
// left |
1835 |
pt1.x = pt2.x; pt1.y = pt2.y; |
1836 |
pt2.x = pt1.x; pt2.y = a_rect.y; |
1837 |
DrawLine(a_Texture, (int)pt1.x, (int)pt1.y, (int)pt2.x, (int)pt2.y, a_Color); |
1838 |
} |
1839 |
|
1840 |
// draws a line on texture-2d |
1841 |
public static void DrawLine(Texture2D a_Texture, int x1, int y1, int x2, int y2, Color a_Color) |
1842 |
{ |
1843 |
int width = a_Texture.width; |
1844 |
int height = a_Texture.height; |
1845 |
|
1846 |
int dy = y2 - y1; |
1847 |
int dx = x2 - x1; |
1848 |
|
1849 |
int stepy = 1; |
1850 |
if (dy < 0) |
1851 |
{ |
1852 |
dy = -dy; |
1853 |
stepy = -1; |
1854 |
} |
1855 |
|
1856 |
int stepx = 1; |
1857 |
if (dx < 0) |
1858 |
{ |
1859 |
dx = -dx; |
1860 |
stepx = -1; |
1861 |
} |
1862 |
|
1863 |
dy <<= 1; |
1864 |
dx <<= 1; |
1865 |
|
1866 |
if(x1 >= 0 && x1 < width && y1 >= 0 && y1 < height) |
1867 |
for(int x = -1; x <= 1; x++) |
1868 |
for(int y = -1; y <= 1; y++) |
1869 |
a_Texture.SetPixel(x1 + x, y1 + y, a_Color); |
1870 |
|
1871 |
if (dx > dy) |
1872 |
{ |
1873 |
int fraction = dy - (dx >> 1); |
1874 |
|
1875 |
while (x1 != x2) |
1876 |
{ |
1877 |
if (fraction >= 0) |
1878 |
{ |
1879 |
y1 += stepy; |
1880 |
fraction -= dx; |
1881 |
} |
1882 |
|
1883 |
x1 += stepx; |
1884 |
fraction += dy; |
1885 |
|
1886 |
if(x1 >= 0 && x1 < width && y1 >= 0 && y1 < height) |
1887 |
for(int x = -1; x <= 1; x++) |
1888 |
for(int y = -1; y <= 1; y++) |
1889 |
a_Texture.SetPixel(x1 + x, y1 + y, a_Color); |
1890 |
} |
1891 |
} |
1892 |
else |
1893 |
{ |
1894 |
int fraction = dx - (dy >> 1); |
1895 |
|
1896 |
while (y1 != y2) |
1897 |
{ |
1898 |
if (fraction >= 0) |
1899 |
{ |
1900 |
x1 += stepx; |
1901 |
fraction -= dy; |
1902 |
} |
1903 |
|
1904 |
y1 += stepy; |
1905 |
fraction += dx; |
1906 |
|
1907 |
if(x1 >= 0 && x1 < width && y1 >= 0 && y1 < height) |
1908 |
for(int x = -1; x <= 1; x++) |
1909 |
for(int y = -1; y <= 1; y++) |
1910 |
a_Texture.SetPixel(x1 + x, y1 + y, a_Color); |
1911 |
} |
1912 |
} |
1913 |
|
1914 |
} |
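// Example sketch: DrawRect/DrawLine only call SetPixel, so the caller is expected to apply the
// changes afterwards. The texture name and sizes below are assumptions, not from the original code.
//
//   Texture2D overlayTex = new Texture2D(512, 424, TextureFormat.ARGB32, false);
//   KinectInterop.DrawRect(overlayTex, new Rect(100, 100, 150, 200), Color.green);
//   overlayTex.Apply();   // upload the modified pixels to the GPU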
1915 |
|
1916 |
// copies the source file to the target, if the target is missing or differs in size |
1917 |
public static bool CopyFile(string sourceFilePath, string targetFilePath, ref bool bOneCopied, ref bool bAllCopied) |
1918 |
{ |
1919 |
#if !UNITY_WSA |
1920 |
FileInfo sourceFile = new FileInfo(sourceFilePath); |
1921 |
if(!sourceFile.Exists) |
1922 |
{ |
1923 |
return false; |
1924 |
} |
1925 |
|
1926 |
FileInfo targetFile = new FileInfo(targetFilePath); |
1927 |
if(!targetFile.Directory.Exists) |
1928 |
{ |
1929 |
targetFile.Directory.Create(); |
1930 |
} |
1931 |
|
1932 |
if(!targetFile.Exists || targetFile.Length != sourceFile.Length) |
1933 |
{ |
1934 |
Debug.Log("Copying " + sourceFile.Name + "..."); |
1935 |
File.Copy(sourceFilePath, targetFilePath, true);  // overwrite, in case an outdated copy exists |
1936 |
|
1937 |
bool bFileCopied = File.Exists(targetFilePath); |
1938 |
|
1939 |
bOneCopied = bOneCopied || bFileCopied; |
1940 |
bAllCopied = bAllCopied && bFileCopied; |
1941 |
|
1942 |
return bFileCopied; |
1943 |
} |
1944 |
#endif |
1945 |
|
1946 |
return false; |
1947 |
} |
1948 |
|
1949 |
// Copies a resource file to the target, if the target is missing or differs in size |
1950 |
public static bool CopyResourceFile(string targetFilePath, string resFileName, ref bool bOneCopied, ref bool bAllCopied) |
1951 |
{ |
1952 |
#if !UNITY_WSA |
1953 |
TextAsset textRes = Resources.Load(resFileName, typeof(TextAsset)) as TextAsset; |
1954 |
if(textRes == null) |
1955 |
{ |
1956 |
bOneCopied = false; |
1957 |
bAllCopied = false; |
1958 |
|
1959 |
return false; |
1960 |
} |
1961 |
|
1962 |
FileInfo targetFile = new FileInfo(targetFilePath); |
1963 |
if(!targetFile.Directory.Exists) |
1964 |
{ |
1965 |
targetFile.Directory.Create(); |
1966 |
} |
1967 |
|
1968 |
if(!targetFile.Exists || targetFile.Length != textRes.bytes.Length) |
1969 |
{ |
1970 |
Debug.Log("Copying " + resFileName + "..."); |
1971 |
|
1972 |
if(textRes != null) |
1973 |
{ |
1974 |
using (FileStream fileStream = new FileStream (targetFilePath, FileMode.Create, FileAccess.Write, FileShare.Read)) |
1975 |
{ |
1976 |
fileStream.Write(textRes.bytes, 0, textRes.bytes.Length); |
1977 |
} |
1978 |
|
1979 |
bool bFileCopied = File.Exists(targetFilePath); |
1980 |
|
1981 |
bOneCopied = bOneCopied || bFileCopied; |
1982 |
bAllCopied = bAllCopied && bFileCopied; |
1983 |
|
1984 |
return bFileCopied; |
1985 |
} |
1986 |
} |
1987 |
#endif |
1988 |
|
1989 |
return false; |
1990 |
} |
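// Example sketch of the bOneCopied/bAllCopied pattern (the resource and file names below are
// placeholders, not the actual plugin files shipped with the asset):
//
//   bool bOneCopied = false, bAllCopied = true;
//   KinectInterop.CopyResourceFile("./SomeNativeLib.dll", "SomeNativeLib.dll", ref bOneCopied, ref bAllCopied);
//   bool bNeedRestart = bOneCopied && bAllCopied;   // same check as in the disabled IsOpenCvAvailable() code below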
1991 |
|
1992 |
// Unzips resource file to the target path |
1993 |
public static bool UnzipResourceDirectory(string targetDirPath, string resZipFileName, string checkForDir) |
1994 |
{ |
1995 |
#if !UNITY_WSA |
1996 |
if(checkForDir != string.Empty && Directory.Exists(checkForDir)) |
1997 |
{ |
1998 |
return false; |
1999 |
} |
2000 |
|
2001 |
TextAsset textRes = Resources.Load(resZipFileName, typeof(TextAsset)) as TextAsset; |
2002 |
if(textRes == null || textRes.bytes.Length == 0) |
2003 |
{ |
2004 |
return false; |
2005 |
} |
2006 |
|
2007 |
Debug.Log("Unzipping " + resZipFileName + "..."); |
2008 |
|
2009 |
// get the resource stream |
2010 |
MemoryStream memStream = new MemoryStream(textRes.bytes); |
2011 |
|
2012 |
// fix invalid code page 437 error |
2013 |
ZipConstants.DefaultCodePage = 0; |
2014 |
|
2015 |
using(ZipInputStream s = new ZipInputStream(memStream)) |
2016 |
{ |
2017 |
ZipEntry theEntry; |
2018 |
while ((theEntry = s.GetNextEntry()) != null) |
2019 |
{ |
2020 |
//Debug.Log(theEntry.Name); |
2021 |
|
2022 |
string directoryName = targetDirPath + Path.GetDirectoryName(theEntry.Name); |
2023 |
string fileName = Path.GetFileName(theEntry.Name); |
2024 |
|
2025 |
if(!Directory.Exists(directoryName)) |
2026 |
{ |
2027 |
// create directory |
2028 |
Directory.CreateDirectory(directoryName); |
2029 |
} |
2030 |
|
2031 |
if (fileName != string.Empty && !fileName.EndsWith(".meta")) |
2032 |
{ |
2033 |
string targetFilePath = directoryName + "/" + fileName; |
2034 |
|
2035 |
using (FileStream streamWriter = File.Create(targetFilePath)) |
2036 |
{ |
2037 |
int size = 2048; |
2038 |
byte[] data = new byte[2048]; |
2039 |
|
2040 |
while (true) |
2041 |
{ |
2042 |
size = s.Read(data, 0, data.Length); |
2043 |
|
2044 |
if (size > 0) |
2045 |
{ |
2046 |
streamWriter.Write(data, 0, size); |
2047 |
} |
2048 |
else |
2049 |
{ |
2050 |
break; |
2051 |
} |
2052 |
} |
2053 |
} |
2054 |
} |
2055 |
} |
2056 |
} |
2057 |
|
2058 |
// close the resource stream |
2059 |
//memStream.Close(); |
2060 |
memStream.Dispose(); |
2061 |
|
2062 |
return true; |
2063 |
#else |
2064 |
return false; |
2065 |
#endif |
2066 |
} |
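// Example sketch (placeholder names): unzipping a .zip stored as a TextAsset (e.g. "data.zip.bytes"
// under a Resources folder) into the project root, skipping the work if the check directory exists:
//
//   bool bUnzipped = KinectInterop.UnzipResourceDirectory(".", "data.zip", "./SomeDataDir");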
2067 |
|
2068 |
// Unzips the specified files from the resource zip to their target paths |
2069 |
public static bool UnzipResourceFiles(Dictionary<string, string> dictFilesToUnzip, string resZipFileName, |
2070 |
ref bool bOneCopied, ref bool bAllCopied) |
2071 |
{ |
2072 |
#if !UNITY_WSA |
2073 |
TextAsset textRes = Resources.Load(resZipFileName, typeof(TextAsset)) as TextAsset; |
2074 |
if(textRes == null || textRes.bytes.Length == 0) |
2075 |
{ |
2076 |
bOneCopied = false; |
2077 |
bAllCopied = false; |
2078 |
|
2079 |
return false; |
2080 |
} |
2081 |
|
2082 |
//Debug.Log("Unzipping " + resZipFileName + "..."); |
2083 |
|
2084 |
// get the resource stream |
2085 |
MemoryStream memStream = new MemoryStream(textRes.bytes); |
2086 |
|
2087 |
// fix invalid code page 437 error |
2088 |
ZipConstants.DefaultCodePage = 0; |
2089 |
|
2090 |
using(ZipInputStream s = new ZipInputStream(memStream)) |
2091 |
{ |
2092 |
ZipEntry theEntry; |
2093 |
while ((theEntry = s.GetNextEntry()) != null) |
2094 |
{ |
2095 |
//Debug.Log(theEntry.Name); |
2096 |
|
2097 |
if(dictFilesToUnzip.ContainsKey(theEntry.Name)) |
2098 |
{ |
2099 |
string targetFilePath = dictFilesToUnzip[theEntry.Name]; |
2100 |
|
2101 |
string directoryName = Path.GetDirectoryName(targetFilePath); |
2102 |
string fileName = Path.GetFileName(theEntry.Name); |
2103 |
|
2104 |
if(!Directory.Exists(directoryName)) |
2105 |
{ |
2106 |
// create directory |
2107 |
Directory.CreateDirectory(directoryName); |
2108 |
} |
2109 |
|
2110 |
FileInfo targetFile = new FileInfo(targetFilePath); |
2111 |
bool bTargetFileNewOrUpdated = !targetFile.Exists || targetFile.Length != theEntry.Size; |
2112 |
|
2113 |
if (fileName != string.Empty && bTargetFileNewOrUpdated) |
2114 |
{ |
2115 |
using (FileStream streamWriter = File.Create(targetFilePath)) |
2116 |
{ |
2117 |
int size = 2048; |
2118 |
byte[] data = new byte[2048]; |
2119 |
|
2120 |
while (true) |
2121 |
{ |
2122 |
size = s.Read(data, 0, data.Length); |
2123 |
|
2124 |
if (size > 0) |
2125 |
{ |
2126 |
streamWriter.Write(data, 0, size); |
2127 |
} |
2128 |
else |
2129 |
{ |
2130 |
break; |
2131 |
} |
2132 |
} |
2133 |
} |
2134 |
|
2135 |
bool bFileCopied = File.Exists(targetFilePath); |
2136 |
|
2137 |
bOneCopied = bOneCopied || bFileCopied; |
2138 |
bAllCopied = bAllCopied && bFileCopied; |
2139 |
} |
2140 |
} |
2141 |
|
2142 |
} |
2143 |
} |
2144 |
|
2145 |
// close the resource stream |
2146 |
//memStream.Close(); |
2147 |
memStream.Dispose(); |
2148 |
|
2149 |
return true; |
2150 |
#else |
2151 |
return false; |
2152 |
#endif |
2153 |
} |
2154 |
|
2155 |
// returns the unzipped file size in bytes, or -1 if the entry is not found in the zip |
2156 |
public static long GetUnzippedEntrySize(string resZipFileName, string sEntryName) |
2157 |
{ |
2158 |
#if !UNITY_WSA |
2159 |
TextAsset textRes = Resources.Load(resZipFileName, typeof(TextAsset)) as TextAsset; |
2160 |
if(textRes == null || textRes.bytes.Length == 0) |
2161 |
{ |
2162 |
return -1; |
2163 |
} |
2164 |
|
2165 |
// get the resource stream |
2166 |
MemoryStream memStream = new MemoryStream(textRes.bytes); |
2167 |
|
2168 |
// fix invalid code page 437 error |
2169 |
ZipConstants.DefaultCodePage = 0; |
2170 |
long entryFileSize = -1; |
2171 |
|
2172 |
using(ZipInputStream s = new ZipInputStream(memStream)) |
2173 |
{ |
2174 |
ZipEntry theEntry; |
2175 |
while ((theEntry = s.GetNextEntry()) != null) |
2176 |
{ |
2177 |
if(theEntry.Name == sEntryName) |
2178 |
{ |
2179 |
entryFileSize = theEntry.Size; |
2180 |
break; |
2181 |
} |
2182 |
|
2183 |
} |
2184 |
} |
2185 |
|
2186 |
// close the resource stream |
2187 |
//memStream.Close(); |
2188 |
memStream.Dispose(); |
2189 |
|
2190 |
return entryFileSize; |
2191 |
#else |
2192 |
return -1; |
2193 |
#endif |
2194 |
} |
2195 |
|
2196 |
// returns true if the project is running on 64-bit architecture, false if 32-bit |
2197 |
public static bool Is64bitArchitecture() |
2198 |
{ |
2199 |
int sizeOfPtr = Marshal.SizeOf(typeof(IntPtr)); |
2200 |
return (sizeOfPtr > 4); |
2201 |
} |
2202 |
|
2203 |
// returns the target dll path for the current platform (x86 or x64) |
2204 |
public static string GetTargetDllPath(string sAppPath, bool bIs64bitApp) |
2205 |
{ |
2206 |
string sTargetPath = sAppPath; |
2207 |
// string sPluginsPath = Application.dataPath + "/Plugins"; |
2208 |
// |
2209 |
// if(Directory.Exists(sPluginsPath)) |
2210 |
// { |
2211 |
// sTargetPath = sPluginsPath; |
2212 |
// |
2213 |
// //if(Application.isEditor) |
2214 |
// { |
2215 |
// string sPlatformPath = sPluginsPath + "/" + (!bIs64bitApp ? "x86" : "x86_64"); |
2216 |
// |
2217 |
// if(Directory.Exists(sPlatformPath)) |
2218 |
// { |
2219 |
// sTargetPath = sPlatformPath; |
2220 |
// } |
2221 |
// } |
2222 |
// } |
2223 |
|
2224 |
return sTargetPath; |
2225 |
} |
2226 |
|
2227 |
// cleans up objects and restarts the current level |
2228 |
public static void RestartLevel(GameObject parentObject, string callerName) |
2229 |
{ |
2230 |
Debug.Log(callerName + " is restarting level..."); |
2231 |
|
2232 |
// destroy parent object if any |
2233 |
if(parentObject) |
2234 |
{ |
2235 |
GameObject.Destroy(parentObject); |
2236 |
} |
2237 |
|
2238 |
// clean up memory assets |
2239 |
Resources.UnloadUnusedAssets(); |
2240 |
GC.Collect(); |
2241 |
|
2242 |
//if(Application.HasProLicense() && Application.isEditor) |
2243 |
{ |
2244 |
#if UNITY_EDITOR |
2245 |
// refresh the assets database |
2246 |
UnityEditor.AssetDatabase.Refresh(); |
2247 |
#endif |
2248 |
} |
2249 |
|
2250 |
// reload the same level |
2251 |
SceneManager.LoadScene(SceneManager.GetActiveScene().buildIndex); |
2252 |
//SceneManager.LoadScene(SceneManager.GetActiveScene().buildIndex); |
2253 |
} |
2254 |
|
2255 |
// sets the graphics shader level |
2256 |
public static void SetGraphicsShaderLevel(int shaderLevel) |
2257 |
{ |
2258 |
graphicsShaderLevel = shaderLevel; |
2259 |
} |
2260 |
|
2261 |
// checks whether DirectX11/Direct3D-11 (shader model 5.0 or higher) is available |
2262 |
public static bool IsDirectX11Available() |
2263 |
{ |
2264 |
return (graphicsShaderLevel >= 50); |
2265 |
} |
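// Example sketch: the shader level can be fed from Unity's SystemInfo at startup, after which
// the DirectX-11 check can be used anywhere in the project:
//
//   KinectInterop.SetGraphicsShaderLevel(SystemInfo.graphicsShaderLevel);
//   if (KinectInterop.IsDirectX11Available())
//   {
//       // shader model 5.0 or higher is available, so the shader-based processing paths can be used
//   }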
2266 |
|
2267 |
// checks whether the shader-based processing can be used; the OpenCV dll-copying fallback below is currently disabled |
2268 |
public static bool IsOpenCvAvailable(ref bool bNeedRestart) |
2269 |
{ |
2270 |
bNeedRestart = false; |
2271 |
|
2272 |
if(IsDirectX11Available()) |
2273 |
{ |
2274 |
// use shaders |
2275 |
return true; |
2276 |
} |
2277 |
|
2278 |
// bool bOneCopied = false, bAllCopied = true; |
2279 |
// string sTargetPath = GetTargetDllPath(".", Is64bitArchitecture()) + "/"; |
2280 |
// //string sTargetPath = "."; |
2281 |
// |
2282 |
// if(!Is64bitArchitecture()) |
2283 |
// { |
2284 |
// // 32 bit architecture |
2285 |
// sTargetPath = GetTargetDllPath(".", false) + "/"; |
2286 |
// |
2287 |
// Dictionary<string, string> dictFilesToUnzip = new Dictionary<string, string>(); |
2288 |
// dictFilesToUnzip["opencv_core2410.dll"] = sTargetPath + "opencv_core2410.dll"; |
2289 |
// dictFilesToUnzip["opencv_imgproc2410.dll"] = sTargetPath + "opencv_imgproc2410.dll"; |
2290 |
// dictFilesToUnzip["msvcp120.dll"] = sTargetPath + "msvcp120.dll"; |
2291 |
// dictFilesToUnzip["msvcr120.dll"] = sTargetPath + "msvcr120.dll"; |
2292 |
// |
2293 |
// UnzipResourceFiles(dictFilesToUnzip, "opencv.x86.zip", ref bOneCopied, ref bAllCopied); |
2294 |
// } |
2295 |
// else |
2296 |
// { |
2297 |
// // 64 bit architecture |
2298 |
// sTargetPath = GetTargetDllPath(".", true) + "/"; |
2299 |
// |
2300 |
// Dictionary<string, string> dictFilesToUnzip = new Dictionary<string, string>(); |
2301 |
// dictFilesToUnzip["opencv_core2410.dll"] = sTargetPath + "opencv_core2410.dll"; |
2302 |
// dictFilesToUnzip["opencv_imgproc2410.dll"] = sTargetPath + "opencv_imgproc2410.dll"; |
2303 |
// dictFilesToUnzip["msvcp120.dll"] = sTargetPath + "msvcp120.dll"; |
2304 |
// dictFilesToUnzip["msvcr120.dll"] = sTargetPath + "msvcr120.dll"; |
2305 |
// |
2306 |
// UnzipResourceFiles(dictFilesToUnzip, "opencv.x64.zip", ref bOneCopied, ref bAllCopied); |
2307 |
// } |
2308 |
// |
2309 |
// bNeedRestart = (bOneCopied && bAllCopied); |
2310 |
|
2311 |
return true; |
2312 |
} |
2313 |
|
2314 |
// initializes background removal with shaders |
2315 |
public static bool InitBackgroundRemoval(SensorData sensorData, bool isHiResPrefered) |
2316 |
{ |
2317 |
if(sensorData != null && sensorData.bodyIndexImage != null && sensorData.colorImage != null |
2318 |
&& IsDirectX11Available()) |
2319 |
{ |
2320 |
Shader erodeBodyShader = Shader.Find("Custom/Erode"); |
2321 |
sensorData.erodeBodyMaterial = new Material(erodeBodyShader); |
2322 |
sensorData.erodeBodyMaterial.SetFloat("_TexResX", (float)sensorData.depthImageWidth); |
2323 |
sensorData.erodeBodyMaterial.SetFloat("_TexResY", (float)sensorData.depthImageHeight); |
2324 |
//sensorData.erodeBodyMaterial.SetTexture("_MainTex", sensorData.bodyIndexTexture); |
2325 |
|
2326 |
Shader dilateBodyShader = Shader.Find("Custom/Dilate"); |
2327 |
sensorData.dilateBodyMaterial = new Material(dilateBodyShader); |
2328 |
sensorData.dilateBodyMaterial.SetFloat("_TexResX", (float)sensorData.depthImageWidth); |
2329 |
sensorData.dilateBodyMaterial.SetFloat("_TexResY", (float)sensorData.depthImageHeight); |
2330 |
//sensorData.dilateBodyMaterial.SetTexture("_MainTex", sensorData.bodyIndexTexture); |
2331 |
|
2332 |
Shader blurBodyShader = Shader.Find("Custom/BlurShader5"); |
2333 |
sensorData.blurBodyMaterial = new Material(blurBodyShader); |
2334 |
//sensorData.blurBodyMaterial.SetFloat("_Amount", 1.5f); |
2335 |
//sensorData.blurBodyMaterial.SetFloat("_BlurSizeXY", 2f); |
2336 |
|
2337 |
if(isHiResPrefered) |
2338 |
{ |
2339 |
if (sensorData.alphaBodyTexture == null || sensorData.alphaBodyTexture.width != sensorData.colorImageWidth || sensorData.alphaBodyTexture.height != sensorData.colorImageHeight) |
2340 |
{ |
2341 |
sensorData.alphaBodyTexture = new RenderTexture(sensorData.colorImageWidth, sensorData.colorImageHeight, 0); |
2342 |
sensorData.alphaBodyTexture.wrapMode = TextureWrapMode.Clamp; |
2343 |
sensorData.alphaBodyTexture.filterMode = FilterMode.Point; |
2344 |
} |
2345 |
|
2346 |
Shader alphaBodyShader = Shader.Find("Kinect/Color2BodyShader"); |
2347 |
if(alphaBodyShader) |
2348 |
{ |
2349 |
sensorData.alphaBodyMaterial = new Material(alphaBodyShader); |
2350 |
|
2351 |
sensorData.alphaBodyMaterial.SetFloat("_ColorResX", (float)sensorData.colorImageWidth); |
2352 |
sensorData.alphaBodyMaterial.SetFloat("_ColorResY", (float)sensorData.colorImageHeight); |
2353 |
sensorData.alphaBodyMaterial.SetFloat("_DepthResX", (float)sensorData.depthImageWidth); |
2354 |
sensorData.alphaBodyMaterial.SetFloat("_DepthResY", (float)sensorData.depthImageHeight); |
2355 |
|
2356 |
sensorData.color2DepthBuffer = new ComputeBuffer(sensorData.colorImageWidth * sensorData.colorImageHeight, sizeof(float) * 2); |
2357 |
sensorData.alphaBodyMaterial.SetBuffer("_DepthCoords", sensorData.color2DepthBuffer); |
2358 |
} |
2359 |
|
2360 |
Shader color2DepthShader = !sensorData.invertAlphaColorMask ? Shader.Find("Kinect/Color2DepthShader") : Shader.Find("Kinect/Color2DepthShaderInv"); |
2361 |
if(color2DepthShader) |
2362 |
{ |
2363 |
if (sensorData.color2DepthTexture == null || sensorData.color2DepthTexture.width != sensorData.colorImageWidth || sensorData.color2DepthTexture.height != sensorData.colorImageHeight) |
2364 |
{ |
2365 |
sensorData.color2DepthTexture = new RenderTexture(sensorData.colorImageWidth, sensorData.colorImageHeight, 0); |
2366 |
sensorData.color2DepthTexture.wrapMode = TextureWrapMode.Clamp; |
2367 |
sensorData.color2DepthTexture.filterMode = FilterMode.Point; |
2368 |
} |
2369 |
|
2370 |
sensorData.color2DepthMaterial = new Material(color2DepthShader); |
2371 |
|
2372 |
// sensorData.color2DepthMaterial.SetFloat("_ColorResX", (float)sensorData.colorImageWidth); |
2373 |
// sensorData.color2DepthMaterial.SetFloat("_ColorResY", (float)sensorData.colorImageHeight); |
2374 |
// sensorData.color2DepthMaterial.SetFloat("_DepthResX", (float)sensorData.depthImageWidth); |
2375 |
// sensorData.color2DepthMaterial.SetFloat("_DepthResY", (float)sensorData.depthImageHeight); |
2376 |
// |
2377 |
// sensorData.color2DepthBuffer = new ComputeBuffer(sensorData.colorImageWidth * sensorData.colorImageHeight, sizeof(float) * 2); |
2378 |
// sensorData.color2DepthMaterial.SetBuffer("_DepthCoords", sensorData.color2DepthBuffer); |
2379 |
} |
2380 |
} |
2381 |
else |
2382 |
{ |
2383 |
sensorData.alphaBodyTexture = new RenderTexture(sensorData.depthImageWidth, sensorData.depthImageHeight, 0); |
2384 |
sensorData.alphaBodyTexture.wrapMode = TextureWrapMode.Clamp; |
2385 |
//sensorData.alphaBodyTexture.filterMode = FilterMode.Point; |
2386 |
} |
2387 |
} |
2388 |
|
2389 |
if(isHiResPrefered && sensorData != null && sensorData.bodyIndexImage != null && sensorData.colorImage != null) |
2390 |
{ |
2391 |
sensorData.color2DepthCoords = new Vector2[sensorData.colorImageWidth * sensorData.colorImageHeight]; |
2392 |
} |
2393 |
|
2394 |
sensorData.backgroundRemovalInited = true; |
2395 |
sensorData.backgroundRemovalHiRes = isHiResPrefered; |
2396 |
|
2397 |
return true; |
2398 |
} |
2399 |
|
2400 |
// releases background removal shader resources |
2401 |
public static void FinishBackgroundRemoval(SensorData sensorData) |
2402 |
{ |
2403 |
if(sensorData == null) |
2404 |
return; |
2405 |
|
2406 |
if(sensorData.alphaBodyTexture != null) |
2407 |
{ |
2408 |
sensorData.alphaBodyTexture.Release(); |
2409 |
sensorData.alphaBodyTexture = null; |
2410 |
} |
2411 |
|
2412 |
sensorData.erodeBodyMaterial = null; |
2413 |
sensorData.dilateBodyMaterial = null; |
2414 |
sensorData.blurBodyMaterial = null; |
2415 |
|
2416 |
if(sensorData.color2DepthBuffer != null) |
2417 |
{ |
2418 |
sensorData.color2DepthBuffer.Release(); |
2419 |
sensorData.color2DepthBuffer = null; |
2420 |
} |
2421 |
|
2422 |
if(sensorData.color2DepthTexture != null) |
2423 |
{ |
2424 |
sensorData.color2DepthTexture.Release(); |
2425 |
sensorData.color2DepthTexture = null; |
2426 |
} |
2427 |
|
2428 |
sensorData.alphaBodyMaterial = null; |
2429 |
sensorData.color2DepthMaterial = null; |
2430 |
sensorData.color2DepthCoords = null; |
2431 |
|
2432 |
sensorData.backgroundRemovalInited = false; |
2433 |
} |
2434 |
|
2435 |
// computes current background removal texture |
2436 |
public static bool UpdateBackgroundRemoval(SensorData sensorData, bool isHiResPrefered, Color32 defaultColor, bool bAlphaTexOnly) |
2437 |
{ |
2438 |
if(sensorData.color2DepthMaterial != null && sensorData.color2DepthCoords != null && sensorData.depthCoordsBufferReady) |
2439 |
{ |
2440 |
if(sensorData.alphaBodyMaterial != null && sensorData.alphaBodyTexture) |
2441 |
{ |
2442 |
sensorData.color2DepthBuffer.SetData(sensorData.color2DepthCoords); |
2443 |
|
2444 |
sensorData.alphaBodyMaterial.SetTexture("_BodyTex", sensorData.bodyIndexTexture); |
2445 |
Graphics.Blit(null, sensorData.alphaBodyTexture, sensorData.alphaBodyMaterial); |
2446 |
|
2447 |
if(sensorData.erodeBodyMaterial != null && sensorData.dilateBodyMaterial != null && sensorData.blurBodyMaterial) |
2448 |
{ |
2449 |
ApplyErodeDilate(sensorData.alphaBodyTexture, sensorData.alphaBodyTexture, sensorData.erodeBodyMaterial, |
2450 |
sensorData.dilateBodyMaterial, sensorData.erodeIterations, sensorData.dilateIterations); |
2451 |
ApplyImageBlur(sensorData.alphaBodyTexture, sensorData.alphaBodyTexture, sensorData.blurBodyMaterial, 1, 0.6f); |
2452 |
} |
2453 |
} |
2454 |
|
2455 |
// blit the hi-res texture |
2456 |
if(!bAlphaTexOnly) |
2457 |
{ |
2458 |
//sensorData.color2DepthBuffer.SetData(sensorData.color2DepthCoords); |
2459 |
|
2460 |
sensorData.color2DepthMaterial.SetTexture("_BodyTex", sensorData.alphaBodyTexture); |
2461 |
sensorData.color2DepthMaterial.SetTexture("_ColorTex", sensorData.colorImageTexture); |
2462 |
//sensorData.color2DepthMaterial.SetColor("_DefaultClr", defaultColor); |
2463 |
|
2464 |
Graphics.Blit(null, sensorData.color2DepthTexture, sensorData.color2DepthMaterial); |
2465 |
} |
2466 |
|
2467 |
// release the coords buffer for the next poll |
2468 |
lock(sensorData.depthCoordsBufferLock) |
2469 |
{ |
2470 |
sensorData.depthCoordsBufferReady = false; |
2471 |
} |
2472 |
} |
2473 |
else if(sensorData.depth2ColorMaterial != null && sensorData.depth2ColorCoords != null && sensorData.depthCoordsBufferReady) |
2474 |
{ |
2475 |
if(sensorData.erodeBodyMaterial != null && sensorData.dilateBodyMaterial != null && sensorData.blurBodyMaterial) |
2476 |
{ |
2477 |
ApplyErodeDilate(sensorData.bodyIndexTexture, sensorData.alphaBodyTexture, sensorData.erodeBodyMaterial, |
2478 |
sensorData.dilateBodyMaterial, sensorData.erodeIterations, sensorData.dilateIterations); |
2479 |
ApplyImageBlur(sensorData.alphaBodyTexture, sensorData.alphaBodyTexture, sensorData.blurBodyMaterial, 0, 0.6f); |
2480 |
} |
2481 |
|
2482 |
// blit the lo-res texture |
2483 |
if(!bAlphaTexOnly) |
2484 |
{ |
2485 |
sensorData.depth2ColorBuffer.SetData(sensorData.depth2ColorCoords); |
2486 |
|
2487 |
sensorData.depth2ColorMaterial.SetTexture("_BodyTex", sensorData.alphaBodyTexture); |
2488 |
sensorData.depth2ColorMaterial.SetTexture("_ColorTex", sensorData.colorImageTexture); |
2489 |
//sensorData.depth2ColorMaterial.SetColor("_DefaultClr", defaultColor); |
2490 |
|
2491 |
Graphics.Blit(null, sensorData.depth2ColorTexture, sensorData.depth2ColorMaterial); |
2492 |
} |
2493 |
|
2494 |
// release the coords buffer for the next poll |
2495 |
lock(sensorData.depthCoordsBufferLock) |
2496 |
{ |
2497 |
sensorData.depthCoordsBufferReady = false; |
2498 |
} |
2499 |
} |
2500 |
|
2501 |
return true; |
2502 |
} |
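// Example sketch of the expected call order for shader-based background removal (a simplified
// outline of how a background-removal component might drive it; names are assumptions):
//
//   KinectInterop.InitBackgroundRemoval(sensorData, isHiResPrefered);   // once, after the sensor is initialized
//   KinectInterop.UpdateBackgroundRemoval(sensorData, isHiResPrefered, Color.clear, false);   // once per frame
//   // the result ends up in sensorData.color2DepthTexture (hi-res path) or sensorData.depth2ColorTexture (lo-res path)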
2503 |
|
2504 |
// applies erode and dilate shader passes to the source texture, ping-ponging between two temporary render textures |
private static void ApplyErodeDilate(RenderTexture source, RenderTexture destination, Material erodeMaterial, |
2505 |
Material dilateMaterial, int erodeIterations, int dilateIterations) |
2506 |
{ |
2507 |
if(!source || !destination || !erodeMaterial || !dilateMaterial) |
2508 |
return; |
2509 |
|
2510 |
RenderTexture[] tempTexture = new RenderTexture[2]; |
2511 |
tempTexture[0] = RenderTexture.GetTemporary(source.width, source.height, 0); |
2512 |
tempTexture[1] = RenderTexture.GetTemporary(source.width, source.height, 0); |
2513 |
|
2514 |
Graphics.Blit(source, tempTexture[0]); |
2515 |
|
2516 |
for(int i = 0; i < erodeIterations; i++) |
2517 |
{ |
2518 |
Graphics.Blit(tempTexture[i % 2], tempTexture[(i + 1) % 2], erodeMaterial); |
2519 |
} |
2520 |
|
2521 |
if((erodeIterations % 2) != 0) |
2522 |
{ |
2523 |
Graphics.Blit(tempTexture[1], tempTexture[0]); |
2524 |
} |
2525 |
|
2526 |
for(int i = 0; i < dilateIterations; i++) |
2527 |
{ |
2528 |
Graphics.Blit(tempTexture[i % 2], tempTexture[(i + 1) % 2], dilateMaterial); |
2529 |
} |
2530 |
|
2531 |
Graphics.Blit(tempTexture[dilateIterations % 2], destination); |
2532 |
|
2533 |
RenderTexture.ReleaseTemporary(tempTexture[0]); |
2534 |
RenderTexture.ReleaseTemporary(tempTexture[1]); |
2535 |
} |
2536 |
|
2537 |
// blurs the source texture by downsampling it 4x and running the given number of four-tap blur iterations |
private static void ApplyImageBlur(RenderTexture source, RenderTexture destination, Material blurMaterial, int blurIterations, float blurSpread) |
2538 |
{ |
2539 |
if(!source || !destination || !blurMaterial) |
2540 |
return; |
2541 |
|
2542 |
// Graphics.Blit(source, destination, blurMaterial); |
2543 |
// return; |
2544 |
|
2545 |
int rtW = source.width / 4; |
2546 |
int rtH = source.height / 4; |
2547 |
RenderTexture buffer = RenderTexture.GetTemporary(rtW, rtH, 0); |
2548 |
|
2549 |
// Copy the source into the 4x-smaller texture. |
2550 |
Downsample4x(source, buffer, blurMaterial); |
2551 |
|
2552 |
// Blur the small texture |
2553 |
for(int i = 0; i < blurIterations; i++) |
2554 |
{ |
2555 |
RenderTexture buffer2 = RenderTexture.GetTemporary(rtW, rtH, 0); |
2556 |
FourTapCone(buffer, buffer2, blurMaterial, i, blurSpread); |
2557 |
RenderTexture.ReleaseTemporary(buffer); |
2558 |
buffer = buffer2; |
2559 |
} |
2560 |
|
2561 |
Graphics.Blit(buffer, destination); |
2562 |
RenderTexture.ReleaseTemporary(buffer); |
2563 |
} |
2564 |
|
2565 |
// downsamples the texture to a quarter of its width and height. |
2566 |
private static void Downsample4x(RenderTexture source, RenderTexture dest, Material material) |
2567 |
{ |
2568 |
float off = 1.0f; |
2569 |
|
2570 |
Graphics.BlitMultiTap (source, dest, material, |
2571 |
new Vector2(-off, -off), |
2572 |
new Vector2(-off, off), |
2573 |
new Vector2( off, off), |
2574 |
new Vector2( off, -off) |
2575 |
); |
2576 |
} |
2577 |
|
2578 |
// performs one blur iteration. |
2579 |
private static void FourTapCone (RenderTexture source, RenderTexture dest, Material material, int iteration, float blurSpread) |
2580 |
{ |
2581 |
float off = 0.5f + iteration * blurSpread; |
2582 |
|
2583 |
Graphics.BlitMultiTap (source, dest, material, |
2584 |
new Vector2(-off, -off), |
2585 |
new Vector2(-off, off), |
2586 |
new Vector2( off, off), |
2587 |
new Vector2( off, -off) |
2588 |
); |
2589 |
} |
2590 |
|
2591 |
// returns the foreground frame rectangle, according to the required resolution |
2592 |
public static Rect GetForegroundFrameRect(SensorData sensorData, bool isHiResPrefered) |
2593 |
{ |
2594 |
if(isHiResPrefered && sensorData != null && sensorData.sensorInterface != null) |
2595 |
{ |
2596 |
if(sensorData.sensorInterface.IsBRHiResSupported() && sensorData.colorImage != null) |
2597 |
{ |
2598 |
return new Rect(0f, 0f, sensorData.colorImageWidth, sensorData.colorImageHeight); |
2599 |
} |
2600 |
} |
2601 |
|
2602 |
return sensorData != null ? new Rect(0f, 0f, sensorData.depthImageWidth, sensorData.depthImageHeight) : new Rect(); |
2603 |
} |
2604 |
|
2605 |
// returns the foreground frame length, according to the required resolution |
2606 |
public static int GetForegroundFrameLength(SensorData sensorData, bool isHiResPrefered) |
2607 |
{ |
2608 |
if(isHiResPrefered && sensorData != null && sensorData.sensorInterface != null) |
2609 |
{ |
2610 |
if(sensorData.sensorInterface.IsBRHiResSupported() && sensorData.colorImage != null) |
2611 |
{ |
2612 |
return sensorData.colorImage.Length; |
2613 |
} |
2614 |
} |
2615 |
|
2616 |
return (sensorData != null && sensorData.bodyIndexImage != null) ? sensorData.bodyIndexImage.Length * 4 : 0; |
2617 |
} |
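// Example sketch: sizing a CPU-side foreground buffer and texture from these helpers
// (variable names are assumptions):
//
//   Rect frameRect = KinectInterop.GetForegroundFrameRect(sensorData, isHiResPrefered);
//   int frameLen = KinectInterop.GetForegroundFrameLength(sensorData, isHiResPrefered);
//
//   byte[] fgImageFrame = new byte[frameLen];
//   Texture2D fgTexture = new Texture2D((int)frameRect.width, (int)frameRect.height, TextureFormat.RGBA32, false);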
2618 |
|
2619 |
// builds a black & white alpha frame from the body-index data (OpenCV-based implementation, currently disabled) |
private static bool GetForegroundAlphaFrame(SensorData sensorData, bool bLimitedUsers, ICollection<int> alTrackedIndexes, ref byte[] fgAlphaFrame) |
2620 |
{ |
2621 |
if(sensorData == null || sensorData.bodyIndexImage == null) |
2622 |
return false; |
2623 |
|
2624 |
// CvMat cvAlphaMap = new CvMat(sensorData.depthImageHeight, sensorData.depthImageWidth, MatrixType.U8C1); |
2625 |
// |
2626 |
// System.IntPtr rawPtrAlpha; |
2627 |
// cvAlphaMap.GetRawData(out rawPtrAlpha); |
2628 |
// |
2629 |
// if(sensorData.selectedBodyIndex != 255 || bLimitedUsers) |
2630 |
// { |
2631 |
// // copy body-index selectively |
2632 |
// byte btSelBI = sensorData.selectedBodyIndex; |
2633 |
// int iBodyIndexLength = sensorData.bodyIndexImage.Length; |
2634 |
// |
2635 |
// for (int i = 0; i < iBodyIndexLength; i++) |
2636 |
// { |
2637 |
// byte btBufBI = sensorData.bodyIndexImage[i]; |
2638 |
// |
2639 |
// bool bUserTracked = btSelBI != 255 ? btSelBI == btBufBI : |
2640 |
// (btBufBI != 255 ? alTrackedIndexes.Contains((int)btBufBI) : false); |
2641 |
// |
2642 |
// if(bUserTracked) |
2643 |
// { |
2644 |
// cvAlphaMap.Set1D(i, btBufBI); |
2645 |
// } |
2646 |
// else |
2647 |
// { |
2648 |
// cvAlphaMap.Set1D(i, 255); |
2649 |
// } |
2650 |
// } |
2651 |
// } |
2652 |
// else |
2653 |
// { |
2654 |
// // copy the entire body-index buffer |
2655 |
// Marshal.Copy(sensorData.bodyIndexImage, 0, rawPtrAlpha, sensorData.bodyIndexImage.Length); |
2656 |
// } |
2657 |
// |
2658 |
// // make the image b&w |
2659 |
// cvAlphaMap.Threshold(cvAlphaMap, 254, 255, ThresholdType.BinaryInv); |
2660 |
// |
2661 |
// // apply erode, dilate and blur |
2662 |
// cvAlphaMap.Erode(cvAlphaMap); |
2663 |
// cvAlphaMap.Dilate(cvAlphaMap); |
2664 |
// cvAlphaMap.Smooth(cvAlphaMap, SmoothType.Blur, 5, 5); |
2665 |
// //cvAlphaMap.Smooth(cvAlphaMap, SmoothType.Median, 7); |
2666 |
// |
2667 |
// // get the foreground image |
2668 |
// Marshal.Copy(rawPtrAlpha, fgAlphaFrame, 0, fgAlphaFrame.Length); |
2669 |
// |
2670 |
// return true; |
2671 |
return false; |
2672 |
} |
2673 |
|
2674 |
// gets the updated foreground frame, according to the required resolution |
2675 |
public static bool PollForegroundFrame(SensorData sensorData, bool isHiResPrefered, Color32 defaultColor, bool bLimitedUsers, ICollection<int> alTrackedIndexes, ref byte[] fgImageFrame) |
2676 |
{ |
2677 |
// if(IsDirectX11Available()) |
2678 |
// return false; |
2679 |
// |
2680 |
// if(sensorData.colorImage == null) |
2681 |
// return false; |
2682 |
// |
2683 |
// // get the alpha frame |
2684 |
// byte[] fgAlphaFrame = new byte[sensorData.bodyIndexImage.Length]; |
2685 |
// if(!GetForegroundAlphaFrame(sensorData, bLimitedUsers, alTrackedIndexes, ref fgAlphaFrame)) |
2686 |
// return false; |
2687 |
// |
2688 |
// int alphaImageLength = fgAlphaFrame.Length; |
2689 |
// int colorImageLength = sensorData.colorImageWidth * sensorData.colorImageHeight; |
2690 |
// |
2691 |
// Array.Clear(fgImageFrame, 0, fgImageFrame.Length); |
2692 |
// |
2693 |
// // try to get the full color frame coordinates |
2694 |
// if(isHiResPrefered && sensorData.color2DepthCoords != null && sensorData.depthCoordsBufferReady) |
2695 |
// { |
2696 |
// for (int i = 0, fi = 0; i < colorImageLength; i++, fi += 4) |
2697 |
// { |
2698 |
// Vector2 vDepthPos = sensorData.color2DepthCoords[i]; |
2699 |
// |
2700 |
// if(!float.IsInfinity(vDepthPos.x) && !float.IsInfinity(vDepthPos.y)) |
2701 |
// { |
2702 |
// int dx = Mathf.RoundToInt(vDepthPos.x); |
2703 |
// int dy = Mathf.RoundToInt(vDepthPos.y); |
2704 |
// |
2705 |
// int di = dx + dy * sensorData.depthImageWidth; |
2706 |
// |
2707 |
// if(di >= 0 && di < fgAlphaFrame.Length) |
2708 |
// { |
2709 |
// int ci = i << 2; |
2710 |
// |
2711 |
// fgImageFrame[fi] = sensorData.colorImage[ci]; |
2712 |
// fgImageFrame[fi + 1] = sensorData.colorImage[ci + 1]; |
2713 |
// fgImageFrame[fi + 2] = sensorData.colorImage[ci + 2]; |
2714 |
// fgImageFrame[fi + 3] = fgAlphaFrame[di]; |
2715 |
// } |
2716 |
// } |
2717 |
// else |
2718 |
// { |
2719 |
// fgImageFrame[fi + 3] = 0; |
2720 |
// } |
2721 |
// } |
2722 |
// |
2723 |
// // buffer is released |
2724 |
// lock(sensorData.depthCoordsBufferLock) |
2725 |
// { |
2726 |
// sensorData.depthCoordsBufferReady = false; |
2727 |
// } |
2728 |
// } |
2729 |
// else |
2730 |
// { |
2731 |
// for (int i = 0, fi = 0; i < alphaImageLength; i++, fi += 4) |
2732 |
// { |
2733 |
// Vector2 vColorPos = Vector2.zero; |
2734 |
// |
2735 |
// if(sensorData.depth2ColorCoords != null && sensorData.depthCoordsBufferReady) |
2736 |
// { |
2737 |
// vColorPos = sensorData.depth2ColorCoords[i]; |
2738 |
// } |
2739 |
// else |
2740 |
// { |
2741 |
// Vector2 vDepthPos = Vector2.zero; |
2742 |
// vDepthPos.x = i % sensorData.depthImageWidth; |
2743 |
// vDepthPos.y = i / sensorData.depthImageWidth; |
2744 |
// |
2745 |
// ushort userDepth = sensorData.depthImage[i]; |
2746 |
// vColorPos = MapDepthPointToColorCoords(sensorData, vDepthPos, userDepth); |
2747 |
// } |
2748 |
// |
2749 |
// if(!float.IsInfinity(vColorPos.x) && !float.IsInfinity(vColorPos.y)) |
2750 |
// { |
2751 |
// int cx = (int)vColorPos.x; |
2752 |
// int cy = (int)vColorPos.y; |
2753 |
// int colorIndex = cx + cy * sensorData.colorImageWidth; |
2754 |
// |
2755 |
// if(colorIndex >= 0 && colorIndex < colorImageLength) |
2756 |
// { |
2757 |
// int ci = colorIndex << 2; |
2758 |
// |
2759 |
// fgImageFrame[fi] = sensorData.colorImage[ci]; |
2760 |
// fgImageFrame[fi + 1] = sensorData.colorImage[ci + 1]; |
2761 |
// fgImageFrame[fi + 2] = sensorData.colorImage[ci + 2]; |
2762 |
// fgImageFrame[fi + 3] = fgAlphaFrame[i]; |
2763 |
// } |
2764 |
// } |
2765 |
// else |
2766 |
// { |
2767 |
// fgImageFrame[fi] = defaultColor.r; |
2768 |
// fgImageFrame[fi + 1] = defaultColor.g; |
2769 |
// fgImageFrame[fi + 2] = defaultColor.b; |
2770 |
// fgImageFrame[fi + 3] = fgAlphaFrame[i]; |
2771 |
// } |
2772 |
// } |
2773 |
// |
2774 |
// // buffer is released |
2775 |
// lock(sensorData.depthCoordsBufferLock) |
2776 |
// { |
2777 |
// sensorData.depthCoordsBufferReady = false; |
2778 |
// } |
2779 |
// } |
2780 |
// |
2781 |
// return true; |
2782 |
return false; |
2783 |
} |
2784 |
|
2785 |
// reads render texture contents into tex2d (it must have the same width and height). |
2786 |
public static bool RenderTex2Tex2D(RenderTexture rt, ref Texture2D tex) |
2787 |
{ |
2788 |
if(!rt || !tex || rt.width != tex.width || rt.height != tex.height) |
2789 |
return false; |
2790 |
|
2791 |
RenderTexture currentActiveRT = RenderTexture.active; |
2792 |
RenderTexture.active = rt; |
2793 |
|
2794 |
tex.ReadPixels(new Rect(0, 0, tex.width, tex.height), 0, 0); |
2795 |
tex.Apply(); |
2796 |
|
2797 |
RenderTexture.active = currentActiveRT; |
2798 |
|
2799 |
return true; |
2800 |
} |
2801 |
|
2802 |
// reads a region of the render texture contents into tex2d (the region must have the same width and height as the texture). |
2803 |
public static bool RenderTex2Tex2D(RenderTexture rt, int rtX, int rtY, int rtW, int rtH, ref Texture2D tex) |
2804 |
{ |
2805 |
if(!rt || !tex || rtW != tex.width || rtH != tex.height) |
2806 |
return false; |
2807 |
|
2808 |
RenderTexture currentActiveRT = RenderTexture.active; |
2809 |
RenderTexture.active = rt; |
2810 |
|
2811 |
tex.ReadPixels(new Rect(rtX, rtY, rtW, rtH), 0, 0); |
2812 |
tex.Apply(); |
2813 |
|
2814 |
RenderTexture.active = currentActiveRT; |
2815 |
|
2816 |
return true; |
2817 |
} |
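// Example sketch: copying the GPU-side alpha-body render texture into a readable Texture2D
// (the destination must match the render texture size; names are assumptions):
//
//   Texture2D alphaTex2D = new Texture2D(sensorData.alphaBodyTexture.width, sensorData.alphaBodyTexture.height, TextureFormat.ARGB32, false);
//   if (KinectInterop.RenderTex2Tex2D(sensorData.alphaBodyTexture, ref alphaTex2D))
//   {
//       // alphaTex2D now contains the same pixels and can be read on the CPU
//   }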
2818 |
|
2819 |
// copies the source texture pixels into the destination texture, creating it if needed. |
2820 |
public static bool CopyTex2D(Texture2D src, ref Texture2D dest) |
2821 |
{ |
2822 |
if (src == null) |
2823 |
return false; |
2824 |
|
2825 |
if (dest == null) |
2826 |
{ |
2827 |
dest = new Texture2D(src.width, src.height, src.format, false); |
2828 |
} |
2829 |
|
2830 |
if (src.width != dest.width || src.height != dest.height) |
2831 |
return false; |
2832 |
|
2833 |
Color32[] pix = src.GetPixels32(); |
2834 |
dest.SetPixels32(pix); |
2835 |
dest.Apply(); |
2836 |
|
2837 |
return true; |
2838 |
} |
2839 |
|
2840 |
// DLL Imports for native library functions |
2841 |
[DllImport("kernel32", SetLastError=true, CharSet = CharSet.Ansi)] |
2842 |
static extern IntPtr LoadLibrary([MarshalAs(UnmanagedType.LPStr)]string lpFileName); |
2843 |
|
2844 |
[DllImport("kernel32", SetLastError=true)] |
2845 |
static extern bool FreeLibrary(IntPtr hModule); |
2846 |
|
2847 |
// loads the native dll, to make sure the library is resident in the process |
2848 |
public static bool LoadNativeLib(string sLibName) |
2849 |
{ |
2850 |
string sTargetPath = KinectInterop.GetTargetDllPath(".", Is64bitArchitecture()); |
2851 |
string sFullLibPath = sTargetPath + "/" + sLibName; |
2852 |
|
2853 |
IntPtr hLibrary = LoadLibrary(sFullLibPath); |
2854 |
|
2855 |
return (hLibrary != IntPtr.Zero); |
2856 |
} |
2857 |
|
2858 |
// unloads and deletes native library |
2859 |
public static void DeleteNativeLib(string sLibName, bool bUnloadLib) |
2860 |
{ |
2861 |
string sTargetPath = KinectInterop.GetTargetDllPath(".", Is64bitArchitecture()); |
2862 |
string sFullLibPath = sTargetPath + "/" + sLibName; |
2863 |
|
2864 |
if(bUnloadLib) |
2865 |
{ |
2866 |
IntPtr hLibrary = LoadLibrary(sFullLibPath); |
2867 |
|
2868 |
if(hLibrary != IntPtr.Zero) |
2869 |
{ |
2870 |
FreeLibrary(hLibrary);  // FreeLibrary only decrements the module reference count, hence the double call |
2871 |
FreeLibrary(hLibrary); |
2872 |
} |
2873 |
} |
2874 |
|
2875 |
try |
2876 |
{ |
2877 |
// delete file |
2878 |
if(File.Exists(sFullLibPath)) |
2879 |
{ |
2880 |
File.Delete(sFullLibPath); |
2881 |
} |
2882 |
} |
2883 |
catch (Exception) |
2884 |
{ |
2885 |
Debug.Log("Could not delete file: " + sFullLibPath); |
2886 |
} |
2887 |
} |
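// Example sketch (placeholder dll name): pre-loading a native library next to the executable,
// and removing it again on shutdown:
//
//   bool bLoaded = KinectInterop.LoadNativeLib("SomeNativeWrapper.dll");
//   // ... later, e.g. when the sensor interface is shut down:
//   KinectInterop.DeleteNativeLib("SomeNativeWrapper.dll", true);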
2888 |
|
2889 |
// Universal Windows Platform (UWP) specific functions |
2890 |
|
2891 |
#if UNITY_WSA |
2892 |
[DllImport("kernelbase")] |
2893 |
public static extern void Sleep(int dwMilliseconds); |
2894 |
#else |
2895 |
[DllImport("kernel32")] |
2896 |
public static extern void Sleep(int dwMilliseconds); |
2897 |
#endif |
2898 |
|
2899 |
|
2900 |
public static bool IsFileExists(string sFilePath, long iFileSize) |
2901 |
{ |
2902 |
#if UNITY_WSA |
2903 |
return File.Exists(sFilePath); |
2904 |
#else |
2905 |
System.IO.FileInfo targetFile = new System.IO.FileInfo(sFilePath); |
2906 |
return targetFile.Exists && targetFile.Length == iFileSize; |
2907 |
#endif |
2908 |
} |
2909 |
|
2910 |
|
2911 |
public static string GetEnvironmentVariable(string sEnvVar) |
2912 |
{ |
2913 |
#if !UNITY_WSA |
2914 |
return System.Environment.GetEnvironmentVariable(sEnvVar); |
2915 |
#else |
2916 |
return String.Empty; |
2917 |
#endif |
2918 |
} |
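// Example sketch: reading an SDK-related environment variable in a platform-safe way
// (the variable name below is only an example):
//
//   string sdkDir = KinectInterop.GetEnvironmentVariable("KINECTSDK20_DIR");
//   if (!string.IsNullOrEmpty(sdkDir))
//   {
//       // the Kinect SDK 2.0 appears to be installed at sdkDir
//   }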
2919 |
|
2920 |
} |