t1 / TFDContents / Assets / KinectScripts / Interfaces / Kinect2Interface.cs @ 3
이력 | 보기 | 이력해설 | 다운로드 (52.8 KB)
| 1 |
#if !(UNITY_WSA_10_0 && NETFX_CORE) |
|---|---|
| 2 |
using UnityEngine; |
| 3 |
using System.Collections; |
| 4 |
using Windows.Kinect; |
| 5 |
using System.Runtime.InteropServices; |
| 6 |
using Microsoft.Kinect.Face; |
| 7 |
using System.Collections.Generic; |
| 8 |
using System; |
| 9 |
|
| 10 |
// Kinect SDK v2 implementation of the DepthSensorInterface.
// Wraps the managed Windows.Kinect / Microsoft.Kinect.Face APIs and the
// native Kinect2SpeechWrapper library (P/Invoke declarations below).
public class Kinect2Interface : DepthSensorInterface
{
	// change this to false, if you aren't using Kinect-v2 only and want KM to check for available sensors
	public static bool sensorAlwaysAvailable = true;

	// frame sources requested when the sensor was opened
	private KinectInterop.FrameSource sensorFlags;
	// the default Kinect-v2 sensor
	public KinectSensor kinectSensor;
	// maps points between depth, color and camera space
	public CoordinateMapper coordMapper;

	// per-stream frame readers, used when multi-source reading is disabled
	private BodyFrameReader bodyFrameReader;
	private BodyIndexFrameReader bodyIndexFrameReader;
	private ColorFrameReader colorFrameReader;
	private DepthFrameReader depthFrameReader;
	private InfraredFrameReader infraredFrameReader;

	// multi-source reader and the most recently acquired multi-source frame
	private MultiSourceFrameReader multiSourceFrameReader;
	private MultiSourceFrame multiSourceFrame;

	// individual frames extracted from the current multi-source frame
	private BodyFrame msBodyFrame = null;
	private BodyIndexFrame msBodyIndexFrame = null;
	private ColorFrame msColorFrame = null;
	private DepthFrame msDepthFrame = null;
	private InfraredFrame msInfraredFrame = null;

	// maximum number of tracked bodies, and the buffer refreshed on each body frame
	private int bodyCount;
	private Body[] bodyData;

	// face-tracking state; the sources/readers/results are initialized elsewhere in this class
	private bool bFaceTrackingInited = false;
	public FaceFrameSource[] faceFrameSources = null;
	public FaceFrameReader[] faceFrameReaders = null;
	public FaceFrameResult[] faceFrameResults = null;

	// private int faceDisplayWidth;
	// private int faceDisplayHeight;

	// whether the face rectangle should be drawn (set elsewhere in this class)
	private bool isDrawFaceRect = false;
	// HD face-tracking state (one entry per body)
	public HighDefinitionFaceFrameSource[] hdFaceFrameSources = null;
	public HighDefinitionFaceFrameReader[] hdFaceFrameReaders = null;
	public FaceAlignment[] hdFaceAlignments = null;
	public FaceModel[] hdFaceModels = null;

	// whether background removal has been initialized (used elsewhere in this class)
	private bool bBackgroundRemovalInited = false;


	// DLL Imports for speech wrapper functions
	[DllImport("Kinect2SpeechWrapper", EntryPoint = "InitSpeechRecognizer")]
	private static extern int InitSpeechRecognizerNative([MarshalAs(UnmanagedType.LPWStr)]string sRecoCriteria, bool bUseKinect, bool bAdaptationOff);

	[DllImport("Kinect2SpeechWrapper", EntryPoint = "FinishSpeechRecognizer")]
	private static extern void FinishSpeechRecognizerNative();

	[DllImport("Kinect2SpeechWrapper", EntryPoint = "UpdateSpeechRecognizer")]
	private static extern int UpdateSpeechRecognizerNative();

	[DllImport("Kinect2SpeechWrapper", EntryPoint = "LoadSpeechGrammar")]
	private static extern int LoadSpeechGrammarNative([MarshalAs(UnmanagedType.LPWStr)]string sFileName, short iNewLangCode, bool bDynamic);

	[DllImport("Kinect2SpeechWrapper", EntryPoint = "AddGrammarPhrase")]
	private static extern int AddGrammarPhraseNative([MarshalAs(UnmanagedType.LPWStr)]string sFromRule, [MarshalAs(UnmanagedType.LPWStr)]string sToRule, [MarshalAs(UnmanagedType.LPWStr)]string sPhrase, bool bClearRule, bool bCommitGrammar);

	[DllImport("Kinect2SpeechWrapper", EntryPoint = "AddSpeechGrammar")]
	private static extern int AddSpeechGrammarNative([MarshalAs(UnmanagedType.LPWStr)]string sFileName, short iNewLangCode, bool bDynamic);

	[DllImport("Kinect2SpeechWrapper", EntryPoint = "AddPhraseToGrammar")]
	private static extern int AddPhraseToGrammarNative([MarshalAs(UnmanagedType.LPWStr)]string sGrammarName, [MarshalAs(UnmanagedType.LPWStr)]string sFromRule, [MarshalAs(UnmanagedType.LPWStr)]string sToRule, [MarshalAs(UnmanagedType.LPWStr)]string sPhrase, bool bClearRule, bool bCommitGrammar);

	[DllImport("Kinect2SpeechWrapper", EntryPoint = "SetGrammarState")]
	private static extern int SetGrammarStateNative([MarshalAs(UnmanagedType.LPWStr)]string sGrammarName, bool bEnableGrammar);

	// NOTE: managed names below intentionally differ from the native entry points
	[DllImport("Kinect2SpeechWrapper", EntryPoint = "SetRequiredConfidence")]
	private static extern void SetSpeechConfidenceNative(float fConfidence);

	[DllImport("Kinect2SpeechWrapper", EntryPoint = "IsSoundStarted")]
	private static extern bool IsSpeechStartedNative();

	[DllImport("Kinect2SpeechWrapper", EntryPoint = "IsSoundEnded")]
	private static extern bool IsSpeechEndedNative();

	[DllImport("Kinect2SpeechWrapper", EntryPoint = "IsPhraseRecognized")]
	private static extern bool IsPhraseRecognizedNative();

	[DllImport("Kinect2SpeechWrapper", EntryPoint = "GetPhraseConfidence")]
	private static extern float GetPhraseConfidenceNative();

	[DllImport("Kinect2SpeechWrapper", EntryPoint = "GetRecognizedTag")]
	private static extern IntPtr GetRecognizedPhraseTagNative();

	[DllImport("Kinect2SpeechWrapper", EntryPoint = "ClearPhraseRecognized")]
	private static extern void ClearRecognizedPhraseNative();
| 99 |
|
| 100 |
|
| 101 |
// Identifies this interface as the Kinect-SDK-v2 sensor platform.
public KinectInterop.DepthSensorPlatform GetSensorPlatform()
{
	// this implementation always represents the Kinect SDK v2 platform
	return KinectInterop.DepthSensorPlatform.KinectSDKv2;
}
| 105 |
|
| 106 |
// Prepares the native libraries for the Kinect-v2 interface.
// When bCopyLibs is false, only checks whether the unzipped native library already
// exists (and has the expected size) and returns the result.
// When bCopyLibs is true, unzips the architecture-specific native libraries and the
// NuiDatabase, and sets bNeedRestart when at least one file was freshly copied.
// Returns true when the interface may be used (possibly after a restart).
public bool InitSensorInterface (bool bCopyLibs, ref bool bNeedRestart)
{
	bool bOneCopied = false, bAllCopied = true;
	string sTargetPath = KinectInterop.GetTargetDllPath(".", KinectInterop.Is64bitArchitecture()) + "/";

	if(!bCopyLibs)
	{
		// check if the native library is there
		string sTargetLib = sTargetPath + "KinectUnityAddin.dll";
		bNeedRestart = false;

		string sZipFileName = !KinectInterop.Is64bitArchitecture() ? "KinectV2UnityAddin.x86.zip" : "KinectV2UnityAddin.x64.zip";
		long iTargetSize = KinectInterop.GetUnzippedEntrySize(sZipFileName, "KinectUnityAddin.dll");

		return KinectInterop.IsFileExists(sTargetLib, iTargetSize);
	}

	// the set of libraries to unzip is identical for both architectures;
	// only the source zip archive differs
	bool bArch64 = KinectInterop.Is64bitArchitecture();

	if(!bArch64)
	{
		Debug.Log("x32-architecture detected.");
		KinectInterop.UnzipResourceFiles(GetNativeLibsMap(sTargetPath), "KinectV2UnityAddin.x86.zip", ref bOneCopied, ref bAllCopied);
	}
	else
	{
		Debug.Log("x64-architecture detected.");
		KinectInterop.UnzipResourceFiles(GetNativeLibsMap(sTargetPath), "KinectV2UnityAddin.x64.zip", ref bOneCopied, ref bAllCopied);
	}

	KinectInterop.UnzipResourceDirectory(sTargetPath, "NuiDatabase.zip", sTargetPath + "NuiDatabase");

	// a restart is needed when at least one library was copied and none failed
	bNeedRestart = (bOneCopied && bAllCopied);

	return true;
}

// Builds the map of zip-entry names to their unzip target paths for the
// native libraries needed by the Kinect-v2 interface.
private static Dictionary<string, string> GetNativeLibsMap(string sTargetPath)
{
	Dictionary<string, string> dictFilesToUnzip = new Dictionary<string, string>();
	dictFilesToUnzip["KinectUnityAddin.dll"] = sTargetPath + "KinectUnityAddin.dll";
	dictFilesToUnzip["Kinect20.Face.dll"] = sTargetPath + "Kinect20.Face.dll";
	dictFilesToUnzip["KinectFaceUnityAddin.dll"] = sTargetPath + "KinectFaceUnityAddin.dll";
	dictFilesToUnzip["Kinect2SpeechWrapper.dll"] = sTargetPath + "Kinect2SpeechWrapper.dll";
	dictFilesToUnzip["Kinect20.VisualGestureBuilder.dll"] = sTargetPath + "Kinect20.VisualGestureBuilder.dll";
	dictFilesToUnzip["KinectVisualGestureBuilderUnityAddin.dll"] = sTargetPath + "KinectVisualGestureBuilderUnityAddin.dll";
	dictFilesToUnzip["vgbtechs/AdaBoostTech.dll"] = sTargetPath + "vgbtechs/AdaBoostTech.dll";
	dictFilesToUnzip["vgbtechs/RFRProgressTech.dll"] = sTargetPath + "vgbtechs/RFRProgressTech.dll";
	dictFilesToUnzip["msvcp110.dll"] = sTargetPath + "msvcp110.dll";
	dictFilesToUnzip["msvcr110.dll"] = sTargetPath + "msvcr110.dll";

	return dictFilesToUnzip;
}
| 172 |
|
| 173 |
// Releases the sensor interface; when bDeleteLibs is set, also removes the
// native libraries that were unzipped by InitSensorInterface().
public void FreeSensorInterface (bool bDeleteLibs)
{
	if(!bDeleteLibs)
		return;

	// KinectUnityAddin is unloaded first (true); the VC runtimes are merely deleted
	KinectInterop.DeleteNativeLib("KinectUnityAddin.dll", true);
	KinectInterop.DeleteNativeLib("msvcp110.dll", false);
	KinectInterop.DeleteNativeLib("msvcr110.dll", false);
}
| 182 |
|
| 183 |
// Checks whether a Kinect-v2 sensor is available.
// When sensorAlwaysAvailable is set, the mere presence of a default sensor object
// counts as available; otherwise the sensor is opened, polled for availability for
// up to ~3 seconds, and then closed again.
// NOTE(review): the while-loops below are blocking busy-waits on the main thread —
// worst case ~6 seconds with an unavailable sensor.
public bool IsSensorAvailable()
{
	KinectSensor sensor = KinectSensor.GetDefault();

	if(sensor != null)
	{
		if(sensorAlwaysAvailable)
		{
			// trust that the sensor object implies an attached sensor
			sensor = null;
			return true;
		}

		if(!sensor.IsOpen)
		{
			sensor.Open();
		}

		// busy-wait up to 3 seconds for the sensor to report availability
		float fWaitTime = Time.realtimeSinceStartup + 3f;
		while(!sensor.IsAvailable && Time.realtimeSinceStartup < fWaitTime)
		{
			// wait for availability
		}

		bool bAvailable = sensor.IsAvailable;

		if(sensor.IsOpen)
		{
			sensor.Close();
		}

		// busy-wait up to 3 seconds for the sensor to finish closing
		fWaitTime = Time.realtimeSinceStartup + 3f;
		while(sensor.IsOpen && Time.realtimeSinceStartup < fWaitTime)
		{
			// wait for sensor to close
		}

		sensor = null;
		return bAvailable;
	}

	// no default sensor object at all
	return false;
}
| 225 |
|
| 226 |
// Returns the number of available Kinect-v2 sensors: 1 when the default sensor
// becomes available within ~3 seconds after opening, otherwise 0.
// (The v2 SDK supports only a single sensor, hence the 0/1 result.)
// NOTE(review): like IsSensorAvailable(), this busy-waits on the calling thread.
public int GetSensorsCount()
{
	int numSensors = 0;

	KinectSensor sensor = KinectSensor.GetDefault();
	if(sensor != null)
	{
		if(!sensor.IsOpen)
		{
			sensor.Open();
		}

		// busy-wait up to 3 seconds for the sensor to report availability
		float fWaitTime = Time.realtimeSinceStartup + 3f;
		while(!sensor.IsAvailable && Time.realtimeSinceStartup < fWaitTime)
		{
			// wait for availability
		}

		numSensors = sensor.IsAvailable ? 1 : 0;

		if(sensor.IsOpen)
		{
			sensor.Close();
		}

		// busy-wait up to 3 seconds for the sensor to finish closing
		fWaitTime = Time.realtimeSinceStartup + 3f;
		while(sensor.IsOpen && Time.realtimeSinceStartup < fWaitTime)
		{
			// wait for sensor to close
		}
	}

	return numSensors;
}
| 260 |
|
| 261 |
// Opens the default Kinect-v2 sensor and allocates the image buffers for the
// frame sources requested in dwFlags. When bUseMultiSource is set, a single
// multi-source reader is opened instead of per-stream readers.
// Returns the populated SensorData, or null when no default sensor exists.
// NOTE(review): the sensorAngle parameter is not used by this implementation.
public KinectInterop.SensorData OpenDefaultSensor (KinectInterop.FrameSource dwFlags, float sensorAngle, bool bUseMultiSource)
{
	KinectInterop.SensorData sensorData = new KinectInterop.SensorData();
	// remember the requested sources for later frame acquisition
	sensorFlags = dwFlags;

	kinectSensor = KinectSensor.GetDefault();
	if(kinectSensor == null)
		return null;

	coordMapper = kinectSensor.CoordinateMapper;

	this.bodyCount = kinectSensor.BodyFrameSource.BodyCount;
	sensorData.bodyCount = this.bodyCount;
	// Kinect-v2 skeleton has 25 joints
	sensorData.jointCount = 25;

	// fixed camera parameters used by the rest of the framework
	sensorData.depthCameraFOV = 60f;
	sensorData.colorCameraFOV = 53.8f;
	sensorData.depthCameraOffset = -0.05f;
	sensorData.faceOverlayOffset = -0.04f;

	if((dwFlags & KinectInterop.FrameSource.TypeBody) != 0)
	{
		if(!bUseMultiSource)
			bodyFrameReader = kinectSensor.BodyFrameSource.OpenReader();

		bodyData = new Body[sensorData.bodyCount];
	}

	// color frames are always converted to RGBA
	var frameDesc = kinectSensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Rgba);
	sensorData.colorImageWidth = frameDesc.Width;
	sensorData.colorImageHeight = frameDesc.Height;

	if((dwFlags & KinectInterop.FrameSource.TypeColor) != 0)
	{
		if(!bUseMultiSource)
			colorFrameReader = kinectSensor.ColorFrameSource.OpenReader();

		sensorData.colorImage = new byte[frameDesc.BytesPerPixel * frameDesc.LengthInPixels];
	}

	sensorData.depthImageWidth = kinectSensor.DepthFrameSource.FrameDescription.Width;
	sensorData.depthImageHeight = kinectSensor.DepthFrameSource.FrameDescription.Height;

	if((dwFlags & KinectInterop.FrameSource.TypeDepth) != 0)
	{
		if(!bUseMultiSource)
			depthFrameReader = kinectSensor.DepthFrameSource.OpenReader();

		sensorData.depthImage = new ushort[kinectSensor.DepthFrameSource.FrameDescription.LengthInPixels];
	}

	if((dwFlags & KinectInterop.FrameSource.TypeBodyIndex) != 0)
	{
		if(!bUseMultiSource)
			bodyIndexFrameReader = kinectSensor.BodyIndexFrameSource.OpenReader();

		sensorData.bodyIndexImage = new byte[kinectSensor.BodyIndexFrameSource.FrameDescription.LengthInPixels];
	}

	if((dwFlags & KinectInterop.FrameSource.TypeInfrared) != 0)
	{
		if(!bUseMultiSource)
			infraredFrameReader = kinectSensor.InfraredFrameSource.OpenReader();

		sensorData.infraredImage = new ushort[kinectSensor.InfraredFrameSource.FrameDescription.LengthInPixels];
	}

	//if(!kinectSensor.IsOpen)
	{
		//Debug.Log("Opening sensor, available: " + kinectSensor.IsAvailable);
		kinectSensor.Open();
	}

	// busy-wait up to 3 seconds for the sensor to become available
	float fWaitTime = Time.realtimeSinceStartup + 3f;
	while(!kinectSensor.IsAvailable && Time.realtimeSinceStartup < fWaitTime)
	{
		// wait for sensor to open
	}

	Debug.Log("K2-sensor " + (kinectSensor.IsOpen ? "opened" : "closed") +
		", available: " + kinectSensor.IsAvailable);

	if(bUseMultiSource && dwFlags != KinectInterop.FrameSource.TypeNone && kinectSensor.IsOpen)
	{
		// only the lower 6 flag bits map to the SDK's FrameSourceTypes
		multiSourceFrameReader = kinectSensor.OpenMultiSourceFrameReader((FrameSourceTypes)((int)dwFlags & 0x3F));
	}

	return sensorData;
}
| 350 |
|
| 351 |
// Shuts down the sensor: disposes all frame readers first, then closes the
// sensor itself and waits (busy-wait, up to ~3s) until it reports closed.
// Safe to call when some or all readers were never opened.
public void CloseSensor (KinectInterop.SensorData sensorData)
{
	if(coordMapper != null)
	{
		coordMapper = null;
	}

	if(bodyFrameReader != null)
	{
		bodyFrameReader.Dispose();
		bodyFrameReader = null;
	}

	if(bodyIndexFrameReader != null)
	{
		bodyIndexFrameReader.Dispose();
		bodyIndexFrameReader = null;
	}

	if(colorFrameReader != null)
	{
		colorFrameReader.Dispose();
		colorFrameReader = null;
	}

	if(depthFrameReader != null)
	{
		depthFrameReader.Dispose();
		depthFrameReader = null;
	}

	if(infraredFrameReader != null)
	{
		infraredFrameReader.Dispose();
		infraredFrameReader = null;
	}

	if(multiSourceFrameReader != null)
	{
		multiSourceFrameReader.Dispose();
		multiSourceFrameReader = null;
	}

	if(kinectSensor != null)
	{
		//if (kinectSensor.IsOpen)
		{
			//Debug.Log("Closing sensor, available: " + kinectSensor.IsAvailable);
			kinectSensor.Close();
		}

		// busy-wait up to 3 seconds for the sensor to finish closing
		float fWaitTime = Time.realtimeSinceStartup + 3f;
		while(kinectSensor.IsOpen && Time.realtimeSinceStartup < fWaitTime)
		{
			// wait for sensor to close
		}

		Debug.Log("K2-sensor " + (kinectSensor.IsOpen ? "opened" : "closed") +
			", available: " + kinectSensor.IsAvailable);

		kinectSensor = null;
	}
}
| 414 |
|
| 415 |
// Per-frame sensor-data update hook; the Kinect-v2 interface has nothing to do
// here, since all data transfer happens in the Poll*Frame() methods.
public bool UpdateSensorData (KinectInterop.SensorData sensorData)
{
	return true;
}
| 419 |
|
| 420 |
// Acquires the latest multi-source frame and extracts the individual frames
// requested by sensorFlags into the ms*Frame fields. When not all requested
// frames are present, the partially acquired frames are released immediately,
// so downstream pollers see an all-or-nothing set.
// Returns true when a multi-source frame was acquired (even if its sub-frames
// were released again), false when no multi-source reader or frame is available.
public bool GetMultiSourceFrame (KinectInterop.SensorData sensorData)
{
	if(multiSourceFrameReader != null)
	{
		multiSourceFrame = multiSourceFrameReader.AcquireLatestFrame();

		if(multiSourceFrame != null)
		{
			// try to get all frames at once
			msBodyFrame = (sensorFlags & KinectInterop.FrameSource.TypeBody) != 0 ? multiSourceFrame.BodyFrameReference.AcquireFrame() : null;
			msBodyIndexFrame = (sensorFlags & KinectInterop.FrameSource.TypeBodyIndex) != 0 ? multiSourceFrame.BodyIndexFrameReference.AcquireFrame() : null;
			msColorFrame = (sensorFlags & KinectInterop.FrameSource.TypeColor) != 0 ? multiSourceFrame.ColorFrameReference.AcquireFrame() : null;
			msDepthFrame = (sensorFlags & KinectInterop.FrameSource.TypeDepth) != 0 ? multiSourceFrame.DepthFrameReference.AcquireFrame() : null;
			msInfraredFrame = (sensorFlags & KinectInterop.FrameSource.TypeInfrared) != 0 ? multiSourceFrame.InfraredFrameReference.AcquireFrame() : null;

			// a requested source counts as "set" only when its frame was acquired
			bool bAllSet =
				((sensorFlags & KinectInterop.FrameSource.TypeBody) == 0 || msBodyFrame != null) &&
				((sensorFlags & KinectInterop.FrameSource.TypeBodyIndex) == 0 || msBodyIndexFrame != null) &&
				((sensorFlags & KinectInterop.FrameSource.TypeColor) == 0 || msColorFrame != null) &&
				((sensorFlags & KinectInterop.FrameSource.TypeDepth) == 0 || msDepthFrame != null) &&
				((sensorFlags & KinectInterop.FrameSource.TypeInfrared) == 0 || msInfraredFrame != null);

			if(!bAllSet)
			{
				// release all frames
				if(msBodyFrame != null)
				{
					msBodyFrame.Dispose();
					msBodyFrame = null;
				}

				if(msBodyIndexFrame != null)
				{
					msBodyIndexFrame.Dispose();
					msBodyIndexFrame = null;
				}

				if(msColorFrame != null)
				{
					msColorFrame.Dispose();
					msColorFrame = null;
				}

				if(msDepthFrame != null)
				{
					msDepthFrame.Dispose();
					msDepthFrame = null;
				}

				if(msInfraredFrame != null)
				{
					msInfraredFrame.Dispose();
					msInfraredFrame = null;
				}
			}
//			else
//			{
//				bool bNeedBody = (sensorFlags & KinectInterop.FrameSource.TypeBody) != 0;
//				bool bNeedBodyIndex = (sensorFlags & KinectInterop.FrameSource.TypeBodyIndex) != 0;
//				bool bNeedColor = (sensorFlags & KinectInterop.FrameSource.TypeColor) != 0;
//				bool bNeedDepth = (sensorFlags & KinectInterop.FrameSource.TypeDepth) != 0;
//				bool bNeedInfrared = (sensorFlags & KinectInterop.FrameSource.TypeInfrared) != 0;
//
//				bAllSet = true;
//			}
		}

		return (multiSourceFrame != null);
	}

	return false;
}
| 492 |
|
| 493 |
// Releases all frames that were extracted from the current multi-source frame,
// then drops the reference to the multi-source frame itself.
public void FreeMultiSourceFrame (KinectInterop.SensorData sensorData)
{
	// dispose each sub-frame that is still held
	if(msBodyFrame != null)
	{
		msBodyFrame.Dispose();
		msBodyFrame = null;
	}

	if(msBodyIndexFrame != null)
	{
		msBodyIndexFrame.Dispose();
		msBodyIndexFrame = null;
	}

	if(msColorFrame != null)
	{
		msColorFrame.Dispose();
		msColorFrame = null;
	}

	if(msDepthFrame != null)
	{
		msDepthFrame.Dispose();
		msDepthFrame = null;
	}

	if(msInfraredFrame != null)
	{
		msInfraredFrame.Dispose();
		msInfraredFrame = null;
	}

	// the multi-source frame itself is not disposable here - just release the reference
	multiSourceFrame = null;
}
| 531 |
|
| 532 |
// Polls for a new body frame (from the multi-source frame when available, else
// from the dedicated body-frame reader) and transfers body/joint data into
// bodyFrame, transforming joint positions by kinectToWorld.
// When bIgnoreJointZ is set, all joints except the root reuse the root joint's
// raw Z, flattening the skeleton depth-wise (kinectPos keeps the raw Z).
// Returns true when a new frame was processed.
public bool PollBodyFrame (KinectInterop.SensorData sensorData, ref KinectInterop.BodyFrameData bodyFrame,
	ref Matrix4x4 kinectToWorld, bool bIgnoreJointZ)
{
	bool bNewFrame = false;

	if((multiSourceFrameReader != null && multiSourceFrame != null) ||
		bodyFrameReader != null)
	{
		BodyFrame frame = multiSourceFrame != null ? msBodyFrame :
			bodyFrameReader.AcquireLatestFrame();

		if(frame != null)
		{
			// refresh the cached body array and the frame timestamps
			frame.GetAndRefreshBodyData(bodyData);

			bodyFrame.liPreviousTime = bodyFrame.liRelativeTime;
			bodyFrame.liRelativeTime = frame.RelativeTime.Ticks;

			if(sensorData.hintHeightAngle)
			{
				// get the floor plane; its normal gives the sensor tilt, W the sensor height
				Windows.Kinect.Vector4 vFloorPlane = frame.FloorClipPlane;
				Vector3 floorPlane = new Vector3(vFloorPlane.X, vFloorPlane.Y, vFloorPlane.Z);

				sensorData.sensorRotDetected = Quaternion.FromToRotation(floorPlane, Vector3.up);
				sensorData.sensorHgtDetected = vFloorPlane.W;
			}

			// the frame can be released before the (slow) per-body transfer below
			frame.Dispose();
			frame = null;

			for(int i = 0; i < sensorData.bodyCount; i++)
			{
				Body body = bodyData[i];

				if (body == null)
				{
					bodyFrame.bodyData[i].bIsTracked = 0;
					continue;
				}

				bodyFrame.bodyData[i].bIsTracked = (short)(body.IsTracked ? 1 : 0);

				if(body.IsTracked)
				{
					// transfer body and joints data
					bodyFrame.bodyData[i].liTrackingID = (long)body.TrackingId;

					// cache the body joints (following the advice of Brian Chasalow)
					Dictionary<Windows.Kinect.JointType, Windows.Kinect.Joint> bodyJoints = body.Joints;

					for(int j = 0; j < sensorData.jointCount; j++)
					{
						Windows.Kinect.Joint joint = bodyJoints[(Windows.Kinect.JointType)j];
						KinectInterop.JointData jointData = bodyFrame.bodyData[i].joint[j];

						//jointData.jointType = (KinectInterop.JointType)j;
						jointData.trackingState = (KinectInterop.TrackingState)joint.TrackingState;

						if((int)joint.TrackingState != (int)TrackingState.NotTracked)
						{
							// kinectPos keeps the raw sensor Z; position may use the root joint's Z instead
							float jPosZ = (bIgnoreJointZ && j > 0) ? bodyFrame.bodyData[i].joint[0].kinectPos.z : joint.Position.Z;
							jointData.kinectPos = new Vector3(joint.Position.X, joint.Position.Y, joint.Position.Z);
							jointData.position = kinectToWorld.MultiplyPoint3x4(new Vector3(joint.Position.X, joint.Position.Y, jPosZ));
						}

						// orientations are not taken from the SDK here
						jointData.orientation = Quaternion.identity;
//						Windows.Kinect.Vector4 vQ = body.JointOrientations[jointData.jointType].Orientation;
//						jointData.orientation = new Quaternion(vQ.X, vQ.Y, vQ.Z, vQ.W);

						if(j == 0)
						{
							// joint 0 (the root) defines the overall body position/orientation
							bodyFrame.bodyData[i].position = jointData.position;
							bodyFrame.bodyData[i].orientation = jointData.orientation;
						}

						// JointData is a value copy - write it back into the array
						bodyFrame.bodyData[i].joint[j] = jointData;
					}

					// transfer hand states
					bodyFrame.bodyData[i].leftHandState = (KinectInterop.HandState)body.HandLeftState;
					bodyFrame.bodyData[i].leftHandConfidence = (KinectInterop.TrackingConfidence)body.HandLeftConfidence;

					bodyFrame.bodyData[i].rightHandState = (KinectInterop.HandState)body.HandRightState;
					bodyFrame.bodyData[i].rightHandConfidence = (KinectInterop.TrackingConfidence)body.HandRightConfidence;
				}
			}

			bNewFrame = true;
		}
	}

	return bNewFrame;
}
| 626 |
|
| 627 |
// Polls for a new color frame (from the multi-source frame when available, else
// from the dedicated color-frame reader) and copies it, converted to RGBA, into
// sensorData.colorImage. Returns true when a new frame was copied.
public bool PollColorFrame (KinectInterop.SensorData sensorData)
{
	bool bNewFrame = false;

	if((multiSourceFrameReader != null && multiSourceFrame != null) ||
		colorFrameReader != null)
	{
		ColorFrame colorFrame = multiSourceFrame != null ? msColorFrame :
			colorFrameReader.AcquireLatestFrame();

		if(colorFrame != null)
		{
			// pin the managed buffer so the native copy can write directly into it;
			// free the handle in finally so the buffer is never left pinned if the copy throws
			var pColorData = GCHandle.Alloc(sensorData.colorImage, GCHandleType.Pinned);
			try
			{
				colorFrame.CopyConvertedFrameDataToIntPtr(pColorData.AddrOfPinnedObject(), (uint)sensorData.colorImage.Length, ColorImageFormat.Rgba);
			}
			finally
			{
				pColorData.Free();
			}

			sensorData.lastColorFrameTime = colorFrame.RelativeTime.Ticks;

			colorFrame.Dispose();
			colorFrame = null;

			bNewFrame = true;
		}
	}

	return bNewFrame;
}
| 654 |
|
| 655 |
// Polls for new depth and body-index frames (from the multi-source frame when
// available, else from the dedicated readers) and copies them into
// sensorData.depthImage / sensorData.bodyIndexImage.
// NOTE: the body-index poll is nested inside the depth branch, mirroring the
// original control flow - body-index data is only polled when depth polling is possible.
// Returns true when at least one of the two frames was copied.
public bool PollDepthFrame (KinectInterop.SensorData sensorData)
{
	bool bNewFrame = false;

	if((multiSourceFrameReader != null && multiSourceFrame != null) ||
		depthFrameReader != null)
	{
		DepthFrame depthFrame = multiSourceFrame != null ? msDepthFrame :
			depthFrameReader.AcquireLatestFrame();

		if(depthFrame != null)
		{
			// pin the managed buffer for the native copy; free it even if the copy throws
			var pDepthData = GCHandle.Alloc(sensorData.depthImage, GCHandleType.Pinned);
			try
			{
				depthFrame.CopyFrameDataToIntPtr(pDepthData.AddrOfPinnedObject(), (uint)sensorData.depthImage.Length * sizeof(ushort));
			}
			finally
			{
				pDepthData.Free();
			}

			sensorData.lastDepthFrameTime = depthFrame.RelativeTime.Ticks;

			depthFrame.Dispose();
			depthFrame = null;

			bNewFrame = true;
		}

		if((multiSourceFrameReader != null && multiSourceFrame != null) ||
			bodyIndexFrameReader != null)
		{
			BodyIndexFrame bodyIndexFrame = multiSourceFrame != null ? msBodyIndexFrame :
				bodyIndexFrameReader.AcquireLatestFrame();

			if(bodyIndexFrame != null)
			{
				// same pin-copy-free pattern for the body-index buffer
				var pBodyIndexData = GCHandle.Alloc(sensorData.bodyIndexImage, GCHandleType.Pinned);
				try
				{
					bodyIndexFrame.CopyFrameDataToIntPtr(pBodyIndexData.AddrOfPinnedObject(), (uint)sensorData.bodyIndexImage.Length);
				}
				finally
				{
					pBodyIndexData.Free();
				}

				sensorData.lastBodyIndexFrameTime = bodyIndexFrame.RelativeTime.Ticks;

				bodyIndexFrame.Dispose();
				bodyIndexFrame = null;

				bNewFrame = true;
			}
		}
	}

	return bNewFrame;
}
| 703 |
|
| 704 |
// Polls for a new infrared frame (from the multi-source frame when available,
// else from the dedicated infrared-frame reader) and copies it into
// sensorData.infraredImage. Returns true when a new frame was copied.
public bool PollInfraredFrame (KinectInterop.SensorData sensorData)
{
	bool bNewFrame = false;

	if((multiSourceFrameReader != null && multiSourceFrame != null) ||
		infraredFrameReader != null)
	{
		InfraredFrame infraredFrame = multiSourceFrame != null ? msInfraredFrame :
			infraredFrameReader.AcquireLatestFrame();

		if(infraredFrame != null)
		{
			// pin the managed buffer for the native copy; free it even if the copy throws
			var pInfraredData = GCHandle.Alloc(sensorData.infraredImage, GCHandleType.Pinned);
			try
			{
				infraredFrame.CopyFrameDataToIntPtr(pInfraredData.AddrOfPinnedObject(), (uint)sensorData.infraredImage.Length * sizeof(ushort));
			}
			finally
			{
				pInfraredData.Free();
			}

			sensorData.lastInfraredFrameTime = infraredFrame.RelativeTime.Ticks;

			infraredFrame.Dispose();
			infraredFrame = null;

			bNewFrame = true;
		}
	}

	return bNewFrame;
}
| 731 |
|
| 732 |
// Hook for sensor-specific joint-orientation corrections.
// Intentionally empty for Kinect-v2: no fixes are needed.
public void FixJointOrientations(KinectInterop.SensorData sensorData, ref KinectInterop.BodyData bodyData)
{
}
| 736 |
|
| 737 |
// Estimates whether the user's body is turned away from the sensor, using the
// decision tree below (learned offline). Always false while the face is tracked
// or while face tracking is not initialized.
//
//face = On: Face (357.0/1.0)
//face = Off
//| Head_px <= -0.02
//| | Neck_dx <= 0.08: Face (46.0/1.0)
//| | Neck_dx > 0.08: Back (3.0)
//| Head_px > -0.02
//| | SpineShoulder_px <= -0.02: Face (4.0)
//| | SpineShoulder_px > -0.02: Back (64.0/1.0)
public bool IsBodyTurned(ref KinectInterop.BodyData bodyData)
{
	// without face tracking there is no basis for the estimate
	if(!bFaceTrackingInited)
		return false;

	// face = On: the user is facing the sensor
	if(IsFaceTracked(bodyData.liTrackingID))
		return false;

	// face = Off
	if(bodyData.joint[(int)KinectInterop.JointType.Head].posRel.x <= -0.02f)
	{
		// Head_px <= -0.02: turned when the neck moves right fast enough
		return (bodyData.joint[(int)KinectInterop.JointType.Neck].posVel.x > 0.08f);
	}

	// Head_px > -0.02: turned when the spine-shoulder sits right of the threshold
	return (bodyData.joint[(int)KinectInterop.JointType.SpineShoulder].posRel.x > -0.02f);
}
| 775 |
|
| 776 |
// Maps a single camera-space point to depth-image coordinates.
// Returns Vector2.zero when there is no coordinate mapper, or when the mapped
// point falls outside the depth image.
public Vector2 MapSpacePointToDepthCoords (KinectInterop.SensorData sensorData, Vector3 spacePos)
{
	Vector2 vDepthCoords = Vector2.zero;

	if(coordMapper != null)
	{
		// the mapper works on arrays, so wrap the single point
		CameraSpacePoint camPoint = new CameraSpacePoint();
		camPoint.X = spacePos.x;
		camPoint.Y = spacePos.y;
		camPoint.Z = spacePos.z;

		CameraSpacePoint[] camPoints = new CameraSpacePoint[1] { camPoint };
		DepthSpacePoint[] depthPoints = new DepthSpacePoint[1];

		coordMapper.MapCameraPointsToDepthSpace(camPoints, depthPoints);

		DepthSpacePoint mappedPoint = depthPoints[0];

		// keep the zero result for points mapped outside the depth frame
		bool bInsideFrame = mappedPoint.X >= 0 && mappedPoint.X < sensorData.depthImageWidth &&
			mappedPoint.Y >= 0 && mappedPoint.Y < sensorData.depthImageHeight;

		if(bInsideFrame)
		{
			vDepthCoords.x = mappedPoint.X;
			vDepthCoords.y = mappedPoint.Y;
		}
	}

	return vDepthCoords;
}
| 805 |
|
| 806 |
// converts a depth-image coordinate plus depth value to a camera-space position;
// returns Vector3.zero when the mapper is missing or depthPos is the zero vector
public Vector3 MapDepthPointToSpaceCoords (KinectInterop.SensorData sensorData, Vector2 depthPos, ushort depthVal)
{
    Vector3 vPoint = Vector3.zero;

    if(coordMapper == null || depthPos == Vector2.zero)
        return vPoint;

    // the coordinate mapper works on arrays - wrap the single point and its depth
    DepthSpacePoint[] depthPoints = new DepthSpacePoint[1];
    depthPoints[0].X = depthPos.x;
    depthPoints[0].Y = depthPos.y;

    ushort[] depthVals = new ushort[] { depthVal };

    CameraSpacePoint[] camPoints = new CameraSpacePoint[1];
    coordMapper.MapDepthPointsToCameraSpace(depthPoints, depthVals, camPoints);

    CameraSpacePoint mappedPoint = camPoints[0];
    vPoint.x = mappedPoint.X;
    vPoint.y = mappedPoint.Y;
    vPoint.z = mappedPoint.Z;

    return vPoint;
}
| 833 |
|
| 834 |
// maps the whole depth frame to camera-space coordinates, writing into the pre-allocated
// vSpaceCoords array (one entry per depth pixel); returns false if the mapper or depth image is missing
public bool MapDepthFrameToSpaceCoords (KinectInterop.SensorData sensorData, ref Vector3[] vSpaceCoords)
{
    if(coordMapper != null && sensorData.depthImage != null)
    {
        // pin both arrays so their addresses stay stable across the native call
        var pDepthData = GCHandle.Alloc(sensorData.depthImage, GCHandleType.Pinned);
        var pSpaceCoordsData = GCHandle.Alloc(vSpaceCoords, GCHandleType.Pinned);

        try
        {
            coordMapper.MapDepthFrameToCameraSpaceUsingIntPtr(
                pDepthData.AddrOfPinnedObject(),
                sensorData.depthImage.Length * sizeof(ushort),
                pSpaceCoordsData.AddrOfPinnedObject(),
                (uint)vSpaceCoords.Length);
        }
        finally
        {
            // always unpin, even if the native mapping throws - otherwise the handles (and arrays) leak
            pSpaceCoordsData.Free();
            pDepthData.Free();
        }

        return true;
    }

    return false;
}
| 855 |
|
| 856 |
// converts a depth-image coordinate plus depth value to color-image coordinates;
// returns Vector2.zero when the mapper is missing or depthPos is the zero vector
public Vector2 MapDepthPointToColorCoords (KinectInterop.SensorData sensorData, Vector2 depthPos, ushort depthVal)
{
    Vector2 vPoint = Vector2.zero;

    if(coordMapper == null || depthPos == Vector2.zero)
        return vPoint;

    // the coordinate mapper works on arrays - wrap the single point and its depth
    DepthSpacePoint[] depthPoints = new DepthSpacePoint[1];
    depthPoints[0].X = depthPos.x;
    depthPoints[0].Y = depthPos.y;

    ushort[] depthVals = new ushort[] { depthVal };

    ColorSpacePoint[] colPoints = new ColorSpacePoint[1];
    coordMapper.MapDepthPointsToColorSpace(depthPoints, depthVals, colPoints);

    ColorSpacePoint mappedPoint = colPoints[0];
    vPoint.x = mappedPoint.X;
    vPoint.y = mappedPoint.Y;

    return vPoint;
}
| 882 |
|
| 883 |
// maps the whole depth frame to color-image coordinates, writing into the pre-allocated
// vColorCoords array (one entry per depth pixel); returns false if mapper or frames are missing
public bool MapDepthFrameToColorCoords (KinectInterop.SensorData sensorData, ref Vector2[] vColorCoords)
{
    if(coordMapper != null && sensorData.colorImage != null && sensorData.depthImage != null)
    {
        // pin both arrays so their addresses stay stable across the native call
        var pDepthData = GCHandle.Alloc(sensorData.depthImage, GCHandleType.Pinned);
        var pColorCoordsData = GCHandle.Alloc(vColorCoords, GCHandleType.Pinned);

        try
        {
            coordMapper.MapDepthFrameToColorSpaceUsingIntPtr(
                pDepthData.AddrOfPinnedObject(),
                sensorData.depthImage.Length * sizeof(ushort),
                pColorCoordsData.AddrOfPinnedObject(),
                (uint)vColorCoords.Length);
        }
        finally
        {
            // always unpin, even if the native mapping throws - otherwise the handles (and arrays) leak
            pColorCoordsData.Free();
            pDepthData.Free();
        }

        return true;
    }

    return false;
}
| 904 |
|
| 905 |
// maps the color frame to depth-image coordinates, writing into the pre-allocated
// vDepthCoords array; returns false if mapper or frames are missing
public bool MapColorFrameToDepthCoords (KinectInterop.SensorData sensorData, ref Vector2[] vDepthCoords)
{
    if(coordMapper != null && sensorData.colorImage != null && sensorData.depthImage != null)
    {
        // pin both arrays so their addresses stay stable across the native call
        var pDepthData = GCHandle.Alloc(sensorData.depthImage, GCHandleType.Pinned);
        var pDepthCoordsData = GCHandle.Alloc(vDepthCoords, GCHandleType.Pinned);

        try
        {
            coordMapper.MapColorFrameToDepthSpaceUsingIntPtr(
                pDepthData.AddrOfPinnedObject(),
                (uint)sensorData.depthImage.Length * sizeof(ushort),
                pDepthCoordsData.AddrOfPinnedObject(),
                (uint)vDepthCoords.Length);
        }
        finally
        {
            // always unpin, even if the native mapping throws - otherwise the handles (and arrays) leak
            pDepthCoordsData.Free();
            pDepthData.Free();
        }

        return true;
    }

    return false;
}
| 926 |
|
| 927 |
// returns the index of the given joint in joint's array or -1 if joint is not applicable
// (for Kinect-v2 the interop joint enum maps 1:1 onto the array index, so this is a plain cast)
public int GetJointIndex(KinectInterop.JointType joint)
{
    return (int)joint;
}
| 932 |
|
| 933 |
// // returns the joint at given index |
| 934 |
// public KinectInterop.JointType GetJointAtIndex(int index) |
| 935 |
// {
|
| 936 |
// return (KinectInterop.JointType)(index); |
| 937 |
// } |
| 938 |
|
| 939 |
// returns the parent joint of the given joint in the Kinect-v2 skeleton hierarchy.
// only joints whose parent is NOT the immediately preceding enum value are listed
// explicitly; everything else falls through to the (joint - 1) default below.
public KinectInterop.JointType GetParentJoint(KinectInterop.JointType joint)
{
    switch(joint)
    {
        // SpineBase is the skeleton root - it is its own parent
        case KinectInterop.JointType.SpineBase:
            return KinectInterop.JointType.SpineBase;

        case KinectInterop.JointType.Neck:
            return KinectInterop.JointType.SpineShoulder;

        case KinectInterop.JointType.SpineShoulder:
            return KinectInterop.JointType.SpineMid;

        // both shoulders hang off SpineShoulder
        case KinectInterop.JointType.ShoulderLeft:
        case KinectInterop.JointType.ShoulderRight:
            return KinectInterop.JointType.SpineShoulder;

        // both hips hang off SpineBase
        case KinectInterop.JointType.HipLeft:
        case KinectInterop.JointType.HipRight:
            return KinectInterop.JointType.SpineBase;

        // hand tips and thumbs do not follow the sequential enum layout
        case KinectInterop.JointType.HandTipLeft:
            return KinectInterop.JointType.HandLeft;

        case KinectInterop.JointType.ThumbLeft:
            return KinectInterop.JointType.WristLeft;

        case KinectInterop.JointType.HandTipRight:
            return KinectInterop.JointType.HandRight;

        case KinectInterop.JointType.ThumbRight:
            return KinectInterop.JointType.WristRight;
    }

    // default: the enum is ordered so that a joint's parent is the previous enum value
    return (KinectInterop.JointType)((int)joint - 1);
}
| 976 |
|
| 977 |
// returns the next joint in the hierarchy, as to the given joint
// (i.e. the child joint along each limb chain); end joints return themselves
public KinectInterop.JointType GetNextJoint(KinectInterop.JointType joint)
{
    switch(joint)
    {
        // spine chain: SpineBase -> SpineMid -> SpineShoulder -> Neck -> Head
        case KinectInterop.JointType.SpineBase:
            return KinectInterop.JointType.SpineMid;
        case KinectInterop.JointType.SpineMid:
            return KinectInterop.JointType.SpineShoulder;
        case KinectInterop.JointType.SpineShoulder:
            return KinectInterop.JointType.Neck;
        case KinectInterop.JointType.Neck:
            return KinectInterop.JointType.Head;

        // left arm chain
        case KinectInterop.JointType.ShoulderLeft:
            return KinectInterop.JointType.ElbowLeft;
        case KinectInterop.JointType.ElbowLeft:
            return KinectInterop.JointType.WristLeft;
        case KinectInterop.JointType.WristLeft:
            return KinectInterop.JointType.HandLeft;
        case KinectInterop.JointType.HandLeft:
            return KinectInterop.JointType.HandTipLeft;

        // right arm chain
        case KinectInterop.JointType.ShoulderRight:
            return KinectInterop.JointType.ElbowRight;
        case KinectInterop.JointType.ElbowRight:
            return KinectInterop.JointType.WristRight;
        case KinectInterop.JointType.WristRight:
            return KinectInterop.JointType.HandRight;
        case KinectInterop.JointType.HandRight:
            return KinectInterop.JointType.HandTipRight;

        // left leg chain
        case KinectInterop.JointType.HipLeft:
            return KinectInterop.JointType.KneeLeft;
        case KinectInterop.JointType.KneeLeft:
            return KinectInterop.JointType.AnkleLeft;
        case KinectInterop.JointType.AnkleLeft:
            return KinectInterop.JointType.FootLeft;

        // right leg chain
        case KinectInterop.JointType.HipRight:
            return KinectInterop.JointType.KneeRight;
        case KinectInterop.JointType.KneeRight:
            return KinectInterop.JointType.AnkleRight;
        case KinectInterop.JointType.AnkleRight:
            return KinectInterop.JointType.FootRight;
    }

    return joint;  // in case of end joint - Head, HandTipLeft, HandTipRight, FootLeft, FootRight
}
| 1026 |
|
| 1027 |
// checks whether face tracking can be used, unzipping the architecture-specific
// native face-tracking DLLs and the NuiDatabase next to the executable if needed.
// sets bNeedRestart when files were copied and the app must restart to load them.
// always returns true (availability is assumed once the files are in place).
public bool IsFaceTrackingAvailable(ref bool bNeedRestart)
{
    bool bOneCopied = false, bAllCopied = true;

    // the face DLLs are architecture-specific; pick the matching target path and archive
    bool bArchX64 = KinectInterop.Is64bitArchitecture();
    string sTargetPath = KinectInterop.GetTargetDllPath(".", bArchX64) + "/";

    Dictionary<string, string> dictFilesToUnzip = new Dictionary<string, string>();
    dictFilesToUnzip["Kinect20.Face.dll"] = sTargetPath + "Kinect20.Face.dll";
    dictFilesToUnzip["KinectFaceUnityAddin.dll"] = sTargetPath + "KinectFaceUnityAddin.dll";
    dictFilesToUnzip["msvcp110.dll"] = sTargetPath + "msvcp110.dll";
    dictFilesToUnzip["msvcr110.dll"] = sTargetPath + "msvcr110.dll";

    KinectInterop.UnzipResourceFiles(dictFilesToUnzip,
        bArchX64 ? "KinectV2UnityAddin.x64.zip" : "KinectV2UnityAddin.x86.zip",
        ref bOneCopied, ref bAllCopied);

    // the face-model database is architecture-independent
    KinectInterop.UnzipResourceDirectory(sTargetPath, "NuiDatabase.zip", sTargetPath + "NuiDatabase");

    bNeedRestart = (bOneCopied && bAllCopied);

    return true;
}
| 1065 |
|
| 1066 |
// initializes face tracking: creates one face frame source + reader per possible body,
// and optionally (bUseFaceModel) the HD face sources, readers, models and alignments.
// bDrawFaceRect controls whether face rectangles should later be drawn over the color texture.
// returns true when initialization completed.
public bool InitFaceTracking(bool bUseFaceModel, bool bDrawFaceRect)
{
    isDrawFaceRect = bDrawFaceRect;

    // // load the native dlls to make sure libraries are loaded (after previous finish-unload)
    // KinectInterop.LoadNativeLib("Kinect20.Face.dll");
    // KinectInterop.LoadNativeLib("KinectFaceUnityAddin.dll");

    // specify the required face frame results
    FaceFrameFeatures faceFrameFeatures =
        FaceFrameFeatures.BoundingBoxInColorSpace
        //| FaceFrameFeatures.BoundingBoxInInfraredSpace
        | FaceFrameFeatures.PointsInColorSpace
        //| FaceFrameFeatures.PointsInInfraredSpace
        | FaceFrameFeatures.RotationOrientation
        | FaceFrameFeatures.FaceEngagement
        | FaceFrameFeatures.Glasses
        | FaceFrameFeatures.Happy
        | FaceFrameFeatures.LeftEyeClosed
        | FaceFrameFeatures.RightEyeClosed
        | FaceFrameFeatures.LookingAway
        | FaceFrameFeatures.MouthMoved
        | FaceFrameFeatures.MouthOpen
        ;

    // create a face frame source + reader to track each face in the FOV
    faceFrameSources = new FaceFrameSource[this.bodyCount];
    faceFrameReaders = new FaceFrameReader[this.bodyCount];

    if(bUseFaceModel)
    {
        // HD face arrays are only allocated when the face model is requested
        hdFaceFrameSources = new HighDefinitionFaceFrameSource[this.bodyCount];
        hdFaceFrameReaders = new HighDefinitionFaceFrameReader[this.bodyCount];

        hdFaceModels = new FaceModel[this.bodyCount];
        hdFaceAlignments = new FaceAlignment[this.bodyCount];
    }

    for (int i = 0; i < bodyCount; i++)
    {
        // create the face frame source with the required face frame features and an initial tracking Id of 0
        // (the actual tracking Id is assigned later, in UpdateFaceTracking)
        faceFrameSources[i] = FaceFrameSource.Create(this.kinectSensor, 0, faceFrameFeatures);

        // open the corresponding reader
        faceFrameReaders[i] = faceFrameSources[i].OpenReader();

        if(bUseFaceModel)
        {
            ///////// HD Face
            hdFaceFrameSources[i] = HighDefinitionFaceFrameSource.Create(this.kinectSensor);
            hdFaceFrameReaders[i] = hdFaceFrameSources[i].OpenReader();

            hdFaceModels[i] = FaceModel.Create();
            hdFaceAlignments[i] = FaceAlignment.Create();
        }
    }

    // allocate storage to store face frame results for each face in the FOV
    faceFrameResults = new FaceFrameResult[this.bodyCount];

    // FrameDescription frameDescription = this.kinectSensor.ColorFrameSource.FrameDescription;
    // faceDisplayWidth = frameDescription.Width;
    // faceDisplayHeight = frameDescription.Height;

    bFaceTrackingInited = true;

    return bFaceTrackingInited;
}
| 1134 |
|
| 1135 |
// shuts face tracking down, disposing readers and models and releasing all
// per-body face-tracking arrays; readers are disposed before their sources are dropped
public void FinishFaceTracking()
{
    // dispose the regular face readers first
    if(faceFrameReaders != null)
    {
        for (int i = 0; i < faceFrameReaders.Length; i++)
        {
            if (faceFrameReaders[i] != null)
            {
                faceFrameReaders[i].Dispose();
                faceFrameReaders[i] = null;
            }
        }
    }

    // face frame sources are not IDisposable here - just drop the references
    if(faceFrameSources != null)
    {
        for (int i = 0; i < faceFrameSources.Length; i++)
        {
            faceFrameSources[i] = null;
        }
    }

    ///////// HD Face
    // the HD arrays are allocated together in InitFaceTracking, so a non-null
    // hdFaceFrameSources implies the other HD arrays are allocated too
    if(hdFaceFrameSources != null)
    {
        for (int i = 0; i < hdFaceAlignments.Length; i++)
        {
            hdFaceAlignments[i] = null;
        }

        for (int i = 0; i < hdFaceModels.Length; i++)
        {
            if (hdFaceModels[i] != null)
            {
                hdFaceModels[i].Dispose();
                hdFaceModels[i] = null;
            }
        }

        for (int i = 0; i < hdFaceFrameReaders.Length; i++)
        {
            if (hdFaceFrameReaders[i] != null)
            {
                hdFaceFrameReaders[i].Dispose();
                hdFaceFrameReaders[i] = null;
            }
        }

        for (int i = 0; i < hdFaceFrameSources.Length; i++)
        {
            //hdFaceFrameSources[i].Dispose(true);
            hdFaceFrameSources[i] = null;
        }
    }

    bFaceTrackingInited = false;

    // // unload the native dlls to prevent hd-face-wrapper's memory leaks
    // KinectInterop.DeleteNativeLib("KinectFaceUnityAddin.dll", true);
    // KinectInterop.DeleteNativeLib("Kinect20.Face.dll", true);

}
| 1197 |
|
| 1198 |
// per-frame update of face tracking: re-binds each face frame source to the
// corresponding tracked body, polls the face readers and caches validated results
// in faceFrameResults; also updates HD face alignments when HD tracking is enabled.
// returns false if face tracking or body data is not initialized.
public bool UpdateFaceTracking()
{
    if(bodyData == null || faceFrameSources == null || faceFrameReaders == null)
        return false;

    for(int i = 0; i < this.bodyCount; i++)
    {
        if(faceFrameSources[i] != null)
        {
            // reset the source when its tracking id became invalid (body lost)
            if(!faceFrameSources[i].IsTrackingIdValid)
            {
                faceFrameSources[i].TrackingId = 0;
            }

            // re-bind the source to the currently tracked body at this index
            if(bodyData[i] != null && bodyData[i].IsTracked)
            {
                faceFrameSources[i].TrackingId = bodyData[i].TrackingId;
            }
        }

        if (faceFrameReaders[i] != null)
        {
            FaceFrame faceFrame = faceFrameReaders[i].AcquireLatestFrame();

            if (faceFrame != null)
            {
                // frames are not guaranteed to arrive in source order -
                // resolve which source this frame belongs to
                int index = GetFaceSourceIndex(faceFrame.FaceFrameSource);

                // keep only results with a plausible (positive-extent) face box
                if(ValidateFaceBox(faceFrame.FaceFrameResult))
                {
                    faceFrameResults[index] = faceFrame.FaceFrameResult;
                }
                else
                {
                    faceFrameResults[index] = null;
                }

                faceFrame.Dispose();
                faceFrame = null;
            }
        }

        ///////// HD Face
        if(hdFaceFrameSources != null && hdFaceFrameSources[i] != null)
        {
            if(!hdFaceFrameSources[i].IsTrackingIdValid)
            {
                hdFaceFrameSources[i].TrackingId = 0;
            }

            if(bodyData[i] != null && bodyData[i].IsTracked)
            {
                hdFaceFrameSources[i].TrackingId = bodyData[i].TrackingId;
            }
        }

        if(hdFaceFrameReaders != null && hdFaceFrameReaders[i] != null)
        {
            HighDefinitionFaceFrame hdFaceFrame = hdFaceFrameReaders[i].AcquireLatestFrame();

            if(hdFaceFrame != null)
            {
                // refresh the cached alignment in place for this body
                if(hdFaceFrame.IsFaceTracked && (hdFaceAlignments[i] != null))
                {
                    hdFaceFrame.GetAndRefreshFaceAlignmentResult(hdFaceAlignments[i]);
                }

                hdFaceFrame.Dispose();
                hdFaceFrame = null;
            }
        }

    }

    return true;
}
| 1274 |
|
| 1275 |
// finds the per-body index of the given face frame source, or -1 if it is not ours
private int GetFaceSourceIndex(FaceFrameSource faceFrameSource)
{
    for (int i = 0; i < this.bodyCount; i++)
    {
        if (this.faceFrameSources[i] == faceFrameSource)
        {
            return i;
        }
    }

    return -1;  // not found
}
| 1290 |
|
| 1291 |
// a face result is considered valid when it exists and its color-space
// bounding box has positive width and height
private bool ValidateFaceBox(FaceFrameResult faceResult)
{
    if (faceResult == null)
        return false;

    var faceBox = faceResult.FaceBoundingBoxInColorSpace;
    int boxWidth = faceBox.Right - faceBox.Left;
    int boxHeight = faceBox.Bottom - faceBox.Top;

    return boxWidth > 0 && boxHeight > 0;
}
| 1310 |
|
| 1311 |
// returns true when InitFaceTracking has completed and FinishFaceTracking has not yet run
public bool IsFaceTrackingActive()
{
    return bFaceTrackingInited;
}
| 1315 |
|
| 1316 |
// returns the draw-face-rectangle setting passed to InitFaceTracking
public bool IsDrawFaceRect()
{
    return isDrawFaceRect;
}
| 1320 |
|
| 1321 |
// returns true when the given user currently has a validated face-tracking result
public bool IsFaceTracked(long userId)
{
    if(faceFrameSources == null)
        return false;

    for (int i = 0; i < this.bodyCount; i++)
    {
        bool bSourceMatch = faceFrameSources[i] != null && faceFrameSources[i].TrackingId == (ulong)userId;

        // a matching source alone is not enough - a cached face result must exist too
        if(bSourceMatch && faceFrameResults != null && faceFrameResults[i] != null)
        {
            return true;
        }
    }

    return false;
}
| 1336 |
|
| 1337 |
// fetches the face rectangle of the given user in color-image coordinates;
// returns false when the user has no current face-tracking result
public bool GetFaceRect(long userId, ref Rect faceRect)
{
    if(faceFrameSources == null)
        return false;

    for (int i = 0; i < this.bodyCount; i++)
    {
        bool bSourceMatch = faceFrameSources[i] != null && faceFrameSources[i].TrackingId == (ulong)userId;

        if(bSourceMatch && faceFrameResults != null && faceFrameResults[i] != null)
        {
            // convert the SDK bounding box (edges) into a Unity rect (origin + size)
            var faceBox = faceFrameResults[i].FaceBoundingBoxInColorSpace;

            faceRect.x = faceBox.Left;
            faceRect.y = faceBox.Top;
            faceRect.width = faceBox.Right - faceBox.Left;
            faceRect.height = faceBox.Bottom - faceBox.Top;

            return true;
        }
    }

    return false;
}
| 1362 |
|
| 1363 |
// draws a magenta rectangle around each currently tracked face, directly into the color texture
public void VisualizeFaceTrackerOnColorTex(Texture2D texColor)
{
    if(!bFaceTrackingInited)
        return;

    for (int i = 0; i < this.bodyCount; i++)
    {
        bool bSourceValid = faceFrameSources != null && faceFrameSources[i] != null &&
            faceFrameSources[i].IsTrackingIdValid;
        bool bResultValid = faceFrameResults != null && faceFrameResults[i] != null;

        if(bSourceValid && bResultValid)
        {
            var faceBox = faceFrameResults[i].FaceBoundingBoxInColorSpace;
            UnityEngine.Color rectColor = UnityEngine.Color.magenta;

            // the four corners of the face box, in color-image coordinates
            Vector2 ptLeftTop = new Vector2(faceBox.Left, faceBox.Top);
            Vector2 ptRightTop = new Vector2(faceBox.Right, faceBox.Top);
            Vector2 ptRightBottom = new Vector2(faceBox.Right, faceBox.Bottom);
            Vector2 ptLeftBottom = new Vector2(faceBox.Left, faceBox.Bottom);

            // draw the four edges of the rectangle
            DrawLine(texColor, ptLeftTop, ptRightTop, rectColor);
            DrawLine(texColor, ptRightTop, ptRightBottom, rectColor);
            DrawLine(texColor, ptRightBottom, ptLeftBottom, rectColor);
            DrawLine(texColor, ptLeftBottom, ptLeftTop, rectColor);
        }
    }
}
| 1405 |
|
| 1406 |
// draws a line into the texture, delegating to the integer-coordinate KinectInterop helper
private void DrawLine(Texture2D a_Texture, Vector2 ptStart, Vector2 ptEnd, UnityEngine.Color a_Color)
{
    KinectInterop.DrawLine(a_Texture, (int)ptStart.x, (int)ptStart.y, (int)ptEnd.x, (int)ptEnd.y, a_Color);
}
| 1410 |
|
| 1411 |
// fetches the camera-space head position of the given user; returns false when the
// user is not tracked or the head position is not valid
public bool GetHeadPosition(long userId, ref Vector3 headPos)
{
    // guard against body data not being polled yet (the sibling accessors check this too)
    if(bodyData == null)
        return false;

    for (int i = 0; i < this.bodyCount; i++)
    {
        // bodyData entries may be null before the first body frame - check before dereferencing
        if(bodyData[i] != null && bodyData[i].TrackingId == (ulong)userId && bodyData[i].IsTracked)
        {
            CameraSpacePoint vHeadPos = bodyData[i].Joints[Windows.Kinect.JointType.Head].Position;

            // only accept positions in front of the sensor (Z > 0)
            if(vHeadPos.Z > 0f)
            {
                headPos.x = vHeadPos.X;
                headPos.y = vHeadPos.Y;
                headPos.z = vHeadPos.Z;

                return true;
            }
        }
    }

    return false;
}
| 1432 |
|
| 1433 |
// fetches the head rotation of the given user from the face-tracking result;
// returns false when there is no result or the rotation quaternion is not usable
public bool GetHeadRotation(long userId, ref Quaternion headRot)
{
    if(faceFrameSources == null)
        return false;

    for (int i = 0; i < this.bodyCount; i++)
    {
        bool bSourceMatch = faceFrameSources[i] != null && faceFrameSources[i].TrackingId == (ulong)userId;

        if(bSourceMatch && faceFrameResults != null && faceFrameResults[i] != null)
        {
            Windows.Kinect.Vector4 vHeadRot = faceFrameResults[i].FaceRotationQuaternion;

            // only accept rotations with a positive W component (the original guard
            // against not-yet-estimated rotations)
            if(vHeadRot.W > 0f)
            {
                headRot = new Quaternion(vHeadRot.X, vHeadRot.Y, vHeadRot.Z, vHeadRot.W);
                return true;
            }
        }
    }

    return false;
}
| 1460 |
|
| 1461 |
// copies the current HD-face animation units (AUs) of the given user into dictAU,
// converting the SDK enum keys to the interop enum; returns false when unavailable
public bool GetAnimUnits(long userId, ref Dictionary<KinectInterop.FaceShapeAnimations, float> dictAU)
{
    if(hdFaceFrameSources == null)
        return false;

    for (int i = 0; i < this.bodyCount; i++)
    {
        bool bSourceMatch = hdFaceFrameSources[i] != null && hdFaceFrameSources[i].TrackingId == (ulong)userId;

        if(bSourceMatch && hdFaceAlignments != null && hdFaceAlignments[i] != null)
        {
            foreach(Microsoft.Kinect.Face.FaceShapeAnimations akey in hdFaceAlignments[i].AnimationUnits.Keys)
            {
                dictAU[(KinectInterop.FaceShapeAnimations)akey] = hdFaceAlignments[i].AnimationUnits[akey];
            }

            return true;
        }
    }

    return false;
}
| 1481 |
|
| 1482 |
// copies the HD-face shape units (SUs / face shape deformations) of the given user
// into dictSU, converting the SDK enum keys to the interop enum; returns false when unavailable
public bool GetShapeUnits(long userId, ref Dictionary<KinectInterop.FaceShapeDeformations, float> dictSU)
{
    if(hdFaceFrameSources == null)
        return false;

    for (int i = 0; i < this.bodyCount; i++)
    {
        bool bSourceMatch = hdFaceFrameSources[i] != null && hdFaceFrameSources[i].TrackingId == (ulong)userId;

        if(bSourceMatch && hdFaceModels != null && hdFaceModels[i] != null)
        {
            foreach(Microsoft.Kinect.Face.FaceShapeDeformations skey in hdFaceModels[i].FaceShapeDeformations.Keys)
            {
                dictSU[(KinectInterop.FaceShapeDeformations)skey] = hdFaceModels[i].FaceShapeDeformations[skey];
            }

            return true;
        }
    }

    return false;
}
| 1502 |
|
| 1503 |
// returns the number of vertices in the user's HD face model (userId == 0 matches any face),
// or 0 when no matching model is available
public int GetFaceModelVerticesCount(long userId)
{
    if(hdFaceFrameSources == null)
        return 0;

    for (int i = 0; i < this.bodyCount; i++)
    {
        bool bSourceMatch = hdFaceFrameSources[i] != null &&
            (hdFaceFrameSources[i].TrackingId == (ulong)userId || userId == 0);

        if(bSourceMatch && hdFaceModels != null && hdFaceModels[i] != null)
        {
            // the count comes from an actual vertex calculation for the current alignment
            return hdFaceModels[i].CalculateVerticesForAlignment(hdFaceAlignments[i]).Count;
        }
    }

    return 0;
}
| 1521 |
|
| 1522 |
// fills avVertices with the user's current HD face model vertices (userId == 0 matches any face).
// note: returns true once a model is found, even if avVertices has the wrong length
// and no copy was performed - callers size the array via GetFaceModelVerticesCount first
public bool GetFaceModelVertices(long userId, ref Vector3[] avVertices)
{
    if(hdFaceFrameSources == null)
        return false;

    for (int i = 0; i < this.bodyCount; i++)
    {
        bool bSourceMatch = hdFaceFrameSources[i] != null &&
            (hdFaceFrameSources[i].TrackingId == (ulong)userId || userId == 0);

        if(bSourceMatch && hdFaceModels != null && hdFaceModels[i] != null)
        {
            var vertices = hdFaceModels[i].CalculateVerticesForAlignment(hdFaceAlignments[i]);
            int verticesCount = vertices.Count;

            if(avVertices.Length == verticesCount)
            {
                for(int v = 0; v < verticesCount; v++)
                {
                    avVertices[v].x = vertices[v].X;
                    avVertices[v].y = vertices[v].Y;
                    avVertices[v].z = vertices[v].Z; // -vertices[v].Z;
                }
            }

            return true;
        }
    }

    return false;
}
| 1550 |
|
| 1551 |
// returns the total number of triangle indices in the (static) HD face model topology
public int GetFaceModelTrianglesCount()
{
    return FaceModel.TriangleIndices.Count;
}
| 1558 |
|
| 1559 |
// copies the HD face model triangle indices into avTriangles.
// each triangle's index order is reversed (i+2, i+1, i) to flip the winding for Unity;
// when bMirrored, the whole index array is additionally reversed.
// returns false when avTriangles is too small.
public bool GetFaceModelTriangles(bool bMirrored, ref int[] avTriangles)
{
    var triangleIndices = FaceModel.TriangleIndices;
    int triangleLength = triangleIndices.Count;

    if(avTriangles.Length >= triangleLength)
    {
        for(int i = 0; i < triangleLength; i += 3)
        {
            //avTriangles[i] = (int)triangleIndices[i];
            // reverse each triangle's winding order
            avTriangles[i] = (int)triangleIndices[i + 2];
            avTriangles[i + 1] = (int)triangleIndices[i + 1];
            avTriangles[i + 2] = (int)triangleIndices[i];
        }

        if(bMirrored)
        {
            // mirroring flips the mesh, so flip the winding back by reversing all indices
            Array.Reverse(avTriangles);
        }

        return true;
    }

    return false;
}
| 1584 |
|
| 1585 |
// checks whether speech recognition can be used, unzipping the architecture-specific
// speech wrapper DLLs next to the executable if needed; sets bNeedRestart when files
// were copied and the app must restart to load them. always returns true.
public bool IsSpeechRecognitionAvailable(ref bool bNeedRestart)
{
    bool bOneCopied = false, bAllCopied = true;

    // the speech wrapper DLLs are architecture-specific; pick the matching target path and archive
    bool bArchX64 = KinectInterop.Is64bitArchitecture();
    string sTargetPath = KinectInterop.GetTargetDllPath(".", bArchX64) + "/";

    Dictionary<string, string> dictFilesToUnzip = new Dictionary<string, string>();
    dictFilesToUnzip["Kinect2SpeechWrapper.dll"] = sTargetPath + "Kinect2SpeechWrapper.dll";
    dictFilesToUnzip["msvcp110.dll"] = sTargetPath + "msvcp110.dll";
    dictFilesToUnzip["msvcr110.dll"] = sTargetPath + "msvcr110.dll";

    KinectInterop.UnzipResourceFiles(dictFilesToUnzip,
        bArchX64 ? "KinectV2UnityAddin.x64.zip" : "KinectV2UnityAddin.x86.zip",
        ref bOneCopied, ref bAllCopied);

    bNeedRestart = (bOneCopied && bAllCopied);

    return true;
}
| 1618 |
|
| 1619 |
// initializes the native speech recognizer; returns the native HRESULT-style result code
public int InitSpeechRecognition(string sRecoCriteria, bool bUseKinect, bool bAdaptationOff)
{
    // if(kinectSensor != null)
    // {
    //     float fWaitTime = Time.realtimeSinceStartup + 5f;
    //
    //     while(!kinectSensor.IsAvailable && Time.realtimeSinceStartup < fWaitTime)
    //     {
    //         // wait
    //     }
    // }

    return InitSpeechRecognizerNative(sRecoCriteria, bUseKinect, bAdaptationOff);
}
| 1633 |
|
| 1634 |
// shuts down the native speech recognizer
public void FinishSpeechRecognition()
{
    FinishSpeechRecognizerNative();
}
| 1638 |
|
| 1639 |
// per-frame update of the native speech recognizer; returns the native result code
public int UpdateSpeechRecognition()
{
    return UpdateSpeechRecognizerNative();
}
| 1643 |
|
| 1644 |
// loads a speech grammar file into the native recognizer; iLangCode selects the
// recognition language, bDynamic marks the grammar as runtime-modifiable.
// returns the native result code.
public int LoadSpeechGrammar(string sFileName, short iLangCode, bool bDynamic)
{
    return LoadSpeechGrammarNative(sFileName, iLangCode, bDynamic);

    // int hr = AddSpeechGrammarNative(sFileName, iLangCode, bDynamic);
    // if(hr >= 0)
    // {
    //     hr = SetGrammarStateNative(sFileName, true);
    // }
    //
    // return hr;
}
| 1656 |
|
| 1657 |
// adds a phrase to a dynamic grammar rule; returns the native result code
public int AddGrammarPhrase(string sFromRule, string sToRule, string sPhrase, bool bClearRulePhrases, bool bCommitGrammar)
{
    return AddGrammarPhraseNative(sFromRule, sToRule, sPhrase, bClearRulePhrases, bCommitGrammar);
}
| 1661 |
|
| 1662 |
// sets the minimum confidence threshold for recognized phrases
public void SetSpeechConfidence(float fConfidence)
{
    SetSpeechConfidenceNative(fConfidence);
}
| 1666 |
|
| 1667 |
// returns true when the native recognizer has detected the start of speech
public bool IsSpeechStarted()
{
    return IsSpeechStartedNative();
}
| 1671 |
|
| 1672 |
// returns true when the native recognizer has detected the end of speech
public bool IsSpeechEnded()
{
    return IsSpeechEndedNative();
}
| 1676 |
|
| 1677 |
// returns true when the native recognizer holds a newly recognized phrase
public bool IsPhraseRecognized()
{
    return IsPhraseRecognizedNative();
}
| 1681 |
|
| 1682 |
// returns the confidence of the last recognized phrase
public float GetPhraseConfidence()
{
    return GetPhraseConfidenceNative();
}
| 1686 |
|
| 1687 |
// returns the semantic tag of the last recognized phrase;
// the native layer hands back a pointer to a Unicode string
public string GetRecognizedPhraseTag()
{
    return Marshal.PtrToStringUni(GetRecognizedPhraseTagNative());
}
| 1694 |
|
| 1695 |
// clears the last recognized phrase so the next one can be detected
public void ClearRecognizedPhrase()
{
    ClearRecognizedPhraseNative();
}
| 1699 |
|
| 1700 |
public bool IsBackgroundRemovalAvailable(ref bool bNeedRestart) |
| 1701 |
{
|
| 1702 |
bBackgroundRemovalInited = KinectInterop.IsOpenCvAvailable(ref bNeedRestart); |
| 1703 |
return bBackgroundRemovalInited; |
| 1704 |
} |
| 1705 |
|
| 1706 |
public bool InitBackgroundRemoval(KinectInterop.SensorData sensorData, bool isHiResPrefered) |
| 1707 |
{
|
| 1708 |
return KinectInterop.InitBackgroundRemoval(sensorData, isHiResPrefered); |
| 1709 |
} |
| 1710 |
|
| 1711 |
public void FinishBackgroundRemoval(KinectInterop.SensorData sensorData) |
| 1712 |
{
|
| 1713 |
KinectInterop.FinishBackgroundRemoval(sensorData); |
| 1714 |
bBackgroundRemovalInited = false; |
| 1715 |
} |
| 1716 |
|
| 1717 |
public bool UpdateBackgroundRemoval(KinectInterop.SensorData sensorData, bool isHiResPrefered, Color32 defaultColor, bool bAlphaTexOnly) |
| 1718 |
{
|
| 1719 |
return KinectInterop.UpdateBackgroundRemoval(sensorData, isHiResPrefered, defaultColor, bAlphaTexOnly); |
| 1720 |
} |
| 1721 |
|
| 1722 |
public bool IsBackgroundRemovalActive() |
| 1723 |
{
|
| 1724 |
return bBackgroundRemovalInited; |
| 1725 |
} |
| 1726 |
|
| 1727 |
public bool IsBRHiResSupported() |
| 1728 |
{
|
| 1729 |
return true; |
| 1730 |
} |
| 1731 |
|
| 1732 |
public Rect GetForegroundFrameRect(KinectInterop.SensorData sensorData, bool isHiResPrefered) |
| 1733 |
{
|
| 1734 |
return KinectInterop.GetForegroundFrameRect(sensorData, isHiResPrefered); |
| 1735 |
} |
| 1736 |
|
| 1737 |
public int GetForegroundFrameLength(KinectInterop.SensorData sensorData, bool isHiResPrefered) |
| 1738 |
{
|
| 1739 |
return KinectInterop.GetForegroundFrameLength(sensorData, isHiResPrefered); |
| 1740 |
} |
| 1741 |
|
| 1742 |
public bool PollForegroundFrame(KinectInterop.SensorData sensorData, bool isHiResPrefered, Color32 defaultColor, bool bLimitedUsers, ICollection<int> alTrackedIndexes, ref byte[] foregroundImage) |
| 1743 |
{
|
| 1744 |
return KinectInterop.PollForegroundFrame(sensorData, isHiResPrefered, defaultColor, bLimitedUsers, alTrackedIndexes, ref foregroundImage); |
| 1745 |
} |
| 1746 |
|
| 1747 |
} |
| 1748 |
#endif |