t1 / TFDContents / Assets / KinectDemos / VisualizerDemo / Scripts / UserMeshVisualizer.cs @ 3
using UnityEngine;
using System.Collections;


public class UserMeshVisualizer : MonoBehaviour
{
	[Tooltip("Index of the player, tracked by this component. -1 means all players, 0 - the 1st player only, 1 - the 2nd player only, etc.")]
	public int playerIndex = -1;

	[Tooltip("Whether the mesh is facing the player or not.")]
	public bool mirroredMovement = true;

	[Tooltip("Kinect origin position.")]
	public Vector3 originPosition = Vector3.zero;

	[Tooltip("Whether the z-movement is inverted or not.")]
	public bool invertedZMovement = false;

	[Tooltip("Smooth factor used for smoothing the mesh movement.")]
	public float smoothFactor = 0f;

	[Tooltip("Camera that may be used to overlay the mesh over the color background.")]
	public Camera foregroundCamera;

	[Tooltip("Whether to update the mesh collider as well, when the user mesh changes.")]
	public bool updateMeshCollider = false;

	[Tooltip("Number of pixels per direction in a sample.")]
	private const int sampleSize = 2;


	private Mesh mesh;
	private Vector3[] vertices;
	private Vector2[] uvs;
	private int[] triangles;

	private KinectManager manager = null;

	private KinectInterop.SensorData sensorData = null;
	//private Vector3[] spaceCoords = null;
	private Matrix4x4 kinectToWorld = Matrix4x4.identity;

	private int depthWidth = 0;
	private int depthHeight = 0;

	private int sampledWidth = 0;
	private int sampledHeight = 0;

	private long userId = 0;
	private byte userBodyIndex = 255;
	private Vector3 userMeshPos = Vector3.zero;

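	// vertexType marks each cell of the sampled depth grid: 0 - not part of the user,
	// 1 - used as a corner of some quad, 3 - top-left corner of a complete quad;
	// vertexIndex maps each non-zero cell to its position in the vertices array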
	private byte[] vertexType;
	private int[] vertexIndex;

	void Start()
	{
		manager = KinectManager.Instance;

		if (manager != null)
		{
			sensorData = manager.GetSensorData();

			depthWidth = manager.GetDepthImageWidth();
			depthHeight = manager.GetDepthImageHeight();

			sampledWidth = depthWidth / sampleSize;
			sampledHeight = depthHeight / sampleSize;

			//spaceCoords = new Vector3[depthWidth * depthHeight];

			if(sensorData.depth2SpaceCoords == null)
			{
				sensorData.depth2SpaceCoords = new Vector3[depthWidth * depthHeight];
			}

			vertexType = new byte[sampledWidth * sampledHeight];
			vertexIndex = new int[sampledWidth * sampledHeight];

			CreateMesh(sampledWidth, sampledHeight);
		}
	}

	private void CreateMesh(int width, int height)
	{
		mesh = new Mesh();
		mesh.name = "UserMesh";

		GetComponent<MeshFilter>().mesh = mesh;
	}

	void Update()
	{
		if (manager == null || !manager.IsInitialized())
			return;

		// get user texture
		Renderer renderer = GetComponent<Renderer>();
		if(renderer && renderer.material && renderer.material.mainTexture == null)
		{
			BackgroundRemovalManager backManager = BackgroundRemovalManager.Instance;
			renderer.material.mainTexture = backManager ? (Texture)sensorData.depth2ColorTexture : (Texture)manager.GetUsersLblTex();
		}

		// get kinect-to-world matrix
		kinectToWorld = manager.GetKinectToWorldMatrix();

		if(playerIndex >= 0)
		{
			long lastUserId = userId;
			userId = manager.GetUserIdByIndex(playerIndex);

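			// map the user id to a body index; 255 means not tracked, so substitute an
			// out-of-range index (222) to make sure no depth pixels match a missing user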
			userBodyIndex = (byte)manager.GetBodyIndexByUserId(userId);
			if(userBodyIndex == 255)
				userBodyIndex = 222;

			// check for color overlay
			if (foregroundCamera)
			{
				// get the background rectangle (use the portrait background, if available)
				Rect backgroundRect = foregroundCamera.pixelRect;
				PortraitBackground portraitBack = PortraitBackground.Instance;

				if (portraitBack && portraitBack.enabled)
				{
					backgroundRect = portraitBack.GetBackgroundRect();
				}

				// get user position
				userMeshPos = manager.GetJointPosColorOverlay(userId, (int)KinectInterop.JointType.SpineBase, foregroundCamera, backgroundRect);
			}
			else
			{
				// get user position
				userMeshPos = manager.GetJointKinectPosition(userId, (int)KinectInterop.JointType.SpineBase);
			}

			if(!mirroredMovement)
			{
				userMeshPos.x = -userMeshPos.x;
			}

			if (foregroundCamera == null)
			{
				// convert kinect pos to world coords, when there is no color overlay
				userMeshPos = kinectToWorld.MultiplyPoint3x4(userMeshPos);
			}

			// set transform position
			Vector3 newUserPos = userMeshPos + originPosition;  // manager.GetJointPosition(userId, (int)KinectInterop.JointType.SpineBase) + originPosition;

			if(invertedZMovement)
			{
				newUserPos.z = -newUserPos.z;
			}

			transform.position = lastUserId != 0 && smoothFactor != 0f ? Vector3.Lerp(transform.position, newUserPos, smoothFactor * Time.deltaTime) : newUserPos;
		}
		else
		{
			userId = 0;
			userBodyIndex = 255;
			userMeshPos = Vector3.zero;
		}

		// update the mesh
		UpdateMesh();
	}

	private void UpdateMesh()
	{
		if(sensorData.depthImage != null && sensorData.bodyIndexImage != null &&
			sensorData.depth2SpaceCoords != null && sensorData.spaceCoordsBufferReady)
		{
			int vCount = 0, tCount = 0;
			EstimateUserVertices(out vCount, out tCount);

			vertices = new Vector3[vCount];
			uvs = new Vector2[vCount];
			triangles = new int[6 * tCount];

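			// walk the depth image in steps of sampleSize pixels; 'index' addresses the cell in the
			// sampled grid, while 'xyIndex' addresses the corresponding pixel in the full depth image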
			int index = 0, vIndex = 0, tIndex = 0, xyIndex = 0;
			for (int y = 0; y < depthHeight; y += sampleSize)
			{
				int xyStartIndex = xyIndex;

				for (int x = 0; x < depthWidth; x += sampleSize)
				{
					//Vector3 vSpacePos = spaceCoords[xyIndex];
					Vector3 vSpacePos = sensorData.depth2SpaceCoords[xyIndex];

					if(vertexType[index] != 0 &&
						!float.IsInfinity(vSpacePos.x) && !float.IsInfinity(vSpacePos.y) && !float.IsInfinity(vSpacePos.z))
					{
						// check for color overlay
						if (foregroundCamera)
						{
							// get the background rectangle (use the portrait background, if available)
							Rect backgroundRect = foregroundCamera.pixelRect;
							PortraitBackground portraitBack = PortraitBackground.Instance;

							if(portraitBack && portraitBack.enabled)
							{
								backgroundRect = portraitBack.GetBackgroundRect();
							}

							Vector2 vColorPos = sensorData.depth2ColorCoords[xyIndex];
							ushort depthValue = sensorData.depthImage[xyIndex];

							if(!float.IsInfinity(vColorPos.x) && !float.IsInfinity(vColorPos.y) && depthValue > 0)
							{
								float xScaled = (float)vColorPos.x * backgroundRect.width / sensorData.colorImageWidth;
								float yScaled = (float)vColorPos.y * backgroundRect.height / sensorData.colorImageHeight;

								float xScreen = backgroundRect.x + xScaled;
								float yScreen = backgroundRect.y + backgroundRect.height - yScaled;
								float zDistance = (float)depthValue / 1000f;

								vSpacePos = foregroundCamera.ScreenToWorldPoint(new Vector3(xScreen, yScreen, zDistance));
							}
						}

						if(!mirroredMovement)
						{
							vSpacePos.x = -vSpacePos.x;
						}

						if(foregroundCamera == null)
						{
							// convert space to world coords, when there is no color overlay
							vSpacePos = kinectToWorld.MultiplyPoint3x4(vSpacePos);
						}

						vertices[vIndex] = vSpacePos - userMeshPos;
						uvs[vIndex] = new Vector2((float)x / depthWidth, (float)y / depthHeight);
						vIndex++;

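						// a type-3 cell is the top-left corner of a complete quad; emit its two
						// triangles - the winding is flipped when the movement is not mirrored,
						// because the x-coordinates are negated above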
						if(vertexType[index] == 3)
						{
							if(mirroredMovement)
							{
								triangles[tIndex++] = vertexIndex[index];  // top left
								triangles[tIndex++] = vertexIndex[index + 1];  // top right
								triangles[tIndex++] = vertexIndex[index + sampledWidth];  // bottom left

								triangles[tIndex++] = vertexIndex[index + sampledWidth];  // bottom left
								triangles[tIndex++] = vertexIndex[index + 1];  // top right
								triangles[tIndex++] = vertexIndex[index + sampledWidth + 1];  // bottom right
							}
							else
							{
								triangles[tIndex++] = vertexIndex[index + 1];  // top left
								triangles[tIndex++] = vertexIndex[index];  // top right
								triangles[tIndex++] = vertexIndex[index + sampledWidth + 1];  // bottom left

								triangles[tIndex++] = vertexIndex[index + sampledWidth + 1];  // bottom left
								triangles[tIndex++] = vertexIndex[index];  // top right
								triangles[tIndex++] = vertexIndex[index + sampledWidth];  // bottom right
							}
						}
					}

					index++;
					xyIndex += sampleSize;
				}

				xyIndex = xyStartIndex + sampleSize * depthWidth;
			}

			// release the space-coords buffer, so it can be filled with new data
			lock(sensorData.spaceCoordsBufferLock)
			{
				sensorData.spaceCoordsBufferReady = false;
			}

			mesh.Clear();
			mesh.vertices = vertices;
			mesh.uv = uvs;
			//mesh.normals = normals;
			mesh.triangles = triangles;
			mesh.RecalculateNormals();
			mesh.RecalculateBounds();

			if (updateMeshCollider)
			{
				MeshCollider meshCollider = GetComponent<MeshCollider>();

				if (meshCollider)
				{
					meshCollider.sharedMesh = null;
					meshCollider.sharedMesh = mesh;
				}
			}
		}
	}

//	// gets the average depth of the sample block
//	private ushort GetSampleDepth(int x, int y)
//	{
//		int depthSum = 0, count = 0;
//		int startIndex = y * depthWidth + x;
//
//		//for (int y1 = 0; y1 < SampleSize; y1++)
//		{
//			int pixelIndex = startIndex;
//
//			//for (int x1 = 0; x1 < SampleSize; x1++)
//			{
//				//if(sensorData.bodyIndexImage[pixelIndex] != 255)
//				{
//					//if(userBodyIndex == 255 || sensorData.bodyIndexImage[pixelIndex] == userBodyIndex)
//					{
//						depthSum += sensorData.depthImage[pixelIndex];
//						count++;
//					}
//				}
//
//				pixelIndex++;
//			}
//
//			pixelIndex += depthWidth;
//		}
//
//		return (ushort)(count > 0 ? (count > 1 ? depthSum / count : depthSum) : 0);
//	}


	// estimates which and how many sampled vertices are valid
	private void EstimateUserVertices(out int count1, out int count3)
	{
		System.Array.Clear(vertexType, 0, vertexType.Length);

		Vector3[] vSpacePos = new Vector3[4];
		int rowIndex = 0;

		for (int y = 0; y < sampledHeight - 1; y++)
		{
			int pixIndex = rowIndex;

			for (int x = 0; x < sampledWidth - 1; x++)
			{
				if(IsUserSampleValid(x, y, ref vSpacePos[0]) && IsUserSampleValid(x + 1, y, ref vSpacePos[1]) &&
					IsUserSampleValid(x, y + 1, ref vSpacePos[2]) && IsUserSampleValid(x + 1, y + 1, ref vSpacePos[3]))
				{
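					// accept the quad only if its corners lie within 10 cm of each other (0.01 = 0.1^2),
					// so that triangles don't bridge depth discontinuities at the user's silhouette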
					if(IsSpacePointsClose(vSpacePos, 0.01f))
					{
						vertexType[pixIndex] = 3;

						vertexType[pixIndex + 1] = 1;
						vertexType[pixIndex + sampledWidth] = 1;
						vertexType[pixIndex + sampledWidth + 1] = 1;
					}
				}

				pixIndex++;
			}

			rowIndex += sampledWidth;
		}

		// estimate counts
		count1 = 0;
		count3 = 0;

		for(int i = 0; i < vertexType.Length; i++)
		{
			if(vertexType[i] != 0)
			{
				vertexIndex[i] = count1;
				count1++;
			}
			else
			{
				vertexIndex[i] = 0;
			}

			if(vertexType[i] == 3)
			{
				count3++;
			}
		}
	}

	// checks whether all space points lie within the given squared distance of each other
	private bool IsSpacePointsClose(Vector3[] vSpacePos, float fMinDistSquared)
	{
		int iPosLength = vSpacePos.Length;

		for(int i = 0; i < iPosLength; i++)
		{
			for(int j = i + 1; j < iPosLength; j++)
			{
				Vector3 vDist = vSpacePos[j] - vSpacePos[i];
				if(vDist.sqrMagnitude > fMinDistSquared)
					return false;
			}
		}

		return true;
	}

	// checks whether this sample block is valid for this user
	private bool IsUserSampleValid(int x, int y, ref Vector3 vSpacePos)
	{
		int startIndex = y * sampleSize * depthWidth + x * sampleSize;

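		// note: the per-block loops below are commented out, so only the top-left pixel
		// of each sampleSize x sampleSize block is actually tested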
		//for (int y1 = 0; y1 < SampleSize; y1++)
		{
			int pixelIndex = startIndex;
			//vSpacePos = spaceCoords[pixelIndex];
			vSpacePos = sensorData.depth2SpaceCoords[pixelIndex];

			//for (int x1 = 0; x1 < SampleSize; x1++)
			{
				if(userBodyIndex != 255)
				{
					if(sensorData.bodyIndexImage[pixelIndex] == userBodyIndex &&
						!float.IsInfinity(vSpacePos.x) && !float.IsInfinity(vSpacePos.y) && !float.IsInfinity(vSpacePos.z))
					{
						return true;
					}
				}
				else
				{
					if(sensorData.bodyIndexImage[pixelIndex] != 255 &&
						!float.IsInfinity(vSpacePos.x) && !float.IsInfinity(vSpacePos.y) && !float.IsInfinity(vSpacePos.z))
					{
						return true;
					}
				}

				pixelIndex++;
			}

			startIndex += depthWidth;
		}

		return false;
	}

}
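
Usage note: the component expects a MeshFilter on the same game object (plus an optional Renderer with a material and an optional MeshCollider), and a KinectManager instance somewhere in the scene. Below is a minimal setup sketch; the object name, component configuration and example class are illustrative assumptions, not part of the demo scene itself.

// Hypothetical setup sketch - assumes the KinectManager prefab is already in the scene
// and that a material is assigned to the MeshRenderer elsewhere.
using UnityEngine;

public class UserMeshSetupExample : MonoBehaviour
{
	void Start()
	{
		GameObject meshObj = new GameObject("UserMesh");
		meshObj.AddComponent<MeshFilter>();
		meshObj.AddComponent<MeshRenderer>();
		meshObj.AddComponent<MeshCollider>();

		UserMeshVisualizer vis = meshObj.AddComponent<UserMeshVisualizer>();
		vis.playerIndex = 0;                 // track only the first detected player
		vis.mirroredMovement = true;
		vis.updateMeshCollider = true;       // keep the collider in sync with the mesh
		vis.foregroundCamera = Camera.main;  // overlay the mesh over the color camera image
	}
}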