
Is it possible for Azure Kinect Body Tracking to accept depth data from a different depth camera?

This is being done in a Unity project.

I am currently trying to create a new Azure Kinect Capture object and fill it with data from the color, depth, and IR frames produced by a Kinect v2. Since no Azure Kinect camera is supposed to be connected, I created a fake Azure Kinect Calibration, which is used to create the Azure Kinect Body Tracking Tracker.

I ran into a problem where the project hangs if the data pulled from the Kinect v2 is enqueued successfully, and hangs immediately on run if I call pop on the enqueued data. I have added timeouts to both the enqueue and the pop, which fixed the freezing, but the popped body tracking Frame object never contains any bodies. I set up a scene in which the depth data is visualized, to make sure it isn't distorted or occluded, and it looks fine.

Before I keep trying to make this work, I want to check whether I am missing something here, or whether what I am attempting is even possible.

The fake calibration:

Calibration cal = new Calibration {
                DepthCameraCalibration = new CameraCalibration {
                    Extrinsics = new Extrinsics {
                        Rotation = new float[] { 1.0f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, 0.0f, 1.0f },
                        Translation = new float[] { 0.0f, 0.0f, 0.0f }
                    },
                    Intrinsics = new Intrinsics {
                        Type = CalibrationModelType.BrownConrady,
                        ParameterCount = 14,
                        Parameters = new float[] { 264.902374f, 261.016541f, 251.993011f, 252.0128f, 0.5496079f, -0.0305904336f, -0.00340628251f, 0.893285751f, 0.07668319f, -0.01748066f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f }
                    },
                    ResolutionWidth = 512,
                    ResolutionHeight = 512,
                    MetricRadius = 1.73999977f
                },
                ColorCameraCalibration = new CameraCalibration {
                    Extrinsics = new Extrinsics {
                        Rotation = new float[] { 0.9999973f, 0.00189682352f, -0.00130836014f, -0.00179401657f, 0.997216046f, 0.07454452f, 0.00144611555f, -0.07454198f, 0.9972168f },
                        Translation = new float[] { -32.1138039f, -2.46932817f, 3.97587371f }
                    },
                    Intrinsics = new Intrinsics {
                        Type = CalibrationModelType.BrownConrady,
                        ParameterCount = 14,
                        Parameters = new float[] { 957.2569f, 551.9336f, 913.142334f, 913.1438f, 0.4421505f, -2.83680415f, 1.73018765f, 0.32017225f, -2.644007f, 1.643955f, 0.0f, 0.0f, -0.000281378743f, 0.000288581447f, 0.0f }
                    },
                    ResolutionWidth = 1920,
                    ResolutionHeight = 1080,
                    MetricRadius = 1.7f
                },
                DeviceExtrinsics = new Extrinsics[] { //Device Extrinsics calibration chunk
                    new Extrinsics(){ Rotation = new float[] { 1.0f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, 0.0f, 1.0f },                                                                                   Translation = new float[] { 0.0f, 0.0f, 0.0f } },
                    new Extrinsics(){ Rotation = new float[] { 0.9999973f, 0.00189682352f, -0.00130836014f, -0.00179401657f, 0.997216046f, 0.07454452f, 0.00144611555f, -0.07454198f, 0.9972168f },      Translation = new float[] { -32.1138039f, -2.46932817f, 3.97587371f } },
                    new Extrinsics(){ Rotation = new float[] { -0.000347044057f, 0.110655256f, -0.9938588f, -0.999971569f, -0.007524097f, -0.000488546968f, -0.00753195f, 0.9938304f, 0.110654727f },    Translation = new float[] { 0.0f, 0.0f, 0.0f } },
                    new Extrinsics(){ Rotation = new float[] { 0.00211483915f, 0.106267117f, -0.994335353f, -0.999981642f, -0.005419674f, -0.00270606228f, -0.00567653868f, 0.994322836f, 0.1062537f },  Translation = new float[] { -51.137455f, 3.33257771f, 0.7745425f } },
                    new Extrinsics(){ Rotation = new float[] { 0.9999973f, -0.00179401657f, 0.00144611555f, 0.00189682352f, 0.997216046f, -0.07454198f, -0.00130836014f, 0.07454452f, 0.9972168f },      Translation = new float[] { 32.10354f, 2.81973743f, -3.82274985f } },
                    new Extrinsics(){ Rotation = new float[] { 0.99999994f, 0.0f, 0.0f, 0.0f, 0.99999994f, 0.0f, 0.0f, 0.0f, 1.0f },                                                                     Translation = new float[] { 0.0f, 0.0f, 0.0f } },
                    new Extrinsics(){ Rotation = new float[] { 0.00116317568f, 0.0362610966f, -0.9993417f, -0.9999825f, -0.005745603f, -0.00137240067f, -0.00579158543f, 0.9993258f, 0.03625378f },      Translation = new float[] { 4.100151f, -32.1219749f, 2.13753319f } },
                    new Extrinsics(){ Rotation = new float[] { 0.00361735234f, 0.0318452343f, -0.999486268f, -0.9999857f, -0.00381232449f, -0.00374062685f, -0.0039294865f, 0.9994855f, 0.0318309739f }, Translation = new float[] { -46.96882f, -28.77531f, 2.98985362f } },
                    new Extrinsics(){ Rotation = new float[] { -0.000347044057f, -0.999971569f, -0.00753195f, 0.110655256f, -0.007524097f, 0.9938304f, -0.9938588f, -0.000488546968f, 0.110654727f },    Translation = new float[] { 0.0f, 0.0f, 0.0f } },
                    new Extrinsics(){ Rotation = new float[] { 0.00116317568f, -0.9999825f, -0.00579158543f, 0.0362610966f, -0.005745603f, 0.9993258f, -0.9993417f, -0.00137240067f, 0.03625378f },      Translation = new float[] { -32.1138039f, -2.46932817f, 3.97587371f } },
                    new Extrinsics(){ Rotation = new float[] { 1.00000012f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, 0.0f, 1.00000012f },                                                                     Translation = new float[] { 0.0f, 0.0f, 0.0f } },
                    new Extrinsics(){ Rotation = new float[] { 0.999987245f, -0.00242856354f, -0.0044323504f, 0.002436766f, 0.9999953f, 0.00184613629f, 0.00442783535f, -0.00185691414f, 0.9999885f },   Translation = new float[] { -51.137455f, 3.33257771f, 0.7745425f } },
                    new Extrinsics(){ Rotation = new float[] { 0.00211483915f, -0.999981642f, -0.00567653868f, 0.106267117f, -0.005419674f, 0.994322836f, -0.994335353f, -0.00270606228f, 0.1062537f },  Translation = new float[] { 3.44506049f, 4.682146f, -50.92106f } },
                    new Extrinsics(){ Rotation = new float[] { 0.00361735234f, -0.9999857f, -0.0039294865f, 0.0318452343f, -0.00381232449f, 0.9994855f, -0.999486268f, -0.00374062685f, 0.0318309739f }, Translation = new float[] { -28.5932484f, -1.602283f, -47.1475f } },
                    new Extrinsics(){ Rotation = new float[] { 0.999987245f, 0.002436766f, 0.00442783535f, -0.00242856354f, 0.9999953f, -0.00185691414f, -0.0044323504f, 0.00184613629f, 0.9999885f },   Translation = new float[] { 51.125248f, -3.45531416f, -1.0073452f } },
                    new Extrinsics(){ Rotation = new float[] { 0.99999994f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, 0.0f, 1.0f },                                                                            Translation = new float[] { 0.0f, 0.0f, 0.0f } }
                },
                DepthMode = DepthMode.WFOV_2x2Binned,
                ColorResolution = ColorResolution.R1080p
            };

            tracker = Tracker.Create(cal, new TrackerConfiguration() {
                SensorOrientation = SensorOrientation.Default,
                ProcessingMode = TrackerProcessingMode.Gpu,
                GpuDeviceId = 0
            });
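
One way to sanity-check the fake calibration is to create a Transformation from it and project a depth image through it; if the struct were malformed, the native layer would typically fail fast. A minimal sketch, assuming depthImage is a 512x512 Depth16 image like the one assembled below:

// Hypothetical sanity check for the hand-built calibration; depthImage is assumed
// to be a 512x512 Depth16 image like the one built in the capture loop below.
using (Transformation transformation = cal.CreateTransformation())
using (Image pointCloud = transformation.DepthImageToPointCloud(depthImage)) {
    Debug.Log($"Point cloud generated: {pointCloud.WidthPixels}x{pointCloud.HeightPixels}");
}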

The depth-to-body-tracking process:

timeSinceStart = DateTime.Now.Subtract(timeOfStart);


            colorFrame = colorFrameReader.AcquireLatestFrame();
            depthFrame = depthFrameReader.AcquireLatestFrame();
            irFrame = infraredFrameReader.AcquireLatestFrame();

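            // KinectCapture is assumed here to be an alias for Microsoft.Azure.Kinect.Sensor.Capture,
            // aliased to avoid clashing with the Kinect v2 (Windows.Kinect) types in Unity.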
            KinectCapture capture = new KinectCapture();

            if (colorFrame != null) {

                Image colorImage = new Image(ImageFormat.ColorBGRA32, colorFrame.FrameDescription.Width, colorFrame.FrameDescription.Height) {
                    DeviceTimestamp = timeSinceStart
                };
                // Use TotalMilliseconds; TimeSpan.Milliseconds is only the 0-999 component.
                colorImage.SystemTimestampNsec = (long)timeSinceStart.TotalMilliseconds * 1000000;

                if (colorBuffer == null) {
                    FrameDescription description = colorFrame.ColorFrameSource.FrameDescription;
                    // 4 bytes per pixel for the converted BGRA output (the raw K2 color format is YUY2, 2 bytes per pixel).
                    colorBuffer = new byte[4 * description.Width * description.Height];
                }

                // Convert the K2 frame to BGRA so it matches ImageFormat.ColorBGRA32,
                // then copy it straight into the Azure Kinect image buffer.
                colorFrame.CopyConvertedFrameDataToArray(colorBuffer, ColorImageFormat.Bgra);
                colorBuffer.AsSpan().CopyTo(colorImage.Memory.Span);

                capture.Color = colorImage;
                colorFrame.Dispose();
            }

            if (depthFrame != null) {

                // The fake calibration advertises WFOV_2x2Binned (512x512), while the K2 depth
                // frame is 512x424, so the missing rows are zero-padded below.
                Image depthImage = new Image(ImageFormat.Depth16, 512, 512);
                depthImage.DeviceTimestamp = timeSinceStart;
                depthImage.SystemTimestampNsec = (long)timeSinceStart.TotalMilliseconds * 1000000;

                if (depthBuffer == null) {
                    var description = depthFrame.DepthFrameSource.FrameDescription;
                    depthBuffer = new ushort[description.Width * description.Height];
                }

                depthFrame.CopyFrameDataToArray(depthBuffer);

                for (int i = 0; i < 262144; i++) {
                    // SetPixel takes (row, col): row = i / width, col = i % width.
                    if (i < depthBuffer.Length) {
                        depthImage.SetPixel<ushort>(i / 512, i % 512, depthBuffer[i]);
                    } else {
                        depthImage.SetPixel<ushort>(i / 512, i % 512, 0);
                    }
                }

                capture.Depth = depthImage;
                depthFrame.Dispose();
            }

            if (irFrame != null) {

                Image irImage = new Image(ImageFormat.IR16, 512, 512);
                irImage.DeviceTimestamp = timeSinceStart;
                irImage.SystemTimestampNsec = (long)timeSinceStart.TotalMilliseconds * 1000000;

                if (irBuffer == null) {
                    var description = irFrame.InfraredFrameSource.FrameDescription;
                    irBuffer = new ushort[description.Width * description.Height];
                }

                // Copy the IR frame into its own buffer, leaving depthBuffer untouched.
                irFrame.CopyFrameDataToArray(irBuffer);

                for (int i = 0; i < 262144; i++) {
                    // Same (row, col) ordering and zero padding as the depth image.
                    if (i < irBuffer.Length) {
                        irImage.SetPixel<ushort>(i / 512, i % 512, irBuffer[i]);
                    } else {
                        irImage.SetPixel<ushort>(i / 512, i % 512, 0);
                    }
                }

                capture.IR = irImage;
                irFrame.Dispose();
            }

            capture.Temperature = 30.0f;

            try {
                if(capture.Color != null && capture.Depth != null && capture.IR != null)
                {
                    tracker.EnqueueCapture(capture, new TimeSpan(0, 0, 0, 0, 50));
                    Debug.Log("Successful Enqueue");
                }
                
            } catch (Exception ex) {
                Debug.Log($"Failed to enqeue\n{ex.Message}");
            }

            try {
                kFrame = tracker.PopResult(new TimeSpan(0, 0, 0, 15));
                
                Debug.Log("Bodies in frame: " + kFrame.NumberOfBodies);
            }
            catch (Exception ex) {
                Debug.Log($"Failed to pop from queue\n{ex.Message}");
            }

The Body Tracking SDK is designed to work with Azure Kinect devices.
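
As far as I know, the tracker's pose estimation model is trained on Azure Kinect depth/IR data, so captures from other sensors are unlikely to yield bodies even when the API accepts them. For comparison, a minimal sketch of the supported flow, assuming a connected Azure Kinect and the Microsoft.Azure.Kinect.Sensor / Microsoft.Azure.Kinect.BodyTracking packages:

using System;
using Microsoft.Azure.Kinect.Sensor;
using Microsoft.Azure.Kinect.BodyTracking;

using (Device device = Device.Open(0)) {
    DeviceConfiguration config = new DeviceConfiguration {
        ColorFormat = ImageFormat.ColorBGRA32,
        ColorResolution = ColorResolution.R1080p,
        DepthMode = DepthMode.WFOV_2x2Binned,
        SynchronizedImagesOnly = true,
        CameraFPS = FPS.FPS30
    };
    device.StartCameras(config);

    // The calibration comes from the device itself instead of being hand-built.
    Calibration deviceCal = device.GetCalibration(config.DepthMode, config.ColorResolution);

    using (Tracker tracker = Tracker.Create(deviceCal, new TrackerConfiguration {
        SensorOrientation = SensorOrientation.Default,
        ProcessingMode = TrackerProcessingMode.Gpu,
        GpuDeviceId = 0
    }))
    using (Capture capture = device.GetCapture()) {
        tracker.EnqueueCapture(capture);
        using (Frame frame = tracker.PopResult()) {
            Console.WriteLine($"Bodies in frame: {frame.NumberOfBodies}");
        }
    }
}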

