簡體   English   中英

如何使用點雲庫捕獲一幀?

[英]How can I capture one frame using a Point Cloud Library?

        // Sample_PCL.cpp : Defines the entry point for the console application.
    //

    #include "stdafx.h"
    #define NOMINMAX
    #include <Windows.h>
    #include <Kinect.h>                                 // Kinectを使用するためのヘッダファイル
    #include <pcl/visualization/cloud_viewer.h>         // PCLを使用して表示するためのヘッダファイル
    #include <pcl/io/pcd_io.h>                          // 點群データを保存するためのヘッダファイル(.pcd, .ply)
    //#include <pcl/io/ply_io.h>
    #include <pcl/point_types.h>
    #include <iostream>

    template<class Interface>
    inline void SafeRelease(Interface *& pInterfaceToRelease)
    {
        if (pInterfaceToRelease != NULL) {
            pInterfaceToRelease->Release();
        }
    }

    int main()
    {
        // Create Sensor Instance
        IKinectSensor* pSensor;
        HRESULT hResult = S_OK;
        hResult = GetDefaultKinectSensor(&pSensor);
        if (FAILED(hResult)) {
            std::cerr << "Error : GetDefaultKinectSensor" << std::endl;
            return -1;
        }
        printf("GetDfaultKinectSensor is OK\n");

        // Open Sensor
        hResult = pSensor->Open();
        if (FAILED(hResult)) {
            std::cerr << "Error : IKinectSensor::Open()" << std::endl;
            return -1;
        }
        printf("IKinectSensor::Open is OK\n");

        // Retrieved Coordinate Mapper
        ICoordinateMapper* pCoordinateMapper;
        hResult = pSensor->get_CoordinateMapper(&pCoordinateMapper);
        if (FAILED(hResult)) {
            std::cerr << "Error : IKinectSensor::get_CoordinateMapper()" << std::endl;
            return -1;
        }
        printf("IKinectSensor::get_CoordinateMapper is OK\n");


        // Retrieved Color Frame Source
        IColorFrameSource* pColorSource;
        hResult = pSensor->get_ColorFrameSource(&pColorSource);
        if (FAILED(hResult)) {
            std::cerr << "Error : IKinectSensor::get_ColorFrameSource()" << std::endl;
            return -1;
        }
        printf("IKinectSensor::get_ColorFrameSource is OK\n");

        // Open Color Frame Reader
        IColorFrameReader* pColorReader;
        hResult = pColorSource->OpenReader(&pColorReader);
        if (FAILED(hResult)) {
            std::cerr << "Error : IColorFrameSource::OpenReader()" << std::endl;
            return -1;
        }
        printf("IColorFrameSource::OpenReader is OK\n");

        // Retrieved Depth Frame Source
        IDepthFrameSource* pDepthSource;
        hResult = pSensor->get_DepthFrameSource(&pDepthSource);
        if (FAILED(hResult)) {
            std::cerr << "Error : IKinectSensor::get_DepthFrameSource()" << std::endl;
            return -1;
        }
        printf("IKinectSensor::get_DepthFrameSource is OK\n");

        // Open Depth Frame Reader
        IDepthFrameReader* pDepthReader;
        hResult = pDepthSource->OpenReader(&pDepthReader);
        if (FAILED(hResult)) {
            std::cerr << "Error : IDepthFrameSource::OpenReader()" << std::endl;
            return -1;
        }
        printf("IDepthFrameSource::OpenReader is OK\n");

        // Retrieved Color Frame Size
        IFrameDescription* pColorDescription;
        hResult = pColorSource->get_FrameDescription(&pColorDescription);
        if (FAILED(hResult)) {
            std::cerr << "Error : IColorFrameSource::get_FrameDescription()" << std::endl;
            return -1;
        }
        printf("IColorFrameSource::get_FrameDescription is OK\n");

        int colorWidth = 0;
        int colorHeight = 0;
        pColorDescription->get_Width(&colorWidth); // 1920
        pColorDescription->get_Height(&colorHeight); // 1080

                                                     // To Reserve Color Frame Buffer
        std::vector<RGBQUAD> colorBuffer(colorWidth * colorHeight);

        // Retrieved Depth Frame Size
        IFrameDescription* pDepthDescription;
        hResult = pDepthSource->get_FrameDescription(&pDepthDescription);
        if (FAILED(hResult)) {
            std::cerr << "Error : IDepthFrameSource::get_FrameDescription()" << std::endl;
            return -1;
        }
        printf("IDepthFrameSource::get_FrameDescription is OK\n");

        int depthWidth = 0;
        int depthHeight = 0;
        pDepthDescription->get_Width(&depthWidth); // 512
        pDepthDescription->get_Height(&depthHeight); // 424

                                                     // To Reserve Depth Frame Buffer
        std::vector<UINT16> depthBuffer(depthWidth * depthHeight);

        printf("Display Point Cloud\n");

        // Create Cloud Viewer
        pcl::visualization::CloudViewer viewer("Point Cloud Viewer");       //  點群のウィンドウ表示

        while (!viewer.wasStopped()) {
            // Acquire Latest Color Frame
            IColorFrame* pColorFrame = nullptr;
            hResult = pColorReader->AcquireLatestFrame(&pColorFrame);
            if (SUCCEEDED(hResult)) {
                // Retrieved Color Data
                hResult = pColorFrame->CopyConvertedFrameDataToArray(colorBuffer.size() * sizeof(RGBQUAD), reinterpret_cast<BYTE*>(&colorBuffer[0]), ColorImageFormat::ColorImageFormat_Bgra);
                if (FAILED(hResult)) {
                    std::cerr << "Error : IColorFrame::CopyConvertedFrameDataToArray()" << std::endl;
                }
            }
            SafeRelease(pColorFrame);

            // Acquire Latest Depth Frame
            IDepthFrame* pDepthFrame = nullptr;
            hResult = pDepthReader->AcquireLatestFrame(&pDepthFrame);
            if (SUCCEEDED(hResult)) {
                // Retrieved Depth Data
                hResult = pDepthFrame->CopyFrameDataToArray(depthBuffer.size(), &depthBuffer[0]);
                if (FAILED(hResult)) {
                    std::cerr << "Error : IDepthFrame::CopyFrameDataToArray()" << std::endl;
                }
            }
            SafeRelease(pDepthFrame);

            // Point Cloud Libraryの設定
            // Create Point Cloud
            pcl::PointCloud<pcl::PointXYZRGB>::Ptr pointcloud(new pcl::PointCloud<pcl::PointXYZRGB>());     // PCLの構造體
            pointcloud->width = static_cast<uint32_t>(depthWidth);      // 點群の數
            pointcloud->height = static_cast<uint32_t>(depthHeight);
            pointcloud->is_dense = false;

            for (int y = 0; y < depthHeight; y++) {
                for (int x = 0; x < depthWidth; x++) {
                    pcl::PointXYZRGB point;     // PCLで使用する點群情報

                    DepthSpacePoint depthSpacePoint = { static_cast<float>(x), static_cast<float>(y) };
                    UINT16 depth = depthBuffer[y * depthWidth + x];

                    // Coordinate Mapping Depth to Color Space, and Setting PointCloud RGB
                    ColorSpacePoint colorSpacePoint = { 0.0f, 0.0f };
                    pCoordinateMapper->MapDepthPointToColorSpace(depthSpacePoint, depth, &colorSpacePoint);     //  色の座標系
                    int colorX = static_cast<int>(std::floor(colorSpacePoint.X + 0.5f));
                    int colorY = static_cast<int>(std::floor(colorSpacePoint.Y + 0.5f));
                    if ((0 <= colorX) && (colorX < colorWidth) && (0 <= colorY) && (colorY < colorHeight)) {
                        RGBQUAD color = colorBuffer[colorY * colorWidth + colorX];
                        // 色の情報を格納する
                        point.b = color.rgbBlue;
                        point.g = color.rgbGreen;
                        point.r = color.rgbRed;
                    }

                    // Coordinate Mapping Depth to Camera Space, and Setting PointCloud XYZ
                    CameraSpacePoint cameraSpacePoint = { 0.0f, 0.0f, 0.0f };           // カメラ空間
                    pCoordinateMapper->MapDepthPointToCameraSpace(depthSpacePoint, depth, &cameraSpacePoint);
                    if ((0 <= colorX) && (colorX < colorWidth) && (0 <= colorY) && (colorY < colorHeight)) {
                        // 直交座標系の情報を格納する
                        point.x = cameraSpacePoint.X;
                        point.y = cameraSpacePoint.Y;
                        point.z = cameraSpacePoint.Z;
                    }

                    pointcloud->push_back(point);
                }
            }

            // Show Point Cloud on Cloud Viewer
            viewer.showCloud(pointcloud);

            // Input Key ( Exit ESC key )
            if (GetKeyState(VK_ESCAPE) < 0) {
                pcl::io::savePCDFile("PointCloud.pcd", *pointcloud);
                //pcl::io::savePLYFile("test_pcd2.ply", *pointcloud);           // 最後に取得した點群を保存
                printf("Save Point Cloud Data\n");

                //break;
            }
        }

        // End Processing
        SafeRelease(pColorSource);
        SafeRelease(pDepthSource);
        SafeRelease(pColorReader);
        SafeRelease(pDepthReader);
        SafeRelease(pColorDescription);
        SafeRelease(pDepthDescription);
        SafeRelease(pCoordinateMapper);
        if (pSensor) {
            pSensor->Close();
        }
        SafeRelease(pSensor);

        printf("Disconnect Kinect Sensor\n");



        return 0;
    }

前面的代碼是從點雲庫網站的教程中獲取的代碼,它使用Kinect實時顯示Kinect看到的點雲。 因此,點雲在不斷變化。 這就是為什么我只想得到一個框架,換句話說,我希望點雲凍結而不是不斷捕獲新的框架。

這是我的修改。

        // Sample_PCL.cpp : Defines the entry point for the console application.
    //

    #include "stdafx.h"
    #define NOMINMAX
    #include <Windows.h>
    #include <Kinect.h>                                 // Kinectを使用するためのヘッダファイル
    #include <pcl/visualization/cloud_viewer.h>         // PCLを使用して表示するためのヘッダファイル
    #include <pcl/io/pcd_io.h>                          // 點群データを保存するためのヘッダファイル(.pcd, .ply)
    //#include <pcl/io/ply_io.h>
    #include <pcl/point_types.h>
    #include <iostream>

    template<class Interface>
    inline void SafeRelease(Interface *& pInterfaceToRelease)
    {
        if (pInterfaceToRelease != NULL) {
            pInterfaceToRelease->Release();
        }
    }

    int main()
    {
        // Create Sensor Instance
        IKinectSensor* pSensor;
        HRESULT hResult = S_OK;
        hResult = GetDefaultKinectSensor(&pSensor);
        if (FAILED(hResult)) {
            std::cerr << "Error : GetDefaultKinectSensor" << std::endl;
            return -1;
        }
        printf("GetDfaultKinectSensor is OK\n");

        // Open Sensor
        hResult = pSensor->Open();
        if (FAILED(hResult)) {
            std::cerr << "Error : IKinectSensor::Open()" << std::endl;
            return -1;
        }
        printf("IKinectSensor::Open is OK\n");

        // Retrieved Coordinate Mapper
        ICoordinateMapper* pCoordinateMapper;
        hResult = pSensor->get_CoordinateMapper(&pCoordinateMapper);
        if (FAILED(hResult)) {
            std::cerr << "Error : IKinectSensor::get_CoordinateMapper()" << std::endl;
            return -1;
        }
        printf("IKinectSensor::get_CoordinateMapper is OK\n");


        // Retrieved Color Frame Source
        IColorFrameSource* pColorSource;
        hResult = pSensor->get_ColorFrameSource(&pColorSource);
        if (FAILED(hResult)) {
            std::cerr << "Error : IKinectSensor::get_ColorFrameSource()" << std::endl;
            return -1;
        }
        printf("IKinectSensor::get_ColorFrameSource is OK\n");

        // Open Color Frame Reader
        IColorFrameReader* pColorReader;
        hResult = pColorSource->OpenReader(&pColorReader);
        if (FAILED(hResult)) {
            std::cerr << "Error : IColorFrameSource::OpenReader()" << std::endl;
            return -1;
        }
        printf("IColorFrameSource::OpenReader is OK\n");

        // Retrieved Depth Frame Source
        IDepthFrameSource* pDepthSource;
        hResult = pSensor->get_DepthFrameSource(&pDepthSource);
        if (FAILED(hResult)) {
            std::cerr << "Error : IKinectSensor::get_DepthFrameSource()" << std::endl;
            return -1;
        }
        printf("IKinectSensor::get_DepthFrameSource is OK\n");

        // Open Depth Frame Reader
        IDepthFrameReader* pDepthReader;
        hResult = pDepthSource->OpenReader(&pDepthReader);
        if (FAILED(hResult)) {
            std::cerr << "Error : IDepthFrameSource::OpenReader()" << std::endl;
            return -1;
        }
        printf("IDepthFrameSource::OpenReader is OK\n");

        // Retrieved Color Frame Size
        IFrameDescription* pColorDescription;
        hResult = pColorSource->get_FrameDescription(&pColorDescription);
        if (FAILED(hResult)) {
            std::cerr << "Error : IColorFrameSource::get_FrameDescription()" << std::endl;
            return -1;
        }
        printf("IColorFrameSource::get_FrameDescription is OK\n");

        int colorWidth = 0;
        int colorHeight = 0;
        pColorDescription->get_Width(&colorWidth); // 1920
        pColorDescription->get_Height(&colorHeight); // 1080

                                                     // To Reserve Color Frame Buffer
        std::vector<RGBQUAD> colorBuffer(colorWidth * colorHeight);

        // Retrieved Depth Frame Size
        IFrameDescription* pDepthDescription;
        hResult = pDepthSource->get_FrameDescription(&pDepthDescription);
        if (FAILED(hResult)) {
            std::cerr << "Error : IDepthFrameSource::get_FrameDescription()" << std::endl;
            return -1;
        }
        printf("IDepthFrameSource::get_FrameDescription is OK\n");

        int depthWidth = 0;
        int depthHeight = 0;
        pDepthDescription->get_Width(&depthWidth); // 512
        pDepthDescription->get_Height(&depthHeight); // 424

                                                     // To Reserve Depth Frame Buffer
        std::vector<UINT16> depthBuffer(depthWidth * depthHeight);

        printf("Display Point Cloud\n");

        // Create Cloud Viewer
        pcl::visualization::CloudViewer viewer("Point Cloud Viewer");       //  點群のウィンドウ表示

        //while (!viewer.wasStopped()) {
            // Acquire Latest Color Frame
            IColorFrame* pColorFrame = nullptr;
            hResult = pColorReader->AcquireLatestFrame(&pColorFrame);
            if (SUCCEEDED(hResult)) {
                // Retrieved Color Data
                hResult = pColorFrame->CopyConvertedFrameDataToArray(colorBuffer.size() * sizeof(RGBQUAD), reinterpret_cast<BYTE*>(&colorBuffer[0]), ColorImageFormat::ColorImageFormat_Bgra);
                if (FAILED(hResult)) {
                    std::cerr << "Error : IColorFrame::CopyConvertedFrameDataToArray()" << std::endl;
                }
            }
            SafeRelease(pColorFrame);

            // Acquire Latest Depth Frame
            IDepthFrame* pDepthFrame = nullptr;
            hResult = pDepthReader->AcquireLatestFrame(&pDepthFrame);
            if (SUCCEEDED(hResult)) {
                // Retrieved Depth Data
                hResult = pDepthFrame->CopyFrameDataToArray(depthBuffer.size(), &depthBuffer[0]);
                if (FAILED(hResult)) {
                    std::cerr << "Error : IDepthFrame::CopyFrameDataToArray()" << std::endl;
                }
            }
            SafeRelease(pDepthFrame);



            // Point Cloud Libraryの設定
            // Create Point Cloud
            pcl::PointCloud<pcl::PointXYZRGB>::Ptr pointcloud(new pcl::PointCloud<pcl::PointXYZRGB>());     // PCLの構造體
            pointcloud->width = static_cast<uint32_t>(depthWidth);      // 點群の數
            pointcloud->height = static_cast<uint32_t>(depthHeight);
            pointcloud->is_dense = false;

            for (int y = 0; y < depthHeight; y++) {

                for (int x = 0; x < depthWidth; x++) {
                    //printf("scann\n");
                    pcl::PointXYZRGB point;     // PCLで使用する點群情報

                    DepthSpacePoint depthSpacePoint = { static_cast<float>(x), static_cast<float>(y) };
                    UINT16 depth = depthBuffer[y * depthWidth + x];

                    // Coordinate Mapping Depth to Color Space, and Setting PointCloud RGB
                    ColorSpacePoint colorSpacePoint = { 0.0f, 0.0f };
                    pCoordinateMapper->MapDepthPointToColorSpace(depthSpacePoint, depth, &colorSpacePoint);     //  色の座標系
                    int colorX = static_cast<int>(std::floor(colorSpacePoint.X + 0.5f));
                    int colorY = static_cast<int>(std::floor(colorSpacePoint.Y + 0.5f));
                    if ((0 <= colorX) && (colorX < colorWidth) && (0 <= colorY) && (colorY < colorHeight)) {
                        RGBQUAD color = colorBuffer[colorY * colorWidth + colorX];
                        // 色の情報を格納する
                        point.b = color.rgbBlue;
                        point.g = color.rgbGreen;
                        point.r = color.rgbRed;
                    }

                    // Coordinate Mapping Depth to Camera Space, and Setting PointCloud XYZ
                    CameraSpacePoint cameraSpacePoint = { 0.0f, 0.0f, 0.0f };           // カメラ空間
                    pCoordinateMapper->MapDepthPointToCameraSpace(depthSpacePoint, depth, &cameraSpacePoint);
                    if ((0 <= colorX) && (colorX < colorWidth) && (0 <= colorY) && (colorY < colorHeight)) {
                        // 直交座標系の情報を格納する
                        point.x = cameraSpacePoint.X;
                        point.y = cameraSpacePoint.Y;
                        point.z = cameraSpacePoint.Z;
                    }

                    pointcloud->push_back(point);
                }
            //}

            viewer.showCloud(pointcloud);
            while (!viewer.wasStopped())
            {
            }
            /*pcl::io::savePCDFile("PointCloud.pcd", *pointcloud);
            printf("Saved Point Cloud Data\n");

            // Show Point Cloud on Cloud Viewer
            printf("Open viewer\n");
            viewer.showCloud(pointcloud);
            while (!viewer.wasStopped()) {

            }*/

            // Input Key ( Exit ESC key )
            if (GetKeyState(VK_ESCAPE) < 0) {
                pcl::io::savePCDFile("PointCloud.pcd", *pointcloud);
                //pcl::io::savePLYFile("test_pcd2.ply", *pointcloud);           // 最後に取得した點群を保存
                printf("Save Point Cloud Data\n");

                //break;
            }
        }

        // End Processing
        SafeRelease(pColorSource);
        SafeRelease(pDepthSource);
        SafeRelease(pColorReader);
        SafeRelease(pDepthReader);
        SafeRelease(pColorDescription);
        SafeRelease(pDepthDescription);
        SafeRelease(pCoordinateMapper);
        if (pSensor) {
            pSensor->Close();
        }
        SafeRelease(pSensor);

        printf("Disconnect Kinect Sensor\n");



        return 0;
    }

修改主要包括刪除不斷更新點雲的循環,即:您可以在第二個代碼中看到它的注釋。

while (!viewer.wasStopped())

問題是點雲查看器不顯示Kinect接收到的任何數據,我想知道無法顯示它的原因。

您的代碼似乎只顯示它從Kinect接收到的第一幀,該幀可能為空或無效。 您是否檢查過雲中的點是否理智?

但是,您可能希望以不同的方式解決該問題:

  1. 保持可視化循環不變。
  2. 使用registerKeyboardCallbackdoc )注冊密鑰處理程序。
  3. 當按下特定鍵時,將布爾值設置為true。
  4. 在可視化循環中,如果該布爾變量為true,則跳過從Kinect抓取的幀。 它應保留先前設置的雲。

暫無
暫無

聲明:本站的技術帖子網頁,遵循CC BY-SA 4.0協議,如果您需要轉載,請注明本站網址或者原文地址。任何問題請咨詢:yoyou2525@163.com.

 
粵ICP備18138465號  © 2020-2024 STACKOOM.COM