How can I capture a single frame with the Point Cloud Library?

Date: 2016-10-11 01:13:43

Tags: c++ kinect point-cloud-library

        // Sample_PCL.cpp : Defines the entry point for the console application.
    //

    #include "stdafx.h"
    #define NOMINMAX
    #include <Windows.h>
    #include <Kinect.h>                                 // Header for using the Kinect
    #include <pcl/visualization/cloud_viewer.h>         // Header for displaying point clouds with PCL
    #include <pcl/io/pcd_io.h>                          // Headers for saving point cloud data (.pcd, .ply)
    //#include <pcl/io/ply_io.h>
    #include <pcl/point_types.h>
    #include <iostream>

    template<class Interface>
    inline void SafeRelease(Interface *& pInterfaceToRelease)
    {
        if (pInterfaceToRelease != NULL) {
            pInterfaceToRelease->Release();
            pInterfaceToRelease = NULL;
        }
    }

    int main()
    {
        // Create Sensor Instance
        IKinectSensor* pSensor;
        HRESULT hResult = S_OK;
        hResult = GetDefaultKinectSensor(&pSensor);
        if (FAILED(hResult)) {
            std::cerr << "Error : GetDefaultKinectSensor" << std::endl;
            return -1;
        }
        printf("GetDefaultKinectSensor is OK\n");

        // Open Sensor
        hResult = pSensor->Open();
        if (FAILED(hResult)) {
            std::cerr << "Error : IKinectSensor::Open()" << std::endl;
            return -1;
        }
        printf("IKinectSensor::Open is OK\n");

        // Retrieved Coordinate Mapper
        ICoordinateMapper* pCoordinateMapper;
        hResult = pSensor->get_CoordinateMapper(&pCoordinateMapper);
        if (FAILED(hResult)) {
            std::cerr << "Error : IKinectSensor::get_CoordinateMapper()" << std::endl;
            return -1;
        }
        printf("IKinectSensor::get_CoordinateMapper is OK\n");


        // Retrieved Color Frame Source
        IColorFrameSource* pColorSource;
        hResult = pSensor->get_ColorFrameSource(&pColorSource);
        if (FAILED(hResult)) {
            std::cerr << "Error : IKinectSensor::get_ColorFrameSource()" << std::endl;
            return -1;
        }
        printf("IKinectSensor::get_ColorFrameSource is OK\n");

        // Open Color Frame Reader
        IColorFrameReader* pColorReader;
        hResult = pColorSource->OpenReader(&pColorReader);
        if (FAILED(hResult)) {
            std::cerr << "Error : IColorFrameSource::OpenReader()" << std::endl;
            return -1;
        }
        printf("IColorFrameSource::OpenReader is OK\n");

        // Retrieved Depth Frame Source
        IDepthFrameSource* pDepthSource;
        hResult = pSensor->get_DepthFrameSource(&pDepthSource);
        if (FAILED(hResult)) {
            std::cerr << "Error : IKinectSensor::get_DepthFrameSource()" << std::endl;
            return -1;
        }
        printf("IKinectSensor::get_DepthFrameSource is OK\n");

        // Open Depth Frame Reader
        IDepthFrameReader* pDepthReader;
        hResult = pDepthSource->OpenReader(&pDepthReader);
        if (FAILED(hResult)) {
            std::cerr << "Error : IDepthFrameSource::OpenReader()" << std::endl;
            return -1;
        }
        printf("IDepthFrameSource::OpenReader is OK\n");

        // Retrieved Color Frame Size
        IFrameDescription* pColorDescription;
        hResult = pColorSource->get_FrameDescription(&pColorDescription);
        if (FAILED(hResult)) {
            std::cerr << "Error : IColorFrameSource::get_FrameDescription()" << std::endl;
            return -1;
        }
        printf("IColorFrameSource::get_FrameDescription is OK\n");

        int colorWidth = 0;
        int colorHeight = 0;
        pColorDescription->get_Width(&colorWidth); // 1920
        pColorDescription->get_Height(&colorHeight); // 1080

        // To Reserve Color Frame Buffer
        std::vector<RGBQUAD> colorBuffer(colorWidth * colorHeight);

        // Retrieved Depth Frame Size
        IFrameDescription* pDepthDescription;
        hResult = pDepthSource->get_FrameDescription(&pDepthDescription);
        if (FAILED(hResult)) {
            std::cerr << "Error : IDepthFrameSource::get_FrameDescription()" << std::endl;
            return -1;
        }
        printf("IDepthFrameSource::get_FrameDescription is OK\n");

        int depthWidth = 0;
        int depthHeight = 0;
        pDepthDescription->get_Width(&depthWidth); // 512
        pDepthDescription->get_Height(&depthHeight); // 424

        // To Reserve Depth Frame Buffer
        std::vector<UINT16> depthBuffer(depthWidth * depthHeight);

        printf("Display Point Cloud\n");

        // Create Cloud Viewer
        pcl::visualization::CloudViewer viewer("Point Cloud Viewer");       // Point cloud display window

        while (!viewer.wasStopped()) {
            // Acquire Latest Color Frame
            IColorFrame* pColorFrame = nullptr;
            hResult = pColorReader->AcquireLatestFrame(&pColorFrame);
            if (SUCCEEDED(hResult)) {
                // Retrieved Color Data
                hResult = pColorFrame->CopyConvertedFrameDataToArray(colorBuffer.size() * sizeof(RGBQUAD), reinterpret_cast<BYTE*>(&colorBuffer[0]), ColorImageFormat::ColorImageFormat_Bgra);
                if (FAILED(hResult)) {
                    std::cerr << "Error : IColorFrame::CopyConvertedFrameDataToArray()" << std::endl;
                }
            }
            SafeRelease(pColorFrame);

            // Acquire Latest Depth Frame
            IDepthFrame* pDepthFrame = nullptr;
            hResult = pDepthReader->AcquireLatestFrame(&pDepthFrame);
            if (SUCCEEDED(hResult)) {
                // Retrieved Depth Data
                hResult = pDepthFrame->CopyFrameDataToArray(depthBuffer.size(), &depthBuffer[0]);
                if (FAILED(hResult)) {
                    std::cerr << "Error : IDepthFrame::CopyFrameDataToArray()" << std::endl;
                }
            }
            SafeRelease(pDepthFrame);

            // Point Cloud Library setup
            // Create Point Cloud
            pcl::PointCloud<pcl::PointXYZRGB>::Ptr pointcloud(new pcl::PointCloud<pcl::PointXYZRGB>());     // PCL point cloud structure
            pointcloud->width = static_cast<uint32_t>(depthWidth);      // Organized cloud dimensions
            pointcloud->height = static_cast<uint32_t>(depthHeight);
            pointcloud->is_dense = false;

            for (int y = 0; y < depthHeight; y++) {
                for (int x = 0; x < depthWidth; x++) {
                    pcl::PointXYZRGB point;     // Point data used by PCL

                    DepthSpacePoint depthSpacePoint = { static_cast<float>(x), static_cast<float>(y) };
                    UINT16 depth = depthBuffer[y * depthWidth + x];

                    // Coordinate Mapping Depth to Color Space, and Setting PointCloud RGB
                    ColorSpacePoint colorSpacePoint = { 0.0f, 0.0f };
                    pCoordinateMapper->MapDepthPointToColorSpace(depthSpacePoint, depth, &colorSpacePoint);     // Color coordinate space
                    int colorX = static_cast<int>(std::floor(colorSpacePoint.X + 0.5f));
                    int colorY = static_cast<int>(std::floor(colorSpacePoint.Y + 0.5f));
                    if ((0 <= colorX) && (colorX < colorWidth) && (0 <= colorY) && (colorY < colorHeight)) {
                        RGBQUAD color = colorBuffer[colorY * colorWidth + colorX];
                        // Store the color information
                        point.b = color.rgbBlue;
                        point.g = color.rgbGreen;
                        point.r = color.rgbRed;
                    }

                    // Coordinate Mapping Depth to Camera Space, and Setting PointCloud XYZ
                    CameraSpacePoint cameraSpacePoint = { 0.0f, 0.0f, 0.0f };           // Camera space
                    pCoordinateMapper->MapDepthPointToCameraSpace(depthSpacePoint, depth, &cameraSpacePoint);
                    if ((0 <= colorX) && (colorX < colorWidth) && (0 <= colorY) && (colorY < colorHeight)) {
                        // Store the Cartesian coordinates
                        point.x = cameraSpacePoint.X;
                        point.y = cameraSpacePoint.Y;
                        point.z = cameraSpacePoint.Z;
                    }

                    pointcloud->push_back(point);
                }
            }

            // Show Point Cloud on Cloud Viewer
            viewer.showCloud(pointcloud);

            // Input Key ( Exit ESC key )
            if (GetKeyState(VK_ESCAPE) < 0) {
                pcl::io::savePCDFile("PointCloud.pcd", *pointcloud);
                //pcl::io::savePLYFile("test_pcd2.ply", *pointcloud);           // Save the last captured point cloud
                printf("Save Point Cloud Data\n");

                //break;
            }
        }

        // End Processing
        SafeRelease(pColorSource);
        SafeRelease(pDepthSource);
        SafeRelease(pColorReader);
        SafeRelease(pDepthReader);
        SafeRelease(pColorDescription);
        SafeRelease(pDepthDescription);
        SafeRelease(pCoordinateMapper);
        if (pSensor) {
            pSensor->Close();
        }
        SafeRelease(pSensor);

        printf("Disconnect Kinect Sensor\n");



        return 0;
    }

The code above is from a tutorial on the Point Cloud Library website. It uses the Kinect to display, in real time, the point cloud the Kinect sees, so the cloud changes continuously. What I want is to capture a single frame; in other words, I would like the point cloud to freeze instead of continuously grabbing new frames.

Here is my modification.

        // Sample_PCL.cpp : Defines the entry point for the console application.
    //

    #include "stdafx.h"
    #define NOMINMAX
    #include <Windows.h>
    #include <Kinect.h>                                 // Header for using the Kinect
    #include <pcl/visualization/cloud_viewer.h>         // Header for displaying point clouds with PCL
    #include <pcl/io/pcd_io.h>                          // Headers for saving point cloud data (.pcd, .ply)
    //#include <pcl/io/ply_io.h>
    #include <pcl/point_types.h>
    #include <iostream>

    template<class Interface>
    inline void SafeRelease(Interface *& pInterfaceToRelease)
    {
        if (pInterfaceToRelease != NULL) {
            pInterfaceToRelease->Release();
            pInterfaceToRelease = NULL;
        }
    }

    int main()
    {
        // Create Sensor Instance
        IKinectSensor* pSensor;
        HRESULT hResult = S_OK;
        hResult = GetDefaultKinectSensor(&pSensor);
        if (FAILED(hResult)) {
            std::cerr << "Error : GetDefaultKinectSensor" << std::endl;
            return -1;
        }
        printf("GetDefaultKinectSensor is OK\n");

        // Open Sensor
        hResult = pSensor->Open();
        if (FAILED(hResult)) {
            std::cerr << "Error : IKinectSensor::Open()" << std::endl;
            return -1;
        }
        printf("IKinectSensor::Open is OK\n");

        // Retrieved Coordinate Mapper
        ICoordinateMapper* pCoordinateMapper;
        hResult = pSensor->get_CoordinateMapper(&pCoordinateMapper);
        if (FAILED(hResult)) {
            std::cerr << "Error : IKinectSensor::get_CoordinateMapper()" << std::endl;
            return -1;
        }
        printf("IKinectSensor::get_CoordinateMapper is OK\n");


        // Retrieved Color Frame Source
        IColorFrameSource* pColorSource;
        hResult = pSensor->get_ColorFrameSource(&pColorSource);
        if (FAILED(hResult)) {
            std::cerr << "Error : IKinectSensor::get_ColorFrameSource()" << std::endl;
            return -1;
        }
        printf("IKinectSensor::get_ColorFrameSource is OK\n");

        // Open Color Frame Reader
        IColorFrameReader* pColorReader;
        hResult = pColorSource->OpenReader(&pColorReader);
        if (FAILED(hResult)) {
            std::cerr << "Error : IColorFrameSource::OpenReader()" << std::endl;
            return -1;
        }
        printf("IColorFrameSource::OpenReader is OK\n");

        // Retrieved Depth Frame Source
        IDepthFrameSource* pDepthSource;
        hResult = pSensor->get_DepthFrameSource(&pDepthSource);
        if (FAILED(hResult)) {
            std::cerr << "Error : IKinectSensor::get_DepthFrameSource()" << std::endl;
            return -1;
        }
        printf("IKinectSensor::get_DepthFrameSource is OK\n");

        // Open Depth Frame Reader
        IDepthFrameReader* pDepthReader;
        hResult = pDepthSource->OpenReader(&pDepthReader);
        if (FAILED(hResult)) {
            std::cerr << "Error : IDepthFrameSource::OpenReader()" << std::endl;
            return -1;
        }
        printf("IDepthFrameSource::OpenReader is OK\n");

        // Retrieved Color Frame Size
        IFrameDescription* pColorDescription;
        hResult = pColorSource->get_FrameDescription(&pColorDescription);
        if (FAILED(hResult)) {
            std::cerr << "Error : IColorFrameSource::get_FrameDescription()" << std::endl;
            return -1;
        }
        printf("IColorFrameSource::get_FrameDescription is OK\n");

        int colorWidth = 0;
        int colorHeight = 0;
        pColorDescription->get_Width(&colorWidth); // 1920
        pColorDescription->get_Height(&colorHeight); // 1080

        // To Reserve Color Frame Buffer
        std::vector<RGBQUAD> colorBuffer(colorWidth * colorHeight);

        // Retrieved Depth Frame Size
        IFrameDescription* pDepthDescription;
        hResult = pDepthSource->get_FrameDescription(&pDepthDescription);
        if (FAILED(hResult)) {
            std::cerr << "Error : IDepthFrameSource::get_FrameDescription()" << std::endl;
            return -1;
        }
        printf("IDepthFrameSource::get_FrameDescription is OK\n");

        int depthWidth = 0;
        int depthHeight = 0;
        pDepthDescription->get_Width(&depthWidth); // 512
        pDepthDescription->get_Height(&depthHeight); // 424

        // To Reserve Depth Frame Buffer
        std::vector<UINT16> depthBuffer(depthWidth * depthHeight);

        printf("Display Point Cloud\n");

        // Create Cloud Viewer
        pcl::visualization::CloudViewer viewer("Point Cloud Viewer");       // Point cloud display window

        //while (!viewer.wasStopped()) {
            // Acquire Latest Color Frame
            IColorFrame* pColorFrame = nullptr;
            hResult = pColorReader->AcquireLatestFrame(&pColorFrame);
            if (SUCCEEDED(hResult)) {
                // Retrieved Color Data
                hResult = pColorFrame->CopyConvertedFrameDataToArray(colorBuffer.size() * sizeof(RGBQUAD), reinterpret_cast<BYTE*>(&colorBuffer[0]), ColorImageFormat::ColorImageFormat_Bgra);
                if (FAILED(hResult)) {
                    std::cerr << "Error : IColorFrame::CopyConvertedFrameDataToArray()" << std::endl;
                }
            }
            SafeRelease(pColorFrame);

            // Acquire Latest Depth Frame
            IDepthFrame* pDepthFrame = nullptr;
            hResult = pDepthReader->AcquireLatestFrame(&pDepthFrame);
            if (SUCCEEDED(hResult)) {
                // Retrieved Depth Data
                hResult = pDepthFrame->CopyFrameDataToArray(depthBuffer.size(), &depthBuffer[0]);
                if (FAILED(hResult)) {
                    std::cerr << "Error : IDepthFrame::CopyFrameDataToArray()" << std::endl;
                }
            }
            SafeRelease(pDepthFrame);



            // Point Cloud Library setup
            // Create Point Cloud
            pcl::PointCloud<pcl::PointXYZRGB>::Ptr pointcloud(new pcl::PointCloud<pcl::PointXYZRGB>());     // PCL point cloud structure
            pointcloud->width = static_cast<uint32_t>(depthWidth);      // Organized cloud dimensions
            pointcloud->height = static_cast<uint32_t>(depthHeight);
            pointcloud->is_dense = false;

            for (int y = 0; y < depthHeight; y++) {

                for (int x = 0; x < depthWidth; x++) {
                    //printf("scann\n");
                    pcl::PointXYZRGB point;     // Point data used by PCL

                    DepthSpacePoint depthSpacePoint = { static_cast<float>(x), static_cast<float>(y) };
                    UINT16 depth = depthBuffer[y * depthWidth + x];

                    // Coordinate Mapping Depth to Color Space, and Setting PointCloud RGB
                    ColorSpacePoint colorSpacePoint = { 0.0f, 0.0f };
                    pCoordinateMapper->MapDepthPointToColorSpace(depthSpacePoint, depth, &colorSpacePoint);     // Color coordinate space
                    int colorX = static_cast<int>(std::floor(colorSpacePoint.X + 0.5f));
                    int colorY = static_cast<int>(std::floor(colorSpacePoint.Y + 0.5f));
                    if ((0 <= colorX) && (colorX < colorWidth) && (0 <= colorY) && (colorY < colorHeight)) {
                        RGBQUAD color = colorBuffer[colorY * colorWidth + colorX];
                        // Store the color information
                        point.b = color.rgbBlue;
                        point.g = color.rgbGreen;
                        point.r = color.rgbRed;
                    }

                    // Coordinate Mapping Depth to Camera Space, and Setting PointCloud XYZ
                    CameraSpacePoint cameraSpacePoint = { 0.0f, 0.0f, 0.0f };           // Camera space
                    pCoordinateMapper->MapDepthPointToCameraSpace(depthSpacePoint, depth, &cameraSpacePoint);
                    if ((0 <= colorX) && (colorX < colorWidth) && (0 <= colorY) && (colorY < colorHeight)) {
                        // Store the Cartesian coordinates
                        point.x = cameraSpacePoint.X;
                        point.y = cameraSpacePoint.Y;
                        point.z = cameraSpacePoint.Z;
                    }

                    pointcloud->push_back(point);
                }
            //}

            viewer.showCloud(pointcloud);
            while (!viewer.wasStopped())
            {
            }
            /*pcl::io::savePCDFile("PointCloud.pcd", *pointcloud);
            printf("Saved Point Cloud Data\n");

            // Show Point Cloud on Cloud Viewer
            printf("Open viewer\n");
            viewer.showCloud(pointcloud);
            while (!viewer.wasStopped()) {

            }*/

            // Input Key ( Exit ESC key )
            if (GetKeyState(VK_ESCAPE) < 0) {
                pcl::io::savePCDFile("PointCloud.pcd", *pointcloud);
                //pcl::io::savePLYFile("test_pcd2.ply", *pointcloud);           // Save the last captured point cloud
                printf("Save Point Cloud Data\n");

                //break;
            }
        }

        // End Processing
        SafeRelease(pColorSource);
        SafeRelease(pDepthSource);
        SafeRelease(pColorReader);
        SafeRelease(pDepthReader);
        SafeRelease(pColorDescription);
        SafeRelease(pDepthDescription);
        SafeRelease(pCoordinateMapper);
        if (pSensor) {
            pSensor->Close();
        }
        SafeRelease(pSensor);

        printf("Disconnect Kinect Sensor\n");



        return 0;
    }

The modification mainly consists of removing the loop that continuously updates the point cloud, i.e. the loop you can see commented out in the second listing:

while (!viewer.wasStopped())

The problem is that the point cloud viewer does not display any of the data received from the Kinect, and I would like to know why it fails to display anything.

1 Answer:

Answer 0 (score: 1)

Your code seems to display only the first frame received from the Kinect, and that frame may well be empty or invalid. Have you checked whether the points in the cloud you captured look reasonable?
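
For reference, the single-shot structure only works once the sensor is actually streaming; right after IKinectSensor::Open(), AcquireLatestFrame typically still returns a failure HRESULT, which would leave colorBuffer and depthBuffer zero-filled. Below is a minimal sketch of a check that reuses the question's variable names; the gotColor/gotDepth flags and the 30 ms Sleep are illustrative additions, not part of the original code.

    // Poll until both readers have actually delivered a frame before building the cloud.
    bool gotColor = false;
    bool gotDepth = false;
    while (!gotColor || !gotDepth) {
        if (!gotColor) {
            IColorFrame* pColorFrame = nullptr;
            if (SUCCEEDED(pColorReader->AcquireLatestFrame(&pColorFrame))) {
                gotColor = SUCCEEDED(pColorFrame->CopyConvertedFrameDataToArray(
                    static_cast<UINT>(colorBuffer.size() * sizeof(RGBQUAD)),
                    reinterpret_cast<BYTE*>(&colorBuffer[0]),
                    ColorImageFormat::ColorImageFormat_Bgra));
            }
            SafeRelease(pColorFrame);
        }
        if (!gotDepth) {
            IDepthFrame* pDepthFrame = nullptr;
            if (SUCCEEDED(pDepthReader->AcquireLatestFrame(&pDepthFrame))) {
                gotDepth = SUCCEEDED(pDepthFrame->CopyFrameDataToArray(
                    static_cast<UINT>(depthBuffer.size()), &depthBuffer[0]));
            }
            SafeRelease(pDepthFrame);
        }
        Sleep(30);  // no new frame yet; wait a little before retrying (Sleep comes from Windows.h)
    }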

However, you may want to approach the problem differently (a sketch follows the list below):

  1. Keep the visualization loop as it is.
  2. Register a keyboard handler with registerKeyboardCallback (doc).
  3. When a specific key is pressed, set a boolean flag to true.
  4. In the visualization loop, skip acquiring new frames from the Kinect while that flag is true; the viewer should then keep the previously set cloud.
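
A minimal sketch of that approach with pcl::visualization::CloudViewer; the callback name onKeyboardEvent, the freezeCloud flag, and the choice of the space key are illustrative, not prescribed by PCL or by the original answer.

    #include <pcl/visualization/cloud_viewer.h>

    // Freeze flag toggled from the viewer's keyboard callback.
    static bool freezeCloud = false;

    // Invoked by the CloudViewer for every key event in its window.
    void onKeyboardEvent(const pcl::visualization::KeyboardEvent& event, void* /*cookie*/)
    {
        if (event.keyDown() && event.getKeySym() == "space") {
            freezeCloud = true;     // stop updating; the last cloud stays on screen
        }
    }

    // Inside main(), after creating the viewer:
    //
    //     viewer.registerKeyboardCallback(&onKeyboardEvent, nullptr);
    //
    //     while (!viewer.wasStopped()) {
    //         if (!freezeCloud) {
    //             // acquire color/depth frames and rebuild 'pointcloud' as in the question
    //             viewer.showCloud(pointcloud);
    //         }
    //         // when frozen, skip acquisition; the viewer keeps the previous cloud
    //     }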