How can I capture one frame using the Point Cloud Library?

        // Sample_PCL.cpp : Defines the entry point for the console application.
    //
    #include "stdafx.h"
    #define NOMINMAX
    #include <Windows.h>
    #include <Kinect.h>                                 // Header for using the Kinect sensor
    #include <pcl/visualization/cloud_viewer.h>         // Header for displaying with PCL
    #include <pcl/io/pcd_io.h>                          // Header for saving point cloud data (.pcd, .ply)
    //#include <pcl/io/ply_io.h>
    #include <pcl/point_types.h>
    #include <iostream>
    template<class Interface>
    inline void SafeRelease(Interface *& pInterfaceToRelease)
    {
        if (pInterfaceToRelease != NULL) {
            pInterfaceToRelease->Release();
            pInterfaceToRelease = NULL;
        }
    }
    int main()
    {
        // Create Sensor Instance
        IKinectSensor* pSensor;
        HRESULT hResult = S_OK;
        hResult = GetDefaultKinectSensor(&pSensor);
        if (FAILED(hResult)) {
            std::cerr << "Error : GetDefaultKinectSensor" << std::endl;
            return -1;
        }
        printf("GetDfaultKinectSensor is OKn");
        // Open Sensor
        hResult = pSensor->Open();
        if (FAILED(hResult)) {
            std::cerr << "Error : IKinectSensor::Open()" << std::endl;
            return -1;
        }
        printf("IKinectSensor::Open is OKn");
        // Retrieved Coordinate Mapper
        ICoordinateMapper* pCoordinateMapper;
        hResult = pSensor->get_CoordinateMapper(&pCoordinateMapper);
        if (FAILED(hResult)) {
            std::cerr << "Error : IKinectSensor::get_CoordinateMapper()" << std::endl;
            return -1;
        }
        printf("IKinectSensor::get_CoordinateMapper is OKn");

        // Retrieved Color Frame Source
        IColorFrameSource* pColorSource;
        hResult = pSensor->get_ColorFrameSource(&pColorSource);
        if (FAILED(hResult)) {
            std::cerr << "Error : IKinectSensor::get_ColorFrameSource()" << std::endl;
            return -1;
        }
        printf("IKinectSensor::get_ColorFrameSource is OKn");
        // Open Color Frame Reader
        IColorFrameReader* pColorReader;
        hResult = pColorSource->OpenReader(&pColorReader);
        if (FAILED(hResult)) {
            std::cerr << "Error : IColorFrameSource::OpenReader()" << std::endl;
            return -1;
        }
        printf("IColorFrameSource::OpenReader is OKn");
        // Retrieved Depth Frame Source
        IDepthFrameSource* pDepthSource;
        hResult = pSensor->get_DepthFrameSource(&pDepthSource);
        if (FAILED(hResult)) {
            std::cerr << "Error : IKinectSensor::get_DepthFrameSource()" << std::endl;
            return -1;
        }
        printf("IKinectSensor::get_DepthFrameSource is OKn");
        // Open Depth Frame Reader
        IDepthFrameReader* pDepthReader;
        hResult = pDepthSource->OpenReader(&pDepthReader);
        if (FAILED(hResult)) {
            std::cerr << "Error : IDepthFrameSource::OpenReader()" << std::endl;
            return -1;
        }
        printf("IDepthFrameSource::OpenReader is OKn");
        // Retrieved Color Frame Size
        IFrameDescription* pColorDescription;
        hResult = pColorSource->get_FrameDescription(&pColorDescription);
        if (FAILED(hResult)) {
            std::cerr << "Error : IColorFrameSource::get_FrameDescription()" << std::endl;
            return -1;
        }
        printf("IColorFrameSource::get_FrameDescription is OKn");
        int colorWidth = 0;
        int colorHeight = 0;
        pColorDescription->get_Width(&colorWidth); // 1920
        pColorDescription->get_Height(&colorHeight); // 1080
                                                     // To Reserve Color Frame Buffer
        std::vector<RGBQUAD> colorBuffer(colorWidth * colorHeight);
        // Retrieved Depth Frame Size
        IFrameDescription* pDepthDescription;
        hResult = pDepthSource->get_FrameDescription(&pDepthDescription);
        if (FAILED(hResult)) {
            std::cerr << "Error : IDepthFrameSource::get_FrameDescription()" << std::endl;
            return -1;
        }
        printf("IDepthFrameSource::get_FrameDescription is OKn");
        int depthWidth = 0;
        int depthHeight = 0;
        pDepthDescription->get_Width(&depthWidth); // 512
        pDepthDescription->get_Height(&depthHeight); // 424
                                                     // To Reserve Depth Frame Buffer
        std::vector<UINT16> depthBuffer(depthWidth * depthHeight);
        printf("Display Point Cloudn");
        // Create Cloud Viewer
        pcl::visualization::CloudViewer viewer("Point Cloud Viewer");       //  Point cloud display window
        while (!viewer.wasStopped()) {
            // Acquire Latest Color Frame
            IColorFrame* pColorFrame = nullptr;
            hResult = pColorReader->AcquireLatestFrame(&pColorFrame);
            if (SUCCEEDED(hResult)) {
                // Retrieved Color Data
                hResult = pColorFrame->CopyConvertedFrameDataToArray(colorBuffer.size() * sizeof(RGBQUAD), reinterpret_cast<BYTE*>(&colorBuffer[0]), ColorImageFormat::ColorImageFormat_Bgra);
                if (FAILED(hResult)) {
                    std::cerr << "Error : IColorFrame::CopyConvertedFrameDataToArray()" << std::endl;
                }
            }
            SafeRelease(pColorFrame);
            // Acquire Latest Depth Frame
            IDepthFrame* pDepthFrame = nullptr;
            hResult = pDepthReader->AcquireLatestFrame(&pDepthFrame);
            if (SUCCEEDED(hResult)) {
                // Retrieved Depth Data
                hResult = pDepthFrame->CopyFrameDataToArray(depthBuffer.size(), &depthBuffer[0]);
                if (FAILED(hResult)) {
                    std::cerr << "Error : IDepthFrame::CopyFrameDataToArray()" << std::endl;
                }
            }
            SafeRelease(pDepthFrame);
            // Point Cloud Library setup
            // Create Point Cloud
            pcl::PointCloud<pcl::PointXYZRGB>::Ptr pointcloud(new pcl::PointCloud<pcl::PointXYZRGB>());     // PCL point cloud structure
            pointcloud->width = static_cast<uint32_t>(depthWidth);      // Number of points per row/column
            pointcloud->height = static_cast<uint32_t>(depthHeight);
            pointcloud->is_dense = false;
            for (int y = 0; y < depthHeight; y++) {
                for (int x = 0; x < depthWidth; x++) {
                    pcl::PointXYZRGB point;     // Point information used by PCL
                    DepthSpacePoint depthSpacePoint = { static_cast<float>(x), static_cast<float>(y) };
                    UINT16 depth = depthBuffer[y * depthWidth + x];
                    // Coordinate Mapping Depth to Color Space, and Setting PointCloud RGB
                    ColorSpacePoint colorSpacePoint = { 0.0f, 0.0f };
                    pCoordinateMapper->MapDepthPointToColorSpace(depthSpacePoint, depth, &colorSpacePoint);     //  Color coordinate space
                    int colorX = static_cast<int>(std::floor(colorSpacePoint.X + 0.5f));
                    int colorY = static_cast<int>(std::floor(colorSpacePoint.Y + 0.5f));
                    if ((0 <= colorX) && (colorX < colorWidth) && (0 <= colorY) && (colorY < colorHeight)) {
                        RGBQUAD color = colorBuffer[colorY * colorWidth + colorX];
                        // Store the color information
                        point.b = color.rgbBlue;
                        point.g = color.rgbGreen;
                        point.r = color.rgbRed;
                    }
                    // Coordinate Mapping Depth to Camera Space, and Setting PointCloud XYZ
                    CameraSpacePoint cameraSpacePoint = { 0.0f, 0.0f, 0.0f };           // Camera space
                    pCoordinateMapper->MapDepthPointToCameraSpace(depthSpacePoint, depth, &cameraSpacePoint);
                    if ((0 <= colorX) && (colorX < colorWidth) && (0 <= colorY) && (colorY < colorHeight)) {
                        // Store the Cartesian (camera space) coordinates
                        point.x = cameraSpacePoint.X;
                        point.y = cameraSpacePoint.Y;
                        point.z = cameraSpacePoint.Z;
                    }
                    pointcloud->push_back(point);
                }
            }
            // Show Point Cloud on Cloud Viewer
            viewer.showCloud(pointcloud);
            // Input Key ( Exit ESC key )
            if (GetKeyState(VK_ESCAPE) < 0) {
                pcl::io::savePCDFile("PointCloud.pcd", *pointcloud);
                //pcl::io::savePLYFile("test_pcd2.ply", *pointcloud);           // Save the last acquired point cloud
                printf("Save Point Cloud Data\n");
                //break;
            }
        }
        // End Processing
        SafeRelease(pColorSource);
        SafeRelease(pDepthSource);
        SafeRelease(pColorReader);
        SafeRelease(pDepthReader);
        SafeRelease(pColorDescription);
        SafeRelease(pDepthDescription);
        SafeRelease(pCoordinateMapper);
        if (pSensor) {
            pSensor->Close();
        }
        SafeRelease(pSensor);
        printf("Disconnect Kinect Sensorn");

        return 0;
    }

The preceding code is taken from a tutorial on the Point Cloud Library website and uses the Kinect to display, in real time, the point cloud the Kinect sees. The point cloud is therefore constantly changing. That is why I want to capture only one frame; in other words, I want the point cloud to freeze instead of continuously capturing new frames.

Here is my modification:

        // Sample_PCL.cpp : Defines the entry point for the console application.
    //
    #include "stdafx.h"
    #define NOMINMAX
    #include <Windows.h>
    #include <Kinect.h>                                 // Header for using the Kinect sensor
    #include <pcl/visualization/cloud_viewer.h>         // Header for displaying with PCL
    #include <pcl/io/pcd_io.h>                          // Header for saving point cloud data (.pcd, .ply)
    //#include <pcl/io/ply_io.h>
    #include <pcl/point_types.h>
    #include <iostream>
    template<class Interface>
    inline void SafeRelease(Interface *& pInterfaceToRelease)
    {
        if (pInterfaceToRelease != NULL) {
            pInterfaceToRelease->Release();
            pInterfaceToRelease = NULL;
        }
    }
    int main()
    {
        // Create Sensor Instance
        IKinectSensor* pSensor;
        HRESULT hResult = S_OK;
        hResult = GetDefaultKinectSensor(&pSensor);
        if (FAILED(hResult)) {
            std::cerr << "Error : GetDefaultKinectSensor" << std::endl;
            return -1;
        }
        printf("GetDfaultKinectSensor is OKn");
        // Open Sensor
        hResult = pSensor->Open();
        if (FAILED(hResult)) {
            std::cerr << "Error : IKinectSensor::Open()" << std::endl;
            return -1;
        }
        printf("IKinectSensor::Open is OKn");
        // Retrieved Coordinate Mapper
        ICoordinateMapper* pCoordinateMapper;
        hResult = pSensor->get_CoordinateMapper(&pCoordinateMapper);
        if (FAILED(hResult)) {
            std::cerr << "Error : IKinectSensor::get_CoordinateMapper()" << std::endl;
            return -1;
        }
        printf("IKinectSensor::get_CoordinateMapper is OKn");

        // Retrieved Color Frame Source
        IColorFrameSource* pColorSource;
        hResult = pSensor->get_ColorFrameSource(&pColorSource);
        if (FAILED(hResult)) {
            std::cerr << "Error : IKinectSensor::get_ColorFrameSource()" << std::endl;
            return -1;
        }
        printf("IKinectSensor::get_ColorFrameSource is OKn");
        // Open Color Frame Reader
        IColorFrameReader* pColorReader;
        hResult = pColorSource->OpenReader(&pColorReader);
        if (FAILED(hResult)) {
            std::cerr << "Error : IColorFrameSource::OpenReader()" << std::endl;
            return -1;
        }
        printf("IColorFrameSource::OpenReader is OKn");
        // Retrieved Depth Frame Source
        IDepthFrameSource* pDepthSource;
        hResult = pSensor->get_DepthFrameSource(&pDepthSource);
        if (FAILED(hResult)) {
            std::cerr << "Error : IKinectSensor::get_DepthFrameSource()" << std::endl;
            return -1;
        }
        printf("IKinectSensor::get_DepthFrameSource is OKn");
        // Open Depth Frame Reader
        IDepthFrameReader* pDepthReader;
        hResult = pDepthSource->OpenReader(&pDepthReader);
        if (FAILED(hResult)) {
            std::cerr << "Error : IDepthFrameSource::OpenReader()" << std::endl;
            return -1;
        }
        printf("IDepthFrameSource::OpenReader is OKn");
        // Retrieved Color Frame Size
        IFrameDescription* pColorDescription;
        hResult = pColorSource->get_FrameDescription(&pColorDescription);
        if (FAILED(hResult)) {
            std::cerr << "Error : IColorFrameSource::get_FrameDescription()" << std::endl;
            return -1;
        }
        printf("IColorFrameSource::get_FrameDescription is OKn");
        int colorWidth = 0;
        int colorHeight = 0;
        pColorDescription->get_Width(&colorWidth); // 1920
        pColorDescription->get_Height(&colorHeight); // 1080
                                                     // To Reserve Color Frame Buffer
        std::vector<RGBQUAD> colorBuffer(colorWidth * colorHeight);
        // Retrieved Depth Frame Size
        IFrameDescription* pDepthDescription;
        hResult = pDepthSource->get_FrameDescription(&pDepthDescription);
        if (FAILED(hResult)) {
            std::cerr << "Error : IDepthFrameSource::get_FrameDescription()" << std::endl;
            return -1;
        }
        printf("IDepthFrameSource::get_FrameDescription is OKn");
        int depthWidth = 0;
        int depthHeight = 0;
        pDepthDescription->get_Width(&depthWidth); // 512
        pDepthDescription->get_Height(&depthHeight); // 424
                                                     // To Reserve Depth Frame Buffer
        std::vector<UINT16> depthBuffer(depthWidth * depthHeight);
        printf("Display Point Cloudn");
        // Create Cloud Viewer
        pcl::visualization::CloudViewer viewer("Point Cloud Viewer");       //  Point cloud display window
        //while (!viewer.wasStopped()) {
            // Acquire Latest Color Frame
            IColorFrame* pColorFrame = nullptr;
            hResult = pColorReader->AcquireLatestFrame(&pColorFrame);
            if (SUCCEEDED(hResult)) {
                // Retrieved Color Data
                hResult = pColorFrame->CopyConvertedFrameDataToArray(colorBuffer.size() * sizeof(RGBQUAD), reinterpret_cast<BYTE*>(&colorBuffer[0]), ColorImageFormat::ColorImageFormat_Bgra);
                if (FAILED(hResult)) {
                    std::cerr << "Error : IColorFrame::CopyConvertedFrameDataToArray()" << std::endl;
                }
            }
            SafeRelease(pColorFrame);
            // Acquire Latest Depth Frame
            IDepthFrame* pDepthFrame = nullptr;
            hResult = pDepthReader->AcquireLatestFrame(&pDepthFrame);
            if (SUCCEEDED(hResult)) {
                // Retrieved Depth Data
                hResult = pDepthFrame->CopyFrameDataToArray(depthBuffer.size(), &depthBuffer[0]);
                if (FAILED(hResult)) {
                    std::cerr << "Error : IDepthFrame::CopyFrameDataToArray()" << std::endl;
                }
            }
            SafeRelease(pDepthFrame);

            // Point Cloud Library setup
            // Create Point Cloud
            pcl::PointCloud<pcl::PointXYZRGB>::Ptr pointcloud(new pcl::PointCloud<pcl::PointXYZRGB>());     // PCL point cloud structure
            pointcloud->width = static_cast<uint32_t>(depthWidth);      // Number of points per row/column
            pointcloud->height = static_cast<uint32_t>(depthHeight);
            pointcloud->is_dense = false;
            for (int y = 0; y < depthHeight; y++) {
                for (int x = 0; x < depthWidth; x++) {
                    //printf("scan\n\n");
                    pcl::PointXYZRGB point;     // Point information used by PCL
                    DepthSpacePoint depthSpacePoint = { static_cast<float>(x), static_cast<float>(y) };
                    UINT16 depth = depthBuffer[y * depthWidth + x];
                    // Coordinate Mapping Depth to Color Space, and Setting PointCloud RGB
                    ColorSpacePoint colorSpacePoint = { 0.0f, 0.0f };
                    pCoordinateMapper->MapDepthPointToColorSpace(depthSpacePoint, depth, &colorSpacePoint);     //  Color coordinate space
                    int colorX = static_cast<int>(std::floor(colorSpacePoint.X + 0.5f));
                    int colorY = static_cast<int>(std::floor(colorSpacePoint.Y + 0.5f));
                    if ((0 <= colorX) && (colorX < colorWidth) && (0 <= colorY) && (colorY < colorHeight)) {
                        RGBQUAD color = colorBuffer[colorY * colorWidth + colorX];
                        // Store the color information
                        point.b = color.rgbBlue;
                        point.g = color.rgbGreen;
                        point.r = color.rgbRed;
                    }
                    // Coordinate Mapping Depth to Camera Space, and Setting PointCloud XYZ
                    CameraSpacePoint cameraSpacePoint = { 0.0f, 0.0f, 0.0f };           // Camera space
                    pCoordinateMapper->MapDepthPointToCameraSpace(depthSpacePoint, depth, &cameraSpacePoint);
                    if ((0 <= colorX) && (colorX < colorWidth) && (0 <= colorY) && (colorY < colorHeight)) {
                        // Store the Cartesian (camera space) coordinates
                        point.x = cameraSpacePoint.X;
                        point.y = cameraSpacePoint.Y;
                        point.z = cameraSpacePoint.Z;
                    }
                    pointcloud->push_back(point);
                }
            //}
            viewer.showCloud(pointcloud);
            while (!viewer.wasStopped())
            {
            }
            /*pcl::io::savePCDFile("PointCloud.pcd", *pointcloud);
            printf("Saved Point Cloud Data\n");
            // Show Point Cloud on Cloud Viewer
            printf("Open viewer\n");
            viewer.showCloud(pointcloud);
            while (!viewer.wasStopped()) {
            }*/
            // Input Key ( Exit ESC key )
            if (GetKeyState(VK_ESCAPE) < 0) {
                pcl::io::savePCDFile("PointCloud.pcd", *pointcloud);
                //pcl::io::savePLYFile("test_pcd2.ply", *pointcloud);           // Save the last acquired point cloud
                printf("Save Point Cloud Data\n");
                //break;
            }
        }
        // End Processing
        SafeRelease(pColorSource);
        SafeRelease(pDepthSource);
        SafeRelease(pColorReader);
        SafeRelease(pDepthReader);
        SafeRelease(pColorDescription);
        SafeRelease(pDepthDescription);
        SafeRelease(pCoordinateMapper);
        if (pSensor) {
            pSensor->Close();
        }
        SafeRelease(pSensor);
        printf("Disconnect Kinect Sensorn");

        return 0;
    }

The modification mainly consists of removing the loop that continuously updates the point cloud, namely the one you can see commented out in the second listing:

    while (!viewer.wasStopped())

The problem is that the point cloud viewer does not display any of the data received from the Kinect, and I would like to know why nothing is shown.

Your code seems to display only the very first frame it receives from the Kinect, which may be empty or invalid. Have you checked that the points in the cloud you are fetching are valid?
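One likely culprit: right after a reader is opened, AcquireLatestFrame() typically fails because the sensor has not delivered a frame yet, so with the while loop removed the color and depth buffers may still be empty when the cloud is built. A minimal sketch of a check for this (reusing the reader, buffer, and SafeRelease names from your code) is to keep polling until both frames have actually been copied, and only then build the cloud:

    // Sketch only: poll until one color frame and one depth frame have been copied.
    // Place this in main() after the readers and buffers have been set up.
    bool gotColor = false;
    bool gotDepth = false;
    while (!gotColor || !gotDepth) {
        IColorFrame* pColorFrame = nullptr;
        if (SUCCEEDED(pColorReader->AcquireLatestFrame(&pColorFrame))) {
            gotColor = SUCCEEDED(pColorFrame->CopyConvertedFrameDataToArray(
                colorBuffer.size() * sizeof(RGBQUAD),
                reinterpret_cast<BYTE*>(&colorBuffer[0]),
                ColorImageFormat::ColorImageFormat_Bgra));
        }
        SafeRelease(pColorFrame);
        IDepthFrame* pDepthFrame = nullptr;
        if (SUCCEEDED(pDepthReader->AcquireLatestFrame(&pDepthFrame))) {
            gotDepth = SUCCEEDED(pDepthFrame->CopyFrameDataToArray(depthBuffer.size(), &depthBuffer[0]));
        }
        SafeRelease(pDepthFrame);
        Sleep(30);   // give the sensor time to deliver the next frame (~30 fps)
    }
    // Build the point cloud from colorBuffer/depthBuffer only after this loop finishes.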

However, you may want to handle this differently:

  1. Keep the visualization loop as it is.
  2. Register a key handler using registerKeyboardCallback (doc).
  3. When a key is pressed, set a boolean to true.
  4. In the visualization loop, skip the Kinect frame grabbing if that boolean is true. The previously set cloud should be retained; see the sketch after this list.
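
A minimal sketch of steps 2 to 4 follows. The registerKeyboardCallback overload taking a free function is the one CloudViewer exposes; the choice of the space key and the g_freeze flag name are just placeholders for illustration:

    // Sketch only: freeze/unfreeze the cloud with the space bar instead of leaving the loop.
    #include <pcl/visualization/cloud_viewer.h>

    static bool g_freeze = false;   // toggled from the keyboard callback
                                    // (an std::atomic<bool> would be safer, since the
                                    // viewer invokes the callback from its own thread)

    void keyboardHandler(const pcl::visualization::KeyboardEvent& event, void* /*cookie*/)
    {
        if (event.keyDown() && event.getKeySym() == "space") {
            g_freeze = !g_freeze;   // toggle on each press
        }
    }

    // In main(), after creating the viewer:
    //     viewer.registerKeyboardCallback(keyboardHandler);
    //
    // In the visualization loop:
    //     while (!viewer.wasStopped()) {
    //         if (!g_freeze) {
    //             // acquire the color/depth frames and rebuild 'pointcloud' exactly as before
    //             viewer.showCloud(pointcloud);
    //         }
    //         // while g_freeze is true nothing is grabbed, so the last cloud
    //         // passed to showCloud() stays on screen
    //     }

This keeps the viewer window responsive, and the cloud appears frozen simply because showCloud() is no longer called with new data.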