C++ Kinect v2 & freenect2:如何将深度数据转换为真实世界的坐标

C++ Kinect v2 & freenect2: how to convert depth data to real world coordinates

本文关键字:转换 数据 深度 真实世界 坐标 v2 Kinect freenect2 C++      更新时间:2023-10-16

我正在尝试使用Kinect v2相机(在Linux中)计算真实世界的xyz坐标,但我的计算给了我错误的结果。

代码如下:

// Back-project pixel (x, y) with measured depth `pointDepth` to camera-space
// coordinates via the pinhole model: X = (u - cx) * Z / fx, Y = (v - cy) * Z / fy.
// NOTE(review): pointDepth must be in the unit you expect for the output
// (freenect2 depth frames are in millimeters) — verify against the caller.
cv::Point3f xyzWorld = {0.0f, 0.0f, 0.0f};
xyzWorld.z = pointDepth;
// Removed the duplicate `xyzWorld.z = pointDepth;` that followed these lines.
xyzWorld.x = ((float)x - depthcx) * xyzWorld.z / depthfx;
xyzWorld.y = ((float)y - depthcy) * xyzWorld.z / depthfy;
return xyzWorld;

我认为问题出在深度相机内参 fx、fy、cx 和 cy 的取值上。

有人能帮帮我吗?

我用的是freenect2

为什么不直接使用 OpenNI 的实现呢？

 OniStatus VideoStream::convertDepthToWorldCoordinates(float depthX, float depthY, float depthZ, float* pWorldX, float* pWorldY, float* pWorldZ)
{
    if (m_pSensorInfo->sensorType != ONI_SENSOR_DEPTH)
    {
        m_errorLogger.Append("convertDepthToWorldCoordinates: Stream is not from DEPTHn");
        return ONI_STATUS_NOT_SUPPORTED;
    }
    float normalizedX = depthX / m_worldConvertCache.resolutionX - .5f;
    float normalizedY = .5f - depthY / m_worldConvertCache.resolutionY;
    OniVideoMode videoMode;
    int size = sizeof(videoMode);
    getProperty(ONI_STREAM_PROPERTY_VIDEO_MODE, &videoMode, &size);
    float const convertToMillimeters = (videoMode.pixelFormat == ONI_PIXEL_FORMAT_DEPTH_100_UM) ? 10.f : 1.f;
    *pWorldX = (normalizedX * depthZ * m_worldConvertCache.xzFactor) / convertToMillimeters;
    *pWorldY = (normalizedY * depthZ * m_worldConvertCache.yzFactor) / convertToMillimeters;
    *pWorldZ = depthZ / convertToMillimeters;
    return ONI_STATUS_OK;
}

/// Projects a world-space point (worldX, worldY, worldZ) back onto the depth
/// image: pixel coordinates in *pDepthX/*pDepthY, depth passed through in
/// *pDepthZ. Inverse of convertDepthToWorldCoordinates.
/// Returns ONI_STATUS_NOT_SUPPORTED for non-depth streams.
OniStatus VideoStream::convertWorldToDepthCoordinates(float worldX, float worldY, float worldZ, float* pDepthX, float* pDepthY, float* pDepthZ)
{
    if (m_pSensorInfo->sensorType != ONI_SENSOR_DEPTH)
    {
        // Fix: the trailing newline escape had been lost ("DEPTHn") when the
        // code was pasted into the article.
        m_errorLogger.Append("convertWorldToDepthCoordinates: Stream is not from DEPTH\n");
        return ONI_STATUS_NOT_SUPPORTED;
    }
    // coeffX/coeffY play the role of focal lengths (resolution / frustum size);
    // halfResX/halfResY are the principal point at the image center.
    *pDepthX = m_worldConvertCache.coeffX * worldX / worldZ + m_worldConvertCache.halfResX;
    *pDepthY = m_worldConvertCache.halfResY - m_worldConvertCache.coeffY * worldY / worldZ;
    *pDepthZ = worldZ;
    return ONI_STATUS_OK;
}

和世界转换缓存:

 void VideoStream::refreshWorldConversionCache()
{
    if (m_pSensorInfo->sensorType != ONI_SENSOR_DEPTH)
    {
        return;
    }
    OniVideoMode videoMode;
    int size = sizeof(videoMode);
    getProperty(ONI_STREAM_PROPERTY_VIDEO_MODE, &videoMode, &size);
    size = sizeof(float);
    float horizontalFov;
    float verticalFov;
    getProperty(ONI_STREAM_PROPERTY_HORIZONTAL_FOV, &horizontalFov, &size);
    getProperty(ONI_STREAM_PROPERTY_VERTICAL_FOV, &verticalFov, &size);
    m_worldConvertCache.xzFactor = tan(horizontalFov / 2) * 2;
    m_worldConvertCache.yzFactor = tan(verticalFov / 2) * 2;
    m_worldConvertCache.resolutionX = videoMode.resolutionX;
    m_worldConvertCache.resolutionY = videoMode.resolutionY;
    m_worldConvertCache.halfResX = m_worldConvertCache.resolutionX / 2;
    m_worldConvertCache.halfResY = m_worldConvertCache.resolutionY / 2;
    m_worldConvertCache.coeffX = m_worldConvertCache.resolutionX / m_worldConvertCache.xzFactor;
    m_worldConvertCache.coeffY = m_worldConvertCache.resolutionY / m_worldConvertCache.yzFactor;
}
// Precomputed conversion factors shared by the depth<->world converters;
// refreshed by refreshWorldConversionCache(). Member order is preserved —
// do not reorder (layout may matter to external users of the instance).
struct WorldConversionCache
    {
        // Frustum width/height at unit depth: 2 * tan(fov / 2).
        float xzFactor;
        float yzFactor;
        // Effective focal lengths: resolution divided by the frustum factor.
        float coeffX;
        float coeffY;
        // Depth image dimensions in pixels.
        int resolutionX;
        int resolutionY;
        // Half resolutions, used as the principal point (image center).
        int halfResX;
        int halfResY;
    } m_worldConvertCache;

全部取自OpenNI GitHub库

水平和垂直视场角（FOV）可以直接从每一帧的描述信息中获取。