RealSense D4XX series cameras: how a color-image pixel is converted to a point in 3D space

The code below is for RealSense D4XX series cameras, but the principle is general.
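Written out, the conversion is the standard pinhole-camera deprojection (a summary of the code that follows, ignoring lens distortion): for a pixel $(u, v)$ with principal point $(c_x, c_y)$, focal lengths $(f_x, f_y)$ in pixels, and measured depth $Z$ in meters,

$$X = \frac{(u - c_x)\,Z}{f_x}, \qquad Y = \frac{(v - c_y)\,Z}{f_y}, \qquad Z = \mathrm{depth}(u, v).$$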

float cx = 323.305, cy = 237.231; // D415 at 640*480 @ 30 fps: principal point offset in pixels, obtained from the sensorcontrol example program: Principal point
float fx = 616.924, fy = 616.523; // D415 at 640*480 @ 30 fps: focal length in pixel units (a multiple of the physical focal length f), obtained from the sensorcontrol example program: Focal length

				// Inside a loop over the pixel coordinates (U, V) of the depth frame:
				auto udist = depth.get_distance(U, V); // depth in meters at this pixel
				if (udist == 0.0) continue;            // skip pixels with no depth reading
				// General 3D-point formula. Verified: it gives exactly the same 3D coordinates
				// as rs2_deproject_pixel_to_point, i.e. the RealSense internal function uses
				// this same formula.
				// Compute the 3D coordinates of this pixel:
				worldpoint[0] = (U - cx) * udist / fx;
				worldpoint[1] = (V - cy) * udist / fy;
				worldpoint[2] = udist;
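These intrinsics do not have to be copied from sensorcontrol by hand; they can also be queried from the running stream. A minimal sketch, assuming the librealsense2 C++ API (rs2::pipeline and rs2::video_stream_profile::get_intrinsics):

#include <librealsense2/rs.hpp>
#include <cstdio>

int main()
{
    rs2::pipeline pipe;
    rs2::config cfg;
    cfg.enable_stream(RS2_STREAM_DEPTH, 640, 480, RS2_FORMAT_Z16, 30);
    rs2::pipeline_profile profile = pipe.start(cfg);

    // Read the intrinsics of the depth stream that was just started.
    rs2_intrinsics intrin = profile.get_stream(RS2_STREAM_DEPTH)
                                   .as<rs2::video_stream_profile>()
                                   .get_intrinsics();

    printf("Principal point: %f %f\n", intrin.ppx, intrin.ppy); // cx, cy
    printf("Focal length   : %f %f\n", intrin.fx,  intrin.fy);  // fx, fy
    return 0;
}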

The deprojection above is my own code; for reference, below are the corresponding functions from the librealsense SDK:
Obtaining a 3D point from a color-image pixel with an RGB-D camera
/* Given pixel coordinates and depth in an image with no distortion or inverse distortion coefficients, compute the corresponding point in 3D space relative to the same camera */
static void rs2_deproject_pixel_to_point(float point[3], const struct rs2_intrinsics * intrin, const float pixel[2], float depth)
{
    assert(intrin->model != RS2_DISTORTION_MODIFIED_BROWN_CONRADY); // Cannot deproject from a forward-distorted image
    assert(intrin->model != RS2_DISTORTION_FTHETA); // Cannot deproject to an ftheta image
    //assert(intrin->model != RS2_DISTORTION_BROWN_CONRADY); // Cannot deproject to an brown conrady model

    float x = (pixel[0] - intrin->ppx) / intrin->fx;
    float y = (pixel[1] - intrin->ppy) / intrin->fy;
    if(intrin->model == RS2_DISTORTION_INVERSE_BROWN_CONRADY)
    {
        float r2  = x*x + y*y;
        float f = 1 + intrin->coeffs[0]*r2 + intrin->coeffs[1]*r2*r2 + intrin->coeffs[4]*r2*r2*r2;
        float ux = x*f + 2*intrin->coeffs[2]*x*y + intrin->coeffs[3]*(r2 + 2*x*x);
        float uy = y*f + 2*intrin->coeffs[3]*x*y + intrin->coeffs[2]*(r2 + 2*y*y);
        x = ux;
        y = uy;
    }
    point[0] = depth * x;
    point[1] = depth * y;
    point[2] = depth;
}
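To check the claim above that the manual formula and the library call agree, one can fill an rs2_intrinsics struct with the values printed by sensorcontrol and compare the two results. This is only a sketch; the sample pixel, the depth value, and the choice of RS2_DISTORTION_NONE (zero distortion coefficients) are assumptions for illustration:

#include <librealsense2/rs.hpp>
#include <librealsense2/rsutil.h> // rs2_deproject_pixel_to_point
#include <cstdio>

int main()
{
    rs2_intrinsics intrin = {};                   // all distortion coefficients zero
    intrin.width = 640;    intrin.height = 480;
    intrin.ppx = 323.305f; intrin.ppy = 237.231f; // cx, cy
    intrin.fx  = 616.924f; intrin.fy  = 616.523f; // fx, fy
    intrin.model = RS2_DISTORTION_NONE;

    float pixel[2] = { 400.0f, 250.0f };          // hypothetical pixel (U, V)
    float depth = 1.25f;                          // hypothetical depth in meters

    float p[3];
    rs2_deproject_pixel_to_point(p, &intrin, pixel, depth);

    // The manual formula from the beginning of the post:
    float X = (pixel[0] - intrin.ppx) * depth / intrin.fx;
    float Y = (pixel[1] - intrin.ppy) * depth / intrin.fy;

    printf("library: %f %f %f\n", p[0], p[1], p[2]);
    printf("manual : %f %f %f\n", X, Y, depth);
    return 0;
}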

Obtaining the image pixel corresponding to a 3D point with an RGB-D camera
/* Given a point in 3D space, compute the corresponding pixel coordinates in an image with no distortion or forward distortion coefficients produced by the same camera */
static void rs2_project_point_to_pixel(float pixel[2], const struct rs2_intrinsics * intrin, const float point[3])
{
    //assert(intrin->model != RS2_DISTORTION_INVERSE_BROWN_CONRADY); // Cannot project to an inverse-distorted image

    float x = point[0] / point[2], y = point[1] / point[2];

    if(intrin->model == RS2_DISTORTION_MODIFIED_BROWN_CONRADY)
    {
        float r2  = x*x + y*y;
        float f = 1 + intrin->coeffs[0]*r2 + intrin->coeffs[1]*r2*r2 + intrin->coeffs[4]*r2*r2*r2;
        x *= f;
        y *= f;
        float dx = x + 2*intrin->coeffs[2]*x*y + intrin->coeffs[3]*(r2 + 2*x*x);
        float dy = y + 2*intrin->coeffs[3]*x*y + intrin->coeffs[2]*(r2 + 2*y*y);
        x = dx;
        y = dy;
    }
    if (intrin->model == RS2_DISTORTION_FTHETA)
    {
        float r = sqrt(x*x + y*y);
        auto rd = (1.0f / intrin->coeffs[0] * atan(2 * r * tan(intrin->coeffs[0] / 2.0f)));
        x *= rd / r;
        y *= rd / r;
    }

    pixel[0] = x * intrin->fx + intrin->ppx;
    pixel[1] = y * intrin->fy + intrin->ppy;
}
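Projection is the inverse operation, so with zero distortion coefficients projecting a deprojected point should return the original pixel. Again a sketch under assumed values rather than code from the post:

#include <librealsense2/rs.hpp>
#include <librealsense2/rsutil.h> // rs2_project_point_to_pixel, rs2_deproject_pixel_to_point
#include <cstdio>

int main()
{
    rs2_intrinsics intrin = {};                   // zero distortion coefficients
    intrin.width = 640;    intrin.height = 480;
    intrin.ppx = 323.305f; intrin.ppy = 237.231f;
    intrin.fx  = 616.924f; intrin.fy  = 616.523f;
    intrin.model = RS2_DISTORTION_NONE;

    float pixel_in[2] = { 400.0f, 250.0f };       // hypothetical pixel
    float point[3];
    rs2_deproject_pixel_to_point(point, &intrin, pixel_in, 1.25f);

    float pixel_out[2];
    rs2_project_point_to_pixel(pixel_out, &intrin, point);

    printf("pixel in : %f %f\n", pixel_in[0],  pixel_in[1]);
    printf("pixel out: %f %f\n", pixel_out[0], pixel_out[1]); // should match pixel_in
    return 0;
}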
