Principle of converting color-image pixels to 3D points for the RealSense D4XX camera series

The code below is for RealSense D4XX-series cameras, but the principle is general.

float cx = 323.305, cy = 237.231; // D415 intrinsics at 640*480@30fps: principal point, in pixels, read from the sample program sensorcontrol: Principal point
float fx = 616.924, fy = 616.523; // D415 intrinsics at 640*480@30fps: focal length in pixel units (a multiple of the physical focal length f), read from the sample program sensorcontrol: Focal length

auto udist = depth.get_distance(U, V);   // depth at pixel (U, V), in meters
if (udist == 0.0) continue;              // skip pixels with no valid depth

// Generic 3D-point formula. It has been verified to produce exactly the same 3D coordinates
// as rs2_deproject_pixel_to_point, i.e. the RealSense internal function uses this same formula.
// Compute the 3D coordinates of this pixel:
worldpoint[0] = (U - cx) * udist / fx;
worldpoint[1] = (V - cy) * udist / fy;
worldpoint[2] = udist;
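
Instead of hard-coding the values read from the sensorcontrol sample, the same intrinsics can be queried from the device at runtime. Below is a minimal sketch assuming librealsense2 (<librealsense2/rs.hpp>); the pipeline setup and variable names are my own and not part of the original code:

#include <librealsense2/rs.hpp>
#include <cstdio>

int main()
{
    rs2::pipeline pipe;
    rs2::pipeline_profile profile = pipe.start();   // start streaming with the default configuration

    // Intrinsics of the depth stream that is actually being streamed.
    auto stream = profile.get_stream(RS2_STREAM_DEPTH).as<rs2::video_stream_profile>();
    rs2_intrinsics intrin = stream.get_intrinsics();

    std::printf("Principal point: (%.3f, %.3f)\n", intrin.ppx, intrin.ppy);
    std::printf("Focal length   : (%.3f, %.3f)\n", intrin.fx, intrin.fy);
    return 0;
}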

The deprojection snippet at the top is my own code; what follows is the reference it is based on:
RGB-D camera: obtaining a 3D point from a color-image pixel
/* Given pixel coordinates and depth in an image with no distortion or inverse distortion coefficients, compute the corresponding point in 3D space relative to the same camera */
static void rs2_deproject_pixel_to_point(float point[3], const struct rs2_intrinsics * intrin, const float pixel[2], float depth)
{
    assert(intrin->model != RS2_DISTORTION_MODIFIED_BROWN_CONRADY); // Cannot deproject from a forward-distorted image
    assert(intrin->model != RS2_DISTORTION_FTHETA); // Cannot deproject to an ftheta image
    //assert(intrin->model != RS2_DISTORTION_BROWN_CONRADY); // Cannot deproject to an brown conrady model

    float x = (pixel[0] - intrin->ppx) / intrin->fx;
    float y = (pixel[1] - intrin->ppy) / intrin->fy;
    if(intrin->model == RS2_DISTORTION_INVERSE_BROWN_CONRADY)
    {
        float r2  = x*x + y*y;
        float f = 1 + intrin->coeffs[0]*r2 + intrin->coeffs[1]*r2*r2 + intrin->coeffs[4]*r2*r2*r2;
        float ux = x*f + 2*intrin->coeffs[2]*x*y + intrin->coeffs[3]*(r2 + 2*x*x);
        float uy = y*f + 2*intrin->coeffs[3]*x*y + intrin->coeffs[2]*(r2 + 2*y*y);
        x = ux;
        y = uy;
    }
    point[0] = depth * x;
    point[1] = depth * y;
    point[2] = depth;
}
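
To back up the claim above that the hand-written formula matches rs2_deproject_pixel_to_point, here is a minimal comparison sketch. It assumes librealsense2's <librealsense2/rsutil.h>, fills the intrinsics with the D415 values quoted at the top, and assumes RS2_DISTORTION_NONE; the test pixel and depth are arbitrary values of my own choosing:

#include <librealsense2/rsutil.h>   // rs2_deproject_pixel_to_point
#include <cstdio>

int main()
{
    rs2_intrinsics intrin = {};                     // D415 640*480@30fps values from above
    intrin.width = 640;    intrin.height = 480;
    intrin.ppx = 323.305f; intrin.ppy = 237.231f;
    intrin.fx  = 616.924f; intrin.fy  = 616.523f;
    intrin.model = RS2_DISTORTION_NONE;             // assume no distortion terms

    float pixel[2] = { 400.0f, 300.0f };            // arbitrary test pixel (U, V)
    float depth    = 1.25f;                         // arbitrary depth, in meters

    float point[3];
    rs2_deproject_pixel_to_point(point, &intrin, pixel, depth);   // SDK version

    float manual[3] = {                             // hand-written formula from the top of the post
        (pixel[0] - intrin.ppx) * depth / intrin.fx,
        (pixel[1] - intrin.ppy) * depth / intrin.fy,
        depth
    };

    std::printf("SDK:    %f %f %f\n", point[0], point[1], point[2]);
    std::printf("manual: %f %f %f\n", manual[0], manual[1], manual[2]);
    return 0;
}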

RGB-D camera: computing the image pixel corresponding to a 3D point
/* Given a point in 3D space, compute the corresponding pixel coordinates in an image with no distortion or forward distortion coefficients produced by the same camera */

static void rs2_project_point_to_pixel(float pixel[2], const struct rs2_intrinsics * intrin, const float point[3])
{
    //assert(intrin->model != RS2_DISTORTION_INVERSE_BROWN_CONRADY); // Cannot project to an inverse-distorted image

    float x = point[0] / point[2], y = point[1] / point[2];

    if(intrin->model == RS2_DISTORTION_MODIFIED_BROWN_CONRADY)
    {
        float r2  = x*x + y*y;
        float f = 1 + intrin->coeffs[0]*r2 + intrin->coeffs[1]*r2*r2 + intrin->coeffs[4]*r2*r2*r2;
        x *= f;
        y *= f;
        float dx = x + 2*intrin->coeffs[2]*x*y + intrin->coeffs[3]*(r2 + 2*x*x);
        float dy = y + 2*intrin->coeffs[3]*x*y + intrin->coeffs[2]*(r2 + 2*y*y);
        x = dx;
        y = dy;
    }
    if (intrin->model == RS2_DISTORTION_FTHETA)
    {
        float r = sqrt(x*x + y*y);
        auto rd = (1.0f / intrin->coeffs[0] * atan(2 * r * tan(intrin->coeffs[0] / 2.0f)));
        x *= rd / r;
        y *= rd / r;
    }

    pixel[0] = x * intrin->fx + intrin->ppx;
    pixel[1] = y * intrin->fy + intrin->ppy;
}
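
As a sanity check on the pair of reference functions, projecting a deprojected point should return the original pixel (up to floating-point error). A minimal round-trip sketch, using the same assumed intrinsics as before and an arbitrary pixel/depth of my own choosing:

#include <librealsense2/rsutil.h>   // rs2_deproject_pixel_to_point, rs2_project_point_to_pixel
#include <cstdio>

int main()
{
    rs2_intrinsics intrin = {};                 // same assumed D415 intrinsics as in the previous sketch
    intrin.width = 640;    intrin.height = 480;
    intrin.ppx = 323.305f; intrin.ppy = 237.231f;
    intrin.fx  = 616.924f; intrin.fy  = 616.523f;
    intrin.model = RS2_DISTORTION_NONE;

    float pixel_in[2] = { 123.0f, 321.0f };     // arbitrary pixel
    float depth       = 0.8f;                   // arbitrary depth, in meters

    float point[3];
    rs2_deproject_pixel_to_point(point, &intrin, pixel_in, depth);   // pixel -> 3D point
    float pixel_out[2];
    rs2_project_point_to_pixel(pixel_out, &intrin, point);           // 3D point -> pixel

    std::printf("in : (%f, %f)\n", pixel_in[0],  pixel_in[1]);
    std::printf("out: (%f, %f)\n", pixel_out[0], pixel_out[1]);      // should match "in"
    return 0;
}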
