Intel RealSense camera calibration

https://store.intelrealsense.com/buy-intel-realsense-d400-cameras-calibration-target.html

https://www.intel.com/content/dam/support/us/en/documents/emerging-technologies/intel-realsense-technology/RealSense_D400%20_Custom_Calib_Paper.pdf

 

Looking at the code, the intrinsics and the camera R/T (rotation/translation) can be read out directly; they appear to be the factory calibration values. The script below reads them out and also captures frames for inspection.

## License: Apache 2.0. See LICENSE file in root directory.
## Copyright(c) 2015-2017 Intel Corporation. All Rights Reserved.

###############################################
##      Open CV and Numpy integration        ##
###############################################

import pyrealsense2 as rs
import numpy as np
import cv2


"""
Returns R, T transform from src to dst
"""
def get_extrinsics(src, dst):
    extrinsics = src.get_extrinsics_to(dst)
    R = np.reshape(extrinsics.rotation, [3,3]).T
    T = np.array(extrinsics.translation)
    return (R, T)

"""
Returns a camera matrix K from librealsense intrinsics
"""
def camera_matrix(intrinsics):
    return np.array([[intrinsics.fx,             0, intrinsics.ppx],
                     [            0, intrinsics.fy, intrinsics.ppy],
                     [            0,             0,              1]])

'''
The factory calibration can also be dumped from the command line with the SDK tool
rs-enumerate-devices (Intel RealSense SDK 2.0/tools/rs-enumerate-devices); running it
with the -c option prints the calibration data for every stream.
'''
# Configure the depth, color and infrared streams
pipeline = rs.pipeline()
config = rs.config()
config.enable_stream(rs.stream.depth, 640, 480, rs.format.z16, 30)
# Color resolution options: 1920x1080, 1280x720 (1920x1080 is used here)
config.enable_stream(rs.stream.color, 1920, 1080, rs.format.bgr8, 30)
config.enable_stream(rs.stream.infrared, 1, 640, 480, rs.format.y8, 30)
config.enable_stream(rs.stream.infrared, 2, 640, 480, rs.format.y8, 30)

'''
Intel RealSense D435i (datasheet summary)
https://dev.intelrealsense.com/docs/opencv-wrapper

Depth
  Technology: Active IR Stereo
  Depth Field of View (FOV): 87°±3° x 58°±1° x 95°±3°
  Minimum Depth Distance (Min-Z): 0.105 m
  Depth Output Resolution & Frame Rate: up to 1280 x 720 active stereo depth resolution, up to 90 fps

RGB
  Sensor Resolution: 1920 x 1080
  Frame Rate: 30 fps
  Sensor FOV (H x V x D): 69.4° x 42.5° x 77° (±3°)
'''
# Start streaming
pipeline.start(config)

# Retrieve the stream profiles and intrinsics for both infrared cameras
profiles = pipeline.get_active_profile()
streams = {"left": profiles.get_stream(rs.stream.infrared, 1).as_video_stream_profile(),
           "right": profiles.get_stream(rs.stream.infrared, 2).as_video_stream_profile()}
infrared_intrinsics = {"left": streams["left"].get_intrinsics(),
                       "right": streams["right"].get_intrinsics()}

# Get the relative extrinsics between the left and right camera
(R, T) = get_extrinsics(streams["left"], streams["right"])

depth_profile = rs.video_stream_profile(profiles.get_stream(rs.stream.depth))
depth_intrinsics = depth_profile.get_intrinsics()

color_profile = rs.video_stream_profile(profiles.get_stream(rs.stream.color))
color_intrinsics = color_profile.get_intrinsics()

(R1, T1) = get_extrinsics(streams["left"], color_profile)

print("infrared_intrinsics_left\n", streams["left"].get_intrinsics())
print("infrared_intrinsics_right\n", streams["right"].get_intrinsics())
print("color_intrinsics\n", color_intrinsics)
print("depth_intrinsics\n", depth_intrinsics)

print("Rt-irLeft2irRight\n", get_extrinsics(streams["left"], streams["right"]))
print("Rt-irLeft2color\n", get_extrinsics(streams["left"], color_profile))
print("Rt-irRight2color\n", get_extrinsics(streams["right"], color_profile))

print("Rt-dep2color\n", get_extrinsics(depth_profile, color_profile))
print("Rt-color2dep\n", get_extrinsics(color_profile, depth_profile))
print("Rt-color2irLeft\n", get_extrinsics(color_profile, streams["left"]))
print("Rt-color2irRight\n", get_extrinsics(color_profile, streams["right"]))


# The reported distortion coefficients are all 0
# Explanation:
# https://github.com/IntelRealSense/librealsense/issues/1430
# https://github.com/IntelRealSense/librealsense/issues/432
# https://github.com/IntelRealSense/librealsense/issues/368
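
# As a quick sanity check (this block is an addition, not part of the original
# script), print each stream's distortion model and coefficients as reported by the SDK:
for name, intr in [("ir_left", infrared_intrinsics["left"]),
                   ("ir_right", infrared_intrinsics["right"]),
                   ("color", color_intrinsics),
                   ("depth", depth_intrinsics)]:
    print(name, "model:", intr.model, "coeffs:", intr.coeffs)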

#a = cv2.imread("./image/image_depth_1.png", -1)

subnameRGB = "image_color_"
subnameDEP = "image_depth_"
subnameIR1 = "image_infrared_left_"
subnameIR2 = "image_infrared_right_"
color_endname = ".jpg"
depth_endname = ".png"
num = 0
try:
    while True:
        # Wait for a coherent set of frames: depth, color and both infrared
        frames = pipeline.wait_for_frames()
        depth_frame = frames.get_depth_frame()
        color_frame = frames.get_color_frame()
        ir_frame_left = frames.get_infrared_frame(1)
        ir_frame_right = frames.get_infrared_frame(2)

        if not depth_frame or not color_frame or not ir_frame_left or not ir_frame_right:
            continue

        # Convert images to numpy arrays
        depth_image = np.asanyarray(depth_frame.get_data())
        color_image = np.asanyarray(color_frame.get_data())

        ir_left_image = np.asanyarray(ir_frame_left.get_data())
        ir_right_image = np.asanyarray(ir_frame_right.get_data())



        # Equalize the left IR image and build false-color maps for display
        ir_left_image = cv2.equalizeHist(ir_left_image)
        ir_colormap = cv2.applyColorMap(ir_left_image, cv2.COLORMAP_JET)
        depth_colormap = cv2.applyColorMap(cv2.convertScaleAbs(depth_image, alpha=0.03), cv2.COLORMAP_JET)

        # Resize the color image to the IR resolution so the two can be shown side by side
        h, w = ir_colormap.shape[:2]
        resize_color = cv2.resize(color_image, (w, h))
        images = np.hstack((ir_colormap, resize_color))

        cv2.imshow('video test', images)
        if cv2.waitKey(1) & 0xFF == ord('s'):
            num += 1
            cv2.imwrite('./image/'+ subnameRGB + str(num) + depth_endname, color_image)
            #depth_image.astype(np.uint16)
            cv2.imwrite('./image/'+ subnameDEP + str(num) + depth_endname, depth_image)
            cv2.imwrite('./image/'+ subnameIR1 + str(num) + depth_endname, ir_left_image)
            cv2.imwrite('./image/'+ subnameIR2 + str(num) + depth_endname, ir_right_image)
        else:
            continue
finally:
    # Stop streaming
    pipeline.stop()

Example output:

infrared_intrinsics_left
 width: 640, height: 480, ppx: 314.846, ppy: 238.702, fx: 384.956, fy: 384.956, model: 4, coeffs: [0, 0, 0, 0, 0]
infrared_intrinsics_right
 width: 640, height: 480, ppx: 314.846, ppy: 238.702, fx: 384.956, fy: 384.956, model: 4, coeffs: [0, 0, 0, 0, 0]
color_intrinsics
 width: 1920, height: 1080, ppx: 967.033, ppy: 539.492, fx: 1389.77, fy: 1391.03, model: 2, coeffs: [0, 0, 0, 0, 0]
depth_intrinsics
 width: 640, height: 480, ppx: 314.846, ppy: 238.702, fx: 384.956, fy: 384.956, model: 4, coeffs: [0, 0, 0, 0, 0]
Rt-irLeft2irRight
 (array([[1., 0., 0.],
       [0., 1., 0.],
       [0., 0., 1.]]), array([-0.04994006,  0.        ,  0.        ]))
Rt-irLeft2color
 (array([[ 0.99977833, -0.0194878 ,  0.00796849],
       [ 0.01946379,  0.99980581,  0.00307983],
       [-0.00802696, -0.00292405,  0.99996352]]), array([1.47467265e-02, 4.92615509e-04, 4.66895290e-05]))
Rt-irRight2color
 (array([[ 0.99977833, -0.0194878 ,  0.00796849],
       [ 0.01946379,  0.99980581,  0.00307983],
       [-0.00802696, -0.00292405,  0.99996352]]), array([ 0.06467572,  0.00146464, -0.00035418]))
Rt-dep2color
 (array([[ 0.99977833, -0.0194878 ,  0.00796849],
       [ 0.01946379,  0.99980581,  0.00307983],
       [-0.00802696, -0.00292405,  0.99996352]]), array([1.47467265e-02, 4.92615509e-04, 4.66895290e-05]))
Rt-color2dep
 (array([[ 0.99977833,  0.01946379, -0.00802696],
       [-0.0194878 ,  0.99980581, -0.00292405],
       [ 0.00796849,  0.00307983,  0.99996352]]), array([-0.01475267, -0.000205  , -0.00016571]))
Rt-color2irLeft
 (array([[ 0.99977833,  0.01946379, -0.00802696],
       [-0.0194878 ,  0.99980581, -0.00292405],
       [ 0.00796849,  0.00307983,  0.99996352]]), array([-0.01475267, -0.000205  , -0.00016571]))
Rt-color2irRight
 (array([[ 0.99977833,  0.01946379, -0.00802696],
       [-0.0194878 ,  0.99980581, -0.00292405],
       [ 0.00796849,  0.00307983,  0.99996352]]), array([-0.06469274, -0.000205  , -0.00016571]))
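
These factory numbers can be used directly. The left-to-right IR translation of about -0.0499 m is the roughly 50 mm stereo baseline, which together with fx relates disparity to depth (Z = fx * b / d); the color intrinsics reproduce the ~69.4° horizontal FOV quoted above via HFOV = 2*atan(W / (2*fx)); and K, R, T are enough to map a depth pixel into the color image by hand. The sketch below reuses the values printed above; the disparity, pixel coordinates and depth in it are made-up example inputs, not measurements.

import numpy as np

# Calibration values copied from the output above
fx_ir, baseline = 384.956, 0.04994                       # left IR focal length [px], IR baseline [m]
K_depth = np.array([[384.956, 0.0, 314.846],
                    [0.0, 384.956, 238.702],
                    [0.0, 0.0, 1.0]])
K_color = np.array([[1389.77, 0.0, 967.033],
                    [0.0, 1391.03, 539.492],
                    [0.0, 0.0, 1.0]])
R = np.array([[ 0.99977833, -0.0194878 ,  0.00796849],   # depth -> color rotation
              [ 0.01946379,  0.99980581,  0.00307983],
              [-0.00802696, -0.00292405,  0.99996352]])
T = np.array([1.47467265e-02, 4.92615509e-04, 4.66895290e-05])  # depth -> color translation [m]

# Depth from stereo disparity: Z = fx * b / d (25 px is a made-up example disparity)
print("Z at 25 px disparity:", fx_ir * baseline / 25.0, "m")

# Horizontal FOV implied by the color intrinsics (compare with the 69.4 deg spec)
print("color HFOV:", np.degrees(2 * np.arctan(1920 / (2 * 1389.77))), "deg")

# Map one depth pixel (u, v) with depth Z [m] into the color image (made-up example values)
u, v, Z = 320.0, 240.0, 0.8
p_depth = Z * np.linalg.inv(K_depth) @ np.array([u, v, 1.0])  # deproject (distortion coeffs are all zero)
p_color = R @ p_depth + T                                     # depth frame -> color frame
q = K_color @ p_color                                         # project with the color K
print("maps to color pixel:", q[:2] / q[2])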

 
