整理資料發現未對Bumblebee的分離圖像、保存成cv::Mat格式的使用方法留存筆記,這裏補上,備忘。
使用須知
- 下述Bumblebee相機直接獲取的圖片是左右視圖混合(interleaved)之後的結果,如果需要進行雙目視覺處理,需要對interleaved的結果進行分離,得到左右相機採集的圖片;
- Bumblebee相機出廠時即被標定好,無需自己標定,使用官方提供的API即可獲得標定好的左右視圖;當然也可以獲得raw圖,自己進行標定。
- 標定好的stereo pairs只是進行了相機標定,如果使用該圖片計算深度圖,還需進行立體校正,使相機滿足極線約束。
代碼
環境:Win10+OpenCV3.4.1 +(Triclops_4.0.3.4_x64+FlyCapture_2.12.3.31_x64)
/*
可選分辨率:
320×240; 384×288; 400×300; 512×384;
640×480; 768×576; 800×600; 960×720;
1024×768; 1152×864; 1280×960;
*/
#include <iostream>
#include <fstream>
#include <string>
#include "captureimages.h"
#include "Example_Exception.h"
#include <opencv2/highgui.hpp>
#include <opencv2/opencv.hpp>
using namespace std;
using namespace cv;
int feed_height = 480, feed_width = 640; // 設置輸出分辨率
// Entry point: connect to a Bumblebee stereo camera, grab pixel-interleaved
// frames, rectify them with the Triclops SDK, split them into left/right
// views, and save each view as a PNG via cv::Mat until the user presses 'q'.
// Returns 0 on success, -1 on any SDK error.
int main(int /* argc */, char ** /* argv */)
{
    TriclopsError triclops_status;
    TriclopsContext context;
    TriclopsColorStereoPair colorStereoInput;
    // camera to be used to grab color image
    FC2::Camera camera;
    // grabbed unprocessed image
    FC2::Image grabbedImage;
    try {
        // connect camera
        FC2::Error flycap_status = camera.Connect();
        handleError("FlyCapture2::Camera::Connect()", flycap_status, __LINE__);
        // configure camera by setting stereo mode
        //configureCamera(camera);
        // Select the sensor pair to use: left+right (WIDE) or center+right (NARROW)
        FC2T::ErrorType fc2TriclopsError;
        FC2T::StereoCameraMode mode = FC2T::TWO_CAMERA_WIDE; // TWO_CAMERA_WIDE / TWO_CAMERA_NARROW
        fc2TriclopsError = FC2T::setStereoMode(camera, mode);
        handleError("Fc2Triclops::setStereoMode()", fc2TriclopsError, __LINE__);
        // generate the Triclops context
        generateTriclopsContext(camera, context);
        // NOTE: triclopsSetCameraConfiguration must be called BEFORE setting the
        // resolution via triclopsPrepareRectificationData; otherwise, at the same
        // output resolution, different baselines yield different focal lengths.
        // Setting the camera configuration. This is useful only when using a
        // triple sensor stereo camera (e.g. XB3).
        triclops_status = triclopsSetCameraConfiguration(context, TriCfg_2CAM_HORIZONTAL_WIDE); // TriCfg_2CAM_HORIZONTAL_WIDE / TriCfg_2CAM_HORIZONTAL_NARROW
        triclops_examples::handleError("triclopsSetCameraConfiguration()", triclops_status, __LINE__);
        // Prepare the rectification buffers using the input resolution as output resolution.
        triclops_status = triclopsPrepareRectificationData(context, feed_height, feed_width, feed_height, feed_width);
        triclops_examples::handleError("triclopsPrepareRectificationData()", triclops_status, __LINE__);
        // start grabbing frames
        FC2::Error fc2Error = camera.StartCapture();
        handleError("FlyCapture2::Camera::StartCapture()", fc2Error, __LINE__);
        while ((char)cv::waitKey(5) != 'q') // press 'q' to quit
        {
            // Fetch one frame from memory (left/right views are pixel-interleaved)
            fc2Error = camera.RetrieveBuffer(&grabbedImage); // unrectified raw image containing both views
            handleError("FlyCapture2::Camera::RetrieveBuffer()", fc2Error, __LINE__);
            // right and left image extracted from grabbed image
            ImageContainer imageCont;
            // generate color stereo input from grabbed image
            generateColorStereoInput(context, grabbedImage, imageCont, colorStereoInput);
            // Rectify the pair (alternatively: triclopsRectifyColorImage)
            triclops_status = triclopsColorRectify(context, &colorStereoInput);
            triclops_examples::handleError("triclopsColorRectify()", triclops_status, __LINE__);
            // Retrieve the rectified left and right views separately
            TriclopsColorImage color_left, color_right; // rectified image
            triclops_status = triclopsGetColorImage(context, TriImg_RECTIFIED_COLOR, TriCam_RIGHT, &color_right);
            triclops_examples::handleError("triclopsGetColorImage()", triclops_status, __LINE__);
            triclops_status = triclopsGetColorImage(context, TriImg_RECTIFIED_COLOR, TriCam_LEFT, &color_left);
            triclops_examples::handleError("triclopsGetColorImage()", triclops_status, __LINE__);
            /**************** convert to cv::Mat ****************/
            cv::Mat right, left;
            cv::Mat imageR(color_right.nrows, color_right.ncols, CV_8UC4, color_right.data->value, color_right.rowinc);
            cv::Mat imageL(color_left.nrows, color_left.ncols, CV_8UC4, color_left.data->value, color_left.rowinc);
            // Drop the unused 4th channel of the 4-channel buffers. In OpenCV,
            // CV_RGBA2RGB is an alias of CV_BGRA2BGR, so channel order is kept.
            cvtColor(imageL, left, CV_RGBA2RGB);
            cvtColor(imageR, right, CV_RGBA2RGB);
            imwrite("left.png", left);
            imwrite("right.png", right);
            /******************* END********************/
        }
        // Stop grabbing and close the camera
        camera.StopCapture();
        camera.Disconnect();
        // FIX: previously the stereo pair was freed only on the exception path,
        // leaking its buffers on a normal shutdown; free it here as well.
        triclopsFreeColorStereoPair(&colorStereoInput, false);
        // Destroy the Triclops context
        triclops_status = triclopsDestroyContext(context);
        triclops_examples::handleError("triclopsDestroyContext()", triclops_status, __LINE__);
    }
    catch (const triclops_examples::Example_Exception &exception) {
        std::cout << exception.what() << std::endl;
        // NOTE(review): if an early call (e.g. Connect) threw, `context` and
        // `colorStereoInput` are still uninitialized here — the two SDK calls
        // below then operate on garbage; verify against the SDK examples.
        triclopsDestroyContext(context);
        triclopsFreeColorStereoPair(&colorStereoInput, false);
        if (camera.IsConnected()) {
            camera.StopCapture();
            camera.Disconnect();
        }
        return -1;
    }
    cv::waitKey(0);
    return 0;
}
// configure camera by setting stereo mode
// Put the camera into two-sensor, wide-baseline stereo mode.
// Throws (via handleError) if the bridge call fails.
void configureCamera(FC2::Camera &camera)
{
    const FC2T::StereoCameraMode stereoMode = FC2T::TWO_CAMERA_WIDE;
    const FC2T::ErrorType status = FC2T::setStereoMode(camera, stereoMode);
    handleError("Fc2Triclops::setStereoMode()", status, __LINE__);
}
// generate triclops context from connected camera
// Build a Triclops context from the calibration data stored on the connected
// camera, keyed by its serial number. Throws (via handleError) on failure.
void generateTriclopsContext(FC2::Camera &camera, TriclopsContext &context)
{
    FC2::CameraInfo info;
    const FC2::Error camStatus = camera.GetCameraInfo(&info);
    handleError("FlyCapture2::Camera::GetCameraInfo()", camStatus, __LINE__);

    const FC2T::ErrorType bridgeStatus =
        FC2T::getContextFromCamera(info.serialNumber, &context);
    handleError("Fc2Triclops::getContextFromCamera()", bridgeStatus, __LINE__);
}
// generate color stereo input
// De-interleave a grabbed raw frame, color-process both views, and pack them
// into a TriclopsColorStereoPair ready for rectification.
// Throws (via the handleError overloads) if any SDK call fails.
void generateColorStereoInput( TriclopsContext const &context,
FC2::Image const &grabbedImage,
ImageContainer &imageCont,
TriclopsColorStereoPair &stereoPair)
{
    TriclopsColorImage triclopsViews[2];
    FC2::Image *rawPair = imageCont.tmp;
    FC2::Image *bgraPair = imageCont.unprocessed;

    // Split the pixel-interleaved raw data into separate right/left raw images.
    const FC2T::ErrorType unpackStatus =
        FC2T::unpackUnprocessedRawOrMono16Image( grabbedImage,
                                                 true /*assume little endian*/,
                                                 rawPair[RIGHT],
                                                 rawPair[LEFT]);
    handleError("Fc2Triclops::unpackUnprocessedRawOrMono16Image()", unpackStatus, __LINE__);

    // Demosaic each view (HQ_LINEAR) and convert it to 4-channel BGRU.
    for (int view = 0; view < 2; ++view) {
        FC2::Error fcStatus = rawPair[view].SetColorProcessing(FC2::HQ_LINEAR);
        handleError("FlyCapture2::Image::SetColorProcessing()", fcStatus, __LINE__);
        fcStatus = rawPair[view].Convert(FC2::PIXEL_FORMAT_BGRU, &bgraPair[view]);
        handleError("FlyCapture2::Image::Convert()", fcStatus, __LINE__);
    }

    // Wrap each converted buffer as a Triclops color image.
    TriclopsError status;
    for (int view = 0; view < 2; ++view) {
        status = triclopsLoadColorImageFromBuffer(
            reinterpret_cast<TriclopsColorPixel *>(bgraPair[view].GetData()),
            bgraPair[view].GetRows(),
            bgraPair[view].GetCols(),
            bgraPair[view].GetStride(),
            &triclopsViews[view]);
        triclops_examples::handleError("triclopsLoadColorImageFromBuffer()", status, __LINE__);
    }

    // Pack the two views into a single stereo-pair structure for the SDK.
    status = triclopsBuildColorStereoPairFromBuffers(
        context,
        &triclopsViews[RIGHT],
        &triclopsViews[LEFT],
        &stereoPair);
    triclops_examples::handleError("triclopsBuildColorStereoPairFromBuffers()", status, __LINE__);
}
// Convert a failed FlyCapture2 call into an Example_Exception.
// No-op when the error code is PGRERROR_OK.
void handleError(const std::string &description, const FlyCapture2::Error &error, int lineNumber)
{
    if (error == FlyCapture2::PGRERROR_OK) {
        return;
    }
    throw triclops_examples::Example_Exception(description, error.GetDescription(), lineNumber);
}
// Convert a failed Fc2Triclops bridge call into an Example_Exception.
// No-op when the error code is ERRORTYPE_OK.
void handleError(const std::string &description, const Fc2Triclops::ErrorType &error, int lineNumber)
{
    if (error == Fc2Triclops::ErrorType::ERRORTYPE_OK) {
        return;
    }
    throw triclops_examples::Example_Exception(description, "FlyCapture 2 bridge error", lineNumber);
}
分離未標定原圖的代碼如下:
// Standalone fragment (assumes `cam`, `error`, `PrintError` are in scope):
// grab ONE raw frame and split it into UNRECTIFIED left/right BGR cv::Mats,
// bypassing Triclops rectification entirely.
//Retrieve an image
FlyCapture2::Image rawImage;
error = cam.RetrieveBuffer(&rawImage);
if (error != PGRERROR_OK)
{
PrintError(error);
return -1;
}
//Separate the pixel-interleaved rawImage into right and left raw images
FlyCapture2::Image rawImgR, rawImgL;
unpackUnprocessedRawOrMono16Image(rawImage, true, rawImgR, rawImgL);
rawImgR.SetColorProcessing(HQ_LINEAR);
rawImgL.SetColorProcessing(HQ_LINEAR);
//Convert each raw view to 3-channel BGR
FlyCapture2::Image rgbImgR, rgbImgL;
rawImgR.Convert(PIXEL_FORMAT_BGR, &rgbImgR);
rawImgL.Convert(PIXEL_FORMAT_BGR, &rgbImgL);
//Wrap the converted buffers as OpenCV Mats (no pixel copy; Mats borrow the
//FlyCapture2 buffers, so rgbImgR/rgbImgL must outlive imgR/imgL)
// NOTE(review): rowBytes is derived from the RIGHT image but reused for the
// LEFT Mat below — correct only if both views have identical dimensions and
// stride; verify before relying on imgL.
unsigned int rowBytes = (double)rgbImgR.GetReceivedDataSize() / (double)rgbImgR.GetRows();
cv::Mat imgR(rgbImgR.GetRows(), rgbImgR.GetCols(), CV_8UC3, rgbImgR.GetData(), rowBytes);
cv::Mat imgL(rgbImgL.GetRows(), rgbImgL.GetCols(), CV_8UC3, rgbImgL.GetData(), rowBytes);
SDK使用
官方在SDK安裝包中給出了很多例子和相應的API介紹文檔(主要有用的是Triclops_API_Documentation.chm、FlyCapture2 Documentation.chm),主要分爲兩個部分,分別對應於Triclops SDK安裝包和FlyCapture安裝包,結合例子和API可以很快上手使用,這裏進行簡單的介紹,方便大家查閱。
FlyCapture安裝包
FlyCapture安裝包裏提供的主要是一些較爲底層的API和例子,實現的是操作相機的基本功能,如採集相機圖像、設置相機參數等,文件路徑如下:
- C:\Program Files\Point Grey Research\FlyCapture2\src
- C:\Program Files\Point Grey Research\FlyCapture2\doc
例子如下:
Triclops SDK安裝包
Triclops SDK提供的是對一些計算機立體視覺功能的封裝,如實時獲取深度點雲、生成視差圖、校正畸變、OpenCV接口等,文件路徑如下:
- C:\Program Files\FLIR Integrated Imaging Solutions\Triclops Stereo Vision SDK\examples
- C:\Program Files\FLIR Integrated Imaging Solutions\Triclops Stereo Vision SDK\doc\API
例子如下:
備註
- 另附完整的工程+相關手冊的【雲盤下載鏈接】:ht?tps://pan.?baidu.c?om/s/1wvodHEKxOI4-fSbD1rJK6g
提取碼:a48b (去掉鏈接中的"?"可用)
- 更多格式轉換方法參考:Triclops圖片格式轉換成OpenCV Mat格式
- 歡迎大家留言批評指正,互相交流、共同進步。