【轉】PCL室內三維重建

手頭有三個prime sensor攝像頭,分別固定在不同角度,打算根據RGBD信息,將三個攝像頭的點雲數據拼接起來。

設備限制+能力不足,一直沒有把point cloud library 1.8環境搭建起來,因此無法實時讀取點雲信息。此外,筆記本電腦USB芯片總線中斷協議限制,亦無法同時使用三個攝像頭。

在如此坑爹的境地,分享下我是怎麼搞三維重建的。。。。


本文環境win7+vs2012+opencv2.4.9+openni2.0+pcl1.7.2


一、點雲數據獲取

1.採用openni2.0 採集depth和rgb數據

2.OpenCV實時顯示(否則你不知道採集的是啥),藉助IplImage接口拷貝RGB數據給cloudpoint(直接cloudpoint之間賦值結果不對)

3.利用PCL的IO接口,將數據倒騰到PCD文件(PCD採用binary形式保存,ASCII性能實在太差)

  1. #include <pcl/io/pcd_io.h>  
  2. #include <pcl/point_types.h>  
  3. // 標準庫頭文件  
  4. #include <iostream>  
  5. #include <string>  
  6. #include <vector>   
  7. // OpenCV頭文件  
  8. #include <opencv2\core\core.hpp>  
  9. #include <opencv2\highgui\highgui.hpp>  
  10. #include <opencv2\imgproc\imgproc.hpp>   
  11. // OpenNI頭文件  
  12. #include <OpenNI.h>   
  13. typedef unsigned char uint8_t;  
  14. // namespace  
  15. using namespace std;  
  16. using namespace openni;  
  17. using namespace cv;  
  18. using namespace pcl;  
  19.   
  20. void CheckOpenNIError( Status result, string status )    
  21. {     
  22.     if( result != STATUS_OK )     
  23.         cerr << status << ” Error: ” << OpenNI::getExtendedError() << endl;    
  24. }   
  25.   
  26. int main( int argc, char **argv )  
  27. {  
  28.     Status result = STATUS_OK;  
  29.     int i,j;  
  30.     float x=0.0,y=0.0,z=0.0,xx=0.0;    
  31.     //IplImage *test,*test2;  
  32.     IplImage *test2;  
  33.   
  34.     //point cloud   
  35.     PointCloud<PointXYZRGB> cloud;  
  36.   
  37.     //opencv image  
  38.     Mat cvBGRImg;   
  39.     Mat cvDepthImg;    
  40.   
  41.     //OpenNI2 image    
  42.     VideoFrameRef oniDepthImg;    
  43.     VideoFrameRef oniColorImg;  
  44.   
  45.     namedWindow(”depth”);    
  46.     namedWindow(”image”);   
  47.   
  48.     char key=0;  
  49.   
  50.     // 初始化OpenNI    
  51.     result = OpenNI::initialize();  
  52.     CheckOpenNIError( result, ”initialize context” );   
  53.       
  54.     // open device      
  55.     Device device;    
  56.     result = device.open( openni::ANY_DEVICE );   
  57.     CheckOpenNIError( result, ”open device” );  
  58.   
  59.   
  60.     // create depth stream     
  61.     VideoStream oniDepthStream;    
  62.     result = oniDepthStream.create( device, openni::SENSOR_DEPTH );  
  63.     CheckOpenNIError( result, ”create depth stream” );  
  64.     
  65.     // set depth video mode    
  66.     VideoMode modeDepth;    
  67.     modeDepth.setResolution( 640, 480 );    
  68.     modeDepth.setFps( 30 );    
  69.     modeDepth.setPixelFormat( PIXEL_FORMAT_DEPTH_1_MM );    
  70.     oniDepthStream.setVideoMode(modeDepth);    
  71.     // start depth stream    
  72.     result = oniDepthStream.start();   
  73.     CheckOpenNIError( result, ”start depth stream” );  
  74.      
  75.     // create color stream    
  76.     VideoStream oniColorStream;    
  77.     result = oniColorStream.create( device, openni::SENSOR_COLOR );    
  78.     CheckOpenNIError( result, ”create color stream” );  
  79.     // set color video mode    
  80.     VideoMode modeColor;    
  81.     modeColor.setResolution( 640, 480 );    
  82.     modeColor.setFps( 30 );    
  83.     modeColor.setPixelFormat( PIXEL_FORMAT_RGB888 );    
  84.     oniColorStream.setVideoMode( modeColor);   
  85.     // start color stream    
  86.     result = oniColorStream.start();   
  87.     CheckOpenNIError( result, ”start color stream” );  
  88.   
  89.     while(true)  
  90.     {  
  91.         // read frame    
  92.         if( oniColorStream.readFrame( &oniColorImg ) == STATUS_OK )    
  93.         {    
  94.             // convert data into OpenCV type    
  95.             Mat cvRGBImg( oniColorImg.getHeight(), oniColorImg.getWidth(), CV_8UC3, (void*)oniColorImg.getData() );    
  96.             cvtColor( cvRGBImg, cvBGRImg, CV_RGB2BGR );    
  97.             imshow( ”image”, cvBGRImg );    
  98.         }    
  99.   
  100.         if( oniDepthStream.readFrame( &oniDepthImg ) == STATUS_OK )    
  101.         {    
  102.             Mat cvRawImg16U( oniDepthImg.getHeight(), oniDepthImg.getWidth(), CV_16UC1, (void*)oniDepthImg.getData() );    
  103.             cvRawImg16U.convertTo( cvDepthImg, CV_8U, 255.0/(oniDepthStream.getMaxPixelValue()));      
  104.             imshow( ”depth”, cvDepthImg );    
  105.         }    
  106.         // quit  
  107.         if( cv::waitKey( 1 ) == ‘q’ )        
  108.             break;  
  109.         // capture  depth and color data     
  110.         if( cv::waitKey( 1 ) == ‘c’ )  
  111.         {  
  112.             //get data  
  113.             DepthPixel *pDepth = (DepthPixel*)oniDepthImg.getData();  
  114.             //create point cloud  
  115.             cloud.width = oniDepthImg.getWidth();  
  116.             cloud.height = oniDepthImg.getHeight();  
  117.             cloud.is_dense = false;  
  118.             cloud.points.resize(cloud.width * cloud.height);  
  119.             //test = cvCreateImage(cvSize(cloud.width,cloud.height),IPL_DEPTH_8U,3);  
  120.             test2 = &IplImage(cvBGRImg);  
  121.   
  122.             for(i=0;i<oniDepthImg.getHeight();i++)  
  123.             {  
  124.                  for(j=0;j<oniDepthImg.getWidth();j++)  
  125.                  {  
  126.                      float k = i;    
  127.                      float m = j;   
  128.                      xx = pDepth[i*oniDepthImg.getWidth()+j];  
  129.                      CoordinateConverter::convertDepthToWorld (oniDepthStream,m,k,xx,&x,&y,&z);   
  130.                      cloud[i*cloud.width+j].x = x;  
  131.                      cloud[i*cloud.width+j].y = y;  
  132.                      cloud[i*cloud.width+j].z = xx;  
  133.                      cloud[i*cloud.width+j].b = (uint8_t)test2->imageData[i*test2->widthStep+j*3+0];  
  134.                      cloud[i*cloud.width+j].g = (uint8_t)test2->imageData[i*test2->widthStep+j*3+1];  
  135.                      cloud[i*cloud.width+j].r = (uint8_t)test2->imageData[i*test2->widthStep+j*3+2];  
  136.                     /* test->imageData[i*test->widthStep+j*3+0] = test2->imageData[i*test2->widthStep+j*3+0]; 
  137.                      test->imageData[i*test->widthStep+j*3+1] = test2->imageData[i*test2->widthStep+j*3+1]; 
  138.                      test->imageData[i*test->widthStep+j*3+2] = test2->imageData[i*test2->widthStep+j*3+2];*/  
  139.                  }  
  140.              }  
  141.               
  142.             //cvSaveImage(“test.jpg”,test);  
  143.             pcl::io::savePCDFileBinaryCompressed(”test_pcdc.pcd”,cloud);  
  144.             cerr<<”Saved ”<<cloud.points.size()<<“ data points to test_pcd.pcd.”<<endl;  
  145.             imwrite(”c_color.jpg”,cvBGRImg);  
  146.             imwrite(”c_depth.jpg”,cvDepthImg);  
  147.             /*for(size_t i=0;i<cloud.points.size();++i) 
  148.             cerr<<”    ”<<cloud.points[i].x<<” ”<<cloud.points[i].y<<” ”<<cloud.points[i].z<<endl;*/  
  149.         }  
  150.     }  
  151. }  
#include <pcl/io/pcd_io.h>
#include <pcl/point_types.h>
// Standard library headers
#include <cstdint>
#include <iostream>
#include <string>
#include <vector>
// OpenCV headers
#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
// OpenNI headers
#include <OpenNI.h>

using namespace std;
using namespace openni;
using namespace cv;
using namespace pcl;

// Print OpenNI's extended error message when an API call did not succeed.
void CheckOpenNIError( Status result, string status )
{
    if( result != STATUS_OK )
        cerr << status << " Error: " << OpenNI::getExtendedError() << endl;
}

// Preview a PrimeSense depth/color stream and, on 'c', save the current
// frame pair as a colored, compressed-binary PCD file. 'q' quits.
int main( int argc, char **argv )
{
    Status result = STATUS_OK;

    PointCloud<PointXYZRGB> cloud;     // capture target
    Mat cvBGRImg;                      // color preview (BGR)
    Mat cvDepthImg;                    // depth preview (8-bit)
    VideoFrameRef oniDepthImg;
    VideoFrameRef oniColorImg;

    namedWindow("depth");
    namedWindow("image");

    result = OpenNI::initialize();
    CheckOpenNIError( result, "initialize context" );

    Device device;
    result = device.open( openni::ANY_DEVICE );
    CheckOpenNIError( result, "open device" );

    // Depth stream: 640x480 @ 30fps, depth in millimeters.
    VideoStream oniDepthStream;
    result = oniDepthStream.create( device, openni::SENSOR_DEPTH );
    CheckOpenNIError( result, "create depth stream" );
    VideoMode modeDepth;
    modeDepth.setResolution( 640, 480 );
    modeDepth.setFps( 30 );
    modeDepth.setPixelFormat( PIXEL_FORMAT_DEPTH_1_MM );
    oniDepthStream.setVideoMode(modeDepth);
    result = oniDepthStream.start();
    CheckOpenNIError( result, "start depth stream" );

    // Color stream: 640x480 @ 30fps, RGB888.
    VideoStream oniColorStream;
    result = oniColorStream.create( device, openni::SENSOR_COLOR );
    CheckOpenNIError( result, "create color stream" );
    VideoMode modeColor;
    modeColor.setResolution( 640, 480 );
    modeColor.setFps( 30 );
    modeColor.setPixelFormat( PIXEL_FORMAT_RGB888 );
    oniColorStream.setVideoMode( modeColor );
    result = oniColorStream.start();
    CheckOpenNIError( result, "start color stream" );

    while(true)
    {
        if( oniColorStream.readFrame( &oniColorImg ) == STATUS_OK )
        {
            // OpenNI delivers RGB; OpenCV shows BGR.
            Mat cvRGBImg( oniColorImg.getHeight(), oniColorImg.getWidth(),
                          CV_8UC3, (void*)oniColorImg.getData() );
            cvtColor( cvRGBImg, cvBGRImg, CV_RGB2BGR );
            imshow( "image", cvBGRImg );
        }

        if( oniDepthStream.readFrame( &oniDepthImg ) == STATUS_OK )
        {
            Mat cvRawImg16U( oniDepthImg.getHeight(), oniDepthImg.getWidth(),
                             CV_16UC1, (void*)oniDepthImg.getData() );
            cvRawImg16U.convertTo( cvDepthImg, CV_8U,
                                   255.0/(oniDepthStream.getMaxPixelValue()) );
            imshow( "depth", cvDepthImg );
        }

        // Poll once per iteration; the original called waitKey twice and
        // could therefore swallow a pending key press.
        int key = cv::waitKey( 1 );
        if( key == 'q' )
            break;

        if( key == 'c' )
        {
            const DepthPixel *pDepth = (const DepthPixel*)oniDepthImg.getData();
            cloud.width = oniDepthImg.getWidth();
            cloud.height = oniDepthImg.getHeight();
            cloud.is_dense = false;
            cloud.points.resize(cloud.width * cloud.height);

            for(int i = 0; i < oniDepthImg.getHeight(); i++)
            {
                for(int j = 0; j < oniDepthImg.getWidth(); j++)
                {
                    float x = 0.0f, y = 0.0f, z = 0.0f;
                    float depth = pDepth[i*oniDepthImg.getWidth()+j];
                    CoordinateConverter::convertDepthToWorld(
                        oniDepthStream, (float)j, (float)i, depth, &x, &y, &z );

                    PointXYZRGB &pt = cloud[i*cloud.width+j];
                    pt.x = x;
                    pt.y = y;
                    pt.z = z;
                    // Read pixels from the Mat directly instead of through a
                    // dangling pointer to a temporary IplImage.
                    const Vec3b &bgr = cvBGRImg.at<Vec3b>(i, j);
                    pt.b = bgr[0];
                    pt.g = bgr[1];
                    pt.r = bgr[2];
                }
            }

            pcl::io::savePCDFileBinaryCompressed("test_pcdc.pcd", cloud);
            cerr << "Saved " << cloud.points.size()
                 << " data points to test_pcdc.pcd." << endl;
            imwrite("c_color.jpg", cvBGRImg);
            imwrite("c_depth.jpg", cvDepthImg);
        }
    }

    // Tear down OpenNI resources.
    oniDepthStream.destroy();
    oniColorStream.destroy();
    device.close();
    OpenNI::shutdown();
    return 0;
}
按C鍵,獲取點雲信息,按Q鍵退出(代碼中檢測的是小寫 'q' 和 'c')

我們得到下面三組數據:




二、點雲展示

這個沒啥東西,直接讀取PCD,調用show即可

  1. #include <pcl/visualization/cloud_viewer.h>  
  2. #include <iostream>  
  3. #include <pcl/io/io.h>  
  4. #include <pcl/io/pcd_io.h>  
  5.       
  6. int main ()  
  7. {  
  8.     pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud (new pcl::PointCloud<pcl::PointXYZRGB>);  
  9.   
  10.     if(pcl::io::loadPCDFile<pcl::PointXYZRGB>(“test_pcda.pcd”,*cloud)==-1)//*打開點雲文件  
  11.     {  
  12.         PCL_ERROR(”Couldn’t read file test_pcd.pcd\n”);  
  13.         return(-1);  
  14.     }  
  15.     std::cout<<”Loaded ”<<cloud->width*cloud->height<<“ data points from test_pcd.pcd with the following fields: ”<<std::endl;  
  16.   
  17.     pcl::visualization::CloudViewer viewer(”My First Cloud Viewer”);  
  18.     viewer.showCloud(cloud);  
  19.     while(!viewer.wasStopped())  
  20.     {  
  21.   
  22.     }  
  23. }  
#include <pcl/visualization/cloud_viewer.h>
#include <iostream>
#include <pcl/io/io.h>
#include <pcl/io/pcd_io.h>

// Read a colored point cloud from disk and show it in a viewer window
// until the user closes it.
int main ()
{
    // Cloud that will receive the PCD file contents.
    pcl::PointCloud<pcl::PointXYZRGB>::Ptr loaded(new pcl::PointCloud<pcl::PointXYZRGB>);

    const int rc = pcl::io::loadPCDFile<pcl::PointXYZRGB>("test_pcda.pcd", *loaded);
    if (rc == -1)  // file missing or unreadable
    {
        PCL_ERROR("Couldn't read file test_pcd.pcd\n");
        return -1;
    }

    std::cout << "Loaded " << loaded->width * loaded->height
              << " data points from test_pcd.pcd with the following fields: "
              << std::endl;

    pcl::visualization::CloudViewer window("My First Cloud Viewer");
    window.showCloud(loaded);
    // Block until the viewer window is closed.
    while (!window.wasStopped())
    {
    }
}
三組數據對應結果:





三、點雲拼接

  我直接拿原始數據讓ICP處理,所以速度非常慢!關於ICP解釋,摘錄自《點雲庫PCL學習教程》

Iterative Closest Point,迭代最近點算法,對待拼接的2片點雲,首先根據一定的準則確立對應點集P與Q,其中對應點對的個數爲N。然後通過最小二乘法迭代計算最優的座標變換,即旋轉矩陣R和平移矢量t,使得誤差函數最小。ICP算法計算簡便直觀且可使拼接具有較好的精度,但是算法的運行速度以及向全局最優的收斂性卻在很大程度上依賴於給定的初始變換估計以及在迭代過程中對應關係的確立。各種粗拼接技術可爲ICP算法提供較好的初始位置,所以迭代過程中確立正確對應點集以避免迭代陷入局部極值成爲各種改進算法的關鍵,決定了算法的收斂速度與最終的拼接精度。

ICP處理流程:

1.對原始點雲數據進行採樣

2.確定初始對應點集

3.去除錯誤對應點對

4.座標變換的求解

  1. #include <iostream>  
  2. #include <pcl/io/pcd_io.h>  
  3. #include <pcl/point_types.h>  
  4. #include <pcl/registration/icp.h>  
  5. #include <pcl/visualization/cloud_viewer.h>  
  6. #include <pcl/kdtree/kdtree_flann.h>  
  7. #include <time.h>  
  8.   
  9. int  main (int argc, char** argv)  
  10. {  
  11.   clock_t start,finish;  
  12.   double totaltime;  
  13.   
  14.    
  15.   pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud_in (new pcl::PointCloud<pcl::PointXYZRGB>);  
  16.   pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud_out (new pcl::PointCloud<pcl::PointXYZRGB>);  
  17.   pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud_out2 (new pcl::PointCloud<pcl::PointXYZRGB>);  
  18.   pcl::PointCloud<pcl::PointXYZRGB>::Ptr my_cloud (new pcl::PointCloud<pcl::PointXYZRGB>);  
  19.   pcl::PointCloud<pcl::PointXYZRGB>::Ptr my_cloud2 (new pcl::PointCloud<pcl::PointXYZRGB>);  
  20.     
  21.   start=clock();  
  22.   if(pcl::io::loadPCDFile<pcl::PointXYZRGB>(“test_pcda.pcd”,*cloud_in)==-1)//*打開點雲文件  
  23.   {  
  24.      PCL_ERROR(”Couldn’t read file test_pcd.pcd\n”);  
  25.      return(-1);  
  26.   }  
  27.   finish=clock();  
  28.   totaltime = (double)(finish-start)/CLOCKS_PER_SEC;  
  29.   cout<<”\n load test_pcda.pcd data : ”<<totaltime<<“seconds!”<<endl;  
  30.   
  31.   start=clock();  
  32.   if(pcl::io::loadPCDFile<pcl::PointXYZRGB>(“test_pcdb.pcd”,*cloud_out)==-1)//*打開點雲文件  
  33.   {  
  34.         PCL_ERROR(”Couldn’t read file test_pcd.pcd\n”);  
  35.         return(-1);  
  36.   }  
  37.   finish=clock();  
  38.   totaltime = (double)(finish-start)/CLOCKS_PER_SEC;  
  39.   cout<<”\n load test_pcdb.pcd data : ”<<totaltime<<“seconds!”<<endl;  
  40.   
  41.   start=clock();  
  42.   if(pcl::io::loadPCDFile<pcl::PointXYZRGB>(“test_pcdc.pcd”,*cloud_out2)==-1)//*打開點雲文件  
  43.   {  
  44.         PCL_ERROR(”Couldn’t read file test_pcd.pcd\n”);  
  45.         return(-1);  
  46.   }  
  47.   finish=clock();  
  48.   totaltime = (double)(finish-start)/CLOCKS_PER_SEC;  
  49.   cout<<”\n load test_pcdc.pcd data : ”<<totaltime<<“seconds!”<<endl;  
  50.   
  51.   //call icp api  
  52.   start=clock();  
  53.   pcl::IterativeClosestPoint<pcl::PointXYZRGB, pcl::PointXYZRGB> icp;  
  54.   icp.setInputCloud(cloud_in);  
  55.   icp.setInputTarget(cloud_out);  
  56.   pcl::PointCloud<pcl::PointXYZRGB> Final;  
  57.   icp.align(Final);  
  58.   std::cout << ”has converged:” << icp.hasConverged() << “ score: ” <<  
  59.   icp.getFitnessScore() << std::endl;  
  60.   std::cout << icp.getFinalTransformation() << std::endl;  
  61.     
  62.   finish=clock();  
  63.   totaltime = (double)(finish-start)/CLOCKS_PER_SEC;  
  64.   cout<<”\n first time call icp process : ”<<totaltime<<“seconds!”<<endl;  
  65.   
  66.   //構造拼接臨時的點雲  
  67.   for(int i=0;i< Final.points.size();i++)  
  68.   {  
  69.         pcl::PointXYZRGB basic_point;  
  70.         basic_point.x = Final.points[i].x;  
  71.         basic_point.y = Final.points[i].y;  
  72.         basic_point.z = Final.points[i].z;  
  73.         basic_point.r = Final.points[i].r;  
  74.         basic_point.g = Final.points[i].g;  
  75.         basic_point.b = Final.points[i].b;  
  76.         my_cloud->points.push_back(basic_point);  
  77.   }  
  78.   
  79.   //call icp api another time  
  80.   start=clock();  
  81.   icp.setInputCloud(cloud_out2);  
  82.   icp.setInputTarget(my_cloud);  
  83.   pcl::PointCloud<pcl::PointXYZRGB> Final2;  
  84.   icp.align(Final2);  
  85.   std::cout << ”has converged:” << icp.hasConverged() << “ score: ” <<  
  86.   icp.getFitnessScore() << std::endl;  
  87.   std::cout << icp.getFinalTransformation() << std::endl;  
  88.   finish=clock();  
  89.   totaltime = (double)(finish-start)/CLOCKS_PER_SEC;  
  90.   cout<<”\n second time call icp process : ”<<totaltime<<“seconds!”<<endl;  
  91.   
  92.   //my_cloud.reset();  
  93.    //構造拼接最終的點雲  
  94.   for(int i=0;i< Final2.points.size();i++)  
  95.   {  
  96.         pcl::PointXYZRGB basic_point;  
  97.         basic_point.x = Final2.points[i].x;  
  98.         basic_point.y = Final2.points[i].y;  
  99.         basic_point.z = Final2.points[i].z;  
  100.         basic_point.r = Final2.points[i].r;  
  101.         basic_point.g = Final2.points[i].g;  
  102.         basic_point.b = Final2.points[i].b;  
  103.         my_cloud2->points.push_back(basic_point);  
  104.   }  
  105.   
  106.   pcl::visualization::CloudViewer viewer(”My First Cloud Viewer”);  
  107.   viewer.showCloud(my_cloud2);  
  108.   while(!viewer.wasStopped())  
  109.   {  
  110.   
  111.   }  
  112.   return (0);  
  113. }  
#include <iostream>
#include <pcl/io/pcd_io.h>
#include <pcl/point_types.h>
#include <pcl/registration/icp.h>
#include <pcl/visualization/cloud_viewer.h>
#include <pcl/kdtree/kdtree_flann.h>
#include <time.h>

// Load three PCD captures, chain two ICP alignments (a->b, then c->result),
// and show the merged cloud in a viewer window.
int main (int argc, char** argv)
{
  clock_t t_begin, t_end;
  double elapsed;

  pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud_in  (new pcl::PointCloud<pcl::PointXYZRGB>);
  pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud_out (new pcl::PointCloud<pcl::PointXYZRGB>);
  pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud_out2(new pcl::PointCloud<pcl::PointXYZRGB>);
  pcl::PointCloud<pcl::PointXYZRGB>::Ptr my_cloud  (new pcl::PointCloud<pcl::PointXYZRGB>);
  pcl::PointCloud<pcl::PointXYZRGB>::Ptr my_cloud2 (new pcl::PointCloud<pcl::PointXYZRGB>);

  // --- timed load of the first capture ---
  t_begin = clock();
  if (pcl::io::loadPCDFile<pcl::PointXYZRGB>("test_pcda.pcd", *cloud_in) == -1)
  {
    PCL_ERROR("Couldn't read file test_pcd.pcd\n");
    return -1;
  }
  t_end = clock();
  elapsed = (double)(t_end - t_begin) / CLOCKS_PER_SEC;
  cout << "\n load test_pcda.pcd data : " << elapsed << "seconds!" << endl;

  // --- timed load of the second capture ---
  t_begin = clock();
  if (pcl::io::loadPCDFile<pcl::PointXYZRGB>("test_pcdb.pcd", *cloud_out) == -1)
  {
    PCL_ERROR("Couldn't read file test_pcd.pcd\n");
    return -1;
  }
  t_end = clock();
  elapsed = (double)(t_end - t_begin) / CLOCKS_PER_SEC;
  cout << "\n load test_pcdb.pcd data : " << elapsed << "seconds!" << endl;

  // --- timed load of the third capture ---
  t_begin = clock();
  if (pcl::io::loadPCDFile<pcl::PointXYZRGB>("test_pcdc.pcd", *cloud_out2) == -1)
  {
    PCL_ERROR("Couldn't read file test_pcd.pcd\n");
    return -1;
  }
  t_end = clock();
  elapsed = (double)(t_end - t_begin) / CLOCKS_PER_SEC;
  cout << "\n load test_pcdc.pcd data : " << elapsed << "seconds!" << endl;

  // --- first ICP pass: a onto b ---
  t_begin = clock();
  pcl::IterativeClosestPoint<pcl::PointXYZRGB, pcl::PointXYZRGB> icp;
  icp.setInputCloud(cloud_in);
  icp.setInputTarget(cloud_out);
  pcl::PointCloud<pcl::PointXYZRGB> Final;
  icp.align(Final);
  std::cout << "has converged:" << icp.hasConverged() << " score: " <<
  icp.getFitnessScore() << std::endl;
  std::cout << icp.getFinalTransformation() << std::endl;
  t_end = clock();
  elapsed = (double)(t_end - t_begin) / CLOCKS_PER_SEC;
  cout << "\n first time call icp process : " << elapsed << "seconds!" << endl;

  // Accumulate the aligned points into an intermediate cloud.
  for (size_t idx = 0; idx < Final.points.size(); ++idx)
  {
    pcl::PointXYZRGB pt;
    pt.x = Final.points[idx].x;
    pt.y = Final.points[idx].y;
    pt.z = Final.points[idx].z;
    pt.r = Final.points[idx].r;
    pt.g = Final.points[idx].g;
    pt.b = Final.points[idx].b;
    my_cloud->points.push_back(pt);
  }

  // --- second ICP pass: c onto the intermediate result ---
  t_begin = clock();
  icp.setInputCloud(cloud_out2);
  icp.setInputTarget(my_cloud);
  pcl::PointCloud<pcl::PointXYZRGB> Final2;
  icp.align(Final2);
  std::cout << "has converged:" << icp.hasConverged() << " score: " <<
  icp.getFitnessScore() << std::endl;
  std::cout << icp.getFinalTransformation() << std::endl;
  t_end = clock();
  elapsed = (double)(t_end - t_begin) / CLOCKS_PER_SEC;
  cout << "\n second time call icp process : " << elapsed << "seconds!" << endl;

  // Build the final merged cloud for display.
  for (size_t idx = 0; idx < Final2.points.size(); ++idx)
  {
    pcl::PointXYZRGB pt;
    pt.x = Final2.points[idx].x;
    pt.y = Final2.points[idx].y;
    pt.z = Final2.points[idx].z;
    pt.r = Final2.points[idx].r;
    pt.g = Final2.points[idx].g;
    pt.b = Final2.points[idx].b;
    my_cloud2->points.push_back(pt);
  }

  pcl::visualization::CloudViewer viewer("My First Cloud Viewer");
  viewer.showCloud(my_cloud2);
  // Spin until the viewer window is closed.
  while (!viewer.wasStopped())
  {
  }
  return 0;
}


運行結果好像不大對頭,收斂失敗了(三幅點雲圖像之間的交集過小所致。。)



換一個順序也不對:




四、編譯錯誤解決

代碼寫的是很簡單,但是各種開源的東西湊到一起就蛋疼了,這不遇到了opencv中的flann模塊與pcl中的flann模塊的衝突!

給大家講講我是怎麼發現的。。。

問題現象:


有個坑爹結構體,跑出來了!!咋辦?

source insight導入point cloud library 所有源碼,再把flann第三方所有頭文件導入,關聯後會找到這個坑爹結構體


然後看看有誰調用它,Flann.hpp


它位於pcl第三方目錄下


到現在爲止我覺得沒啥問題,由於《學習教程》中沒有引用flann.hpp頭文件,我想嘗試引用一下會不會解決問題呢,後來vs2012的智能關聯功能,幫了我大忙!


由於本次代碼沒有用到opencv的第三方庫,果斷幹掉opencv2這塊頭文件



萬事大吉!



發表評論
所有評論
還沒有人評論,想成為第一個評論的人麼? 請在上方評論欄輸入並且點擊發布.
相關文章