Orbbec Astra S development (two cameras separately capturing point clouds and RGB images)

This post wraps the Astra S depth camera operations in a class of its own; the project files and third-party library files are available for download in my resources.
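Before the code itself, here is a minimal sketch of the intended call sequence for a single camera, based on the CameraOperate class and main program below (error handling and the second camera are omitted; openni::ANY_DEVICE can be replaced by a URI from openni::OpenNI::enumerateDevices()):

//Sketch: one grab-and-save cycle with the wrapper class defined below.
#include "CCameraOperate.h"

int demo()
{
	CameraOperate camera;
	camera.InitializeDevice();                    //openni::OpenNI::initialize()
	camera.OpenDevice(openni::ANY_DEVICE, 1);     //or a URI from enumerateDevices()
	camera.CreateRGBStream(1);
	camera.CreateDepthStream(1);
	camera.StartRGBStream(1);
	camera.StartDepthStream(1);

	cv::Mat color(480, 640, CV_8UC3), depth(480, 640, CV_16UC1);
	cv::Mat registered(480, 640, CV_16UC1);
	camera.ReadRGBDepth_Frame(color, depth, 1);   //grab one RGB frame and one depth frame
	camera.MappingDepth2Color(depth, registered); //register depth to the color viewpoint
	camera.MatToPointCloud(registered, color);    //back-project to 3D vertices
	camera.SavePointCloud("PointCloud1.txt");     //dump as plain "x y z" text

	camera.StopDepthStream(1);
	camera.StopRGBStream(1);
	camera.DestroyDepthStream(1);
	camera.DestroyRGBStream(1);
	camera.CloseDevice(1);
	return 0;
}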

The project's include directories are:

//adjust these to your own install paths
D:\Program Files\ThirdParty\OpenNI2\Include
D:\Program Files\ThirdParty\OpenCV241\Include
D:\Program Files\ThirdParty\Eigen3.2.5

The library directories are:

//adjust these to your own install paths
D:\Program Files\ThirdParty\OpenNI2\Lib
D:\Program Files\ThirdParty\OpenCV241\Lib

Additional dependencies (linker input):

glut32.lib
OpenNI2.lib
opencv_calib3d2410.lib
opencv_core2410.lib
opencv_highgui2410.lib
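(These are the Release libraries; for Debug builds, link the d-suffixed variants instead, e.g. opencv_core2410d.lib, following the usual OpenCV naming convention.)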

The .h file:

#include <OpenNI.h>
#include <iostream>
#include <ctime>
#include "opencv/highgui.h"
#include "opencv2/imgproc/imgproc_c.h"
#include "opencv2/core/core.hpp"
#include "opencv2/imgproc/imgproc.hpp"
#include "opencv2/highgui/highgui.hpp"
#include "opencv2/calib3d/calib3d.hpp"
#include <fstream>
#include <cstring>     //memcpy, used in FrameToRGB
#include <omp.h>

#define SAMPLE_READ_WAIT_TIMEOUT 2000 //2000ms

using namespace std;
using namespace cv;
using namespace openni;


//One BGR pixel. OpenCV Mats store channels in BGR order, while OpenNI delivers
//OniRGB888Pixel in RGB order, so FrameToRGB() repacks each pixel into this struct.
typedef struct
{
	/* Blue value of this pixel. */
	uint8_t b;
	/* Green value of this pixel. */
	uint8_t g;
	/* Red value of this pixel. */
	uint8_t r;
} OniBGR888Pixel;

class CameraOperate
{

private:
	openni::Device device1;
	openni::Device device2;
	VideoStream depth1, color1;
	openni::VideoFrameRef m_depth_frame1;
	openni::VideoFrameRef m_RGB_frame1;

	openni::VideoFrameRef m_depth_frame2;
	openni::VideoFrameRef m_RGB_frame2;

	openni::VideoFrameRef frame1;
	
	VideoStream depth2, color2;
	openni::VideoFrameRef frame2;

	int readyStream;
	float vertex[1024][1280][3];   //per-pixel world coordinates (x, y, z), sized for up to 1280x1024
	int mHeight;
	int mWidth;

public:
	CameraOperate();
	~CameraOperate();

	int InitializeDevice();                                              //initialize the OpenNI environment
	int OpenDevice(const char* uri, int Num);                            //open a device
	int CloseDevice(int Num);                                            //close a device
	int CreateRGBStream(int Num);                                        //create the RGB stream
	int CreateDepthStream(int Num);                                      //create the depth stream
	int StartRGBStream(int Num);                                         //start the RGB stream
	int StartDepthStream(int Num);                                       //start the depth stream
	int StopRGBStream(int Num);                                          //stop the RGB stream
	int StopDepthStream(int Num);                                        //stop the depth stream
	int DestroyRGBStream(int Num);                                       //stop and destroy the RGB stream
	int DestroyDepthStream(int Num);                                     //stop and destroy the depth stream
	int ReadRGBDepth_Frame(Mat& color, Mat& depth, int Num);             //read one RGB frame and one depth frame


	void ConvertProjectiveToWorld(int u, int v, int z, float& px, float& py, float& pz);  //depth pixel -> 3D point
	void TransformPointToPoint(float dst[3], const float src[3]);                         //depth camera frame -> color camera frame
	void ConvertWorldToProjective(float x, float y, float z, float& u, float& v);         //3D point -> color pixel
	int FrameToDepth(const VideoFrameRef& frame, Mat& depth);
	void MappingDepth2Color(cv::Mat &src, cv::Mat &dst);
	int MatToPointCloud(Mat depthMap, Mat colorMap);
	int SavePointCloud(const char* fileName);
	int FrameToRGB(const VideoFrameRef &frame, Mat &color);
	
};

The .cpp file:

#include "CCameraOperate.h"

//Constructor
 CameraOperate::CameraOperate()
{
	 readyStream = -1;
	 mHeight = 0;
	 mWidth = 0;
}

 //Destructor
 CameraOperate::~CameraOperate()
 {
 
 }

 //Initialize the OpenNI environment
 int CameraOperate::InitializeDevice()
 {
	 openni::Status rc = openni::STATUS_OK;
	 rc = openni::OpenNI::initialize();
	 if (rc != openni::STATUS_OK)
	 {
		 printf("Initialize failed\n%s\n", openni::OpenNI::getExtendedError());
		 return 1;
	 }
	 return 0;
 
 }

 //Open a device
 int CameraOperate::OpenDevice(const char* uri,int Num)
 {
	 if (Num == 1)
	 {
		 openni::Status rc = openni::STATUS_OK;
		 rc = device1.open(uri);
		 if (rc != openni::STATUS_OK)
		 {
			 printf("Couldn't open device1\n%s\n", openni::OpenNI::getExtendedError());
			 return 2;
		 }
	 }
	 else
	 {
		 openni::Status rc = openni::STATUS_OK;
		 rc = device2.open(uri);
		 if (rc != openni::STATUS_OK)
		 {
			 printf("Couldn't open device2\n%s\n", openni::OpenNI::getExtendedError());
			 return 2;
		 }
	 }
	 return 0;
 }

 //Close a device
 int  CameraOperate::CloseDevice(int Num)
 {
	 if (Num == 1)
	 {
		 device1.close();
	 }
	 else
	 {
		 device2.close();
	 }
	 return 0;
 }

//Create the RGB stream
 int CameraOperate::CreateRGBStream(int Num)
 {
	 if (Num == 1)
	 {
		 openni::Status rc = openni::STATUS_OK;
		 rc = color1.create(device1, openni::SENSOR_COLOR);
		 if (rc != openni::STATUS_OK)
		 {
			 printf("Couldn't Create RGB Stream\n%s\n", openni::OpenNI::getExtendedError());
			 return 3;
		 }
	 }
	 else
	 {
		 openni::Status rc = openni::STATUS_OK;
		 rc = color2.create(device2, openni::SENSOR_COLOR);
		 if (rc != openni::STATUS_OK)
		 {
			 printf("Couldn't Create RGB Stream\n%s\n", openni::OpenNI::getExtendedError());
			 return 3;
		 }
	 }
	 return 0;
 }

 //Create the depth stream
 int CameraOperate::CreateDepthStream(int Num)
 {
	 if (Num == 1)
	 {

		 openni::Status rc = openni::STATUS_OK;
		 rc = depth1.create(device1, openni::SENSOR_DEPTH);
		 if (rc != openni::STATUS_OK)
		 {
			 printf("Couldn't Create Depth Stream\n%s\n", openni::OpenNI::getExtendedError());
			 return 4;
		 }
	 }
	 else
	 {
		 openni::Status rc = openni::STATUS_OK;
		 rc = depth2.create(device2, openni::SENSOR_DEPTH);
		 if (rc != openni::STATUS_OK)
		 {
			 printf("Couldn't Create Depth Stream\n%s\n", openni::OpenNI::getExtendedError());
			 return 4;
		 }
	 }
	 return 0;
 }
	
//Start the RGB stream
int CameraOperate::StartRGBStream(int Num)
{
	 if (Num == 1)
	 {
		 openni::Status rc = color1.start();
		 if (rc != openni::STATUS_OK)
		 {
			 printf("Couldn't start the rgb stream\n%s\n", openni::OpenNI::getExtendedError());
			 return 5;
		 }
	 }
	 else
	 {
		 openni::Status rc = color2.start();
		 if (rc != openni::STATUS_OK)
		 {
			 printf("Couldn't start the rgb stream\n%s\n", openni::OpenNI::getExtendedError());
			 return 5;
		 }
	 }
	 return 0;
}

 //Start the depth stream
 int CameraOperate::StartDepthStream(int Num)
 {
	 if (Num == 1)
	 {
		 openni::Status rc = openni::STATUS_OK;
		 if (!depth1.isValid())
		 {
			 return -1;
		 }
		 rc = depth1.start();
		 if (rc != openni::STATUS_OK)
		 {
			 printf("Couldn't start the depth stream\n%s\n", openni::OpenNI::getExtendedError());
			 return 6;
		 }
	 }
	 else
	 {
		 openni::Status rc = openni::STATUS_OK;
		 if (!depth2.isValid())
		 {
			 return -1;
		 }
		 rc = depth2.start();
		 if (rc != openni::STATUS_OK)
		 {
			 printf("Couldn't start the depth stream\n%s\n", openni::OpenNI::getExtendedError());
			 return 6;
		 }
	 }
	 return 0;
 }

//Stop the RGB stream
 int CameraOperate::StopRGBStream(int Num)
 {
	 if (Num == 1)
	 {
		 color1.stop();
	 }
	 else
	 {
		 color2.stop();
	 }
	 return 0;
 }

 //Stop the depth stream
 int CameraOperate::StopDepthStream(int Num)
 {
	 if (Num == 1)
	 {
		 if (!depth1.isValid())
		 {
			 return -1;
		 }
		 depth1.stop();
	 }
	 else
	 {
		 if (!depth2.isValid())
		 {
			 return -1;
		 }
		 depth2.stop();
	 }

	 return 0;
 }

 //Stop and destroy the RGB stream
 int CameraOperate::DestroyRGBStream(int Num)
 {
	 if (Num == 1)
		 color1.destroy();
	 else
		 color2.destroy();
	 return 0;
 }
 
 //Stop and destroy the depth stream
 int CameraOperate::DestroyDepthStream(int Num)
 {
	 if (Num == 1)
	 {
		 depth1.destroy();
	 }
	 else
	 {
		 depth2.destroy();
	 }
	 return 0;
 }

//Read one RGB frame and one depth frame and convert them into Mat form
 int CameraOperate::ReadRGBDepth_Frame(Mat& BGRcolor1, Mat& depth3, int Num)
 {
	 if (Num == 1)
	 {
		 openni::Status rc = openni::STATUS_OK;
		 VideoStream* streams_Depth1 = &depth1;
		 int readyStream_Depth1 = -1;
		 rc = OpenNI::waitForAnyStream(&streams_Depth1, 1, &readyStream_Depth1, SAMPLE_READ_WAIT_TIMEOUT);
		 if (rc == STATUS_OK)
		 {
			 depth1.readFrame(&m_depth_frame1);
			 if (m_depth_frame1.isValid())
			 {
				 FrameToDepth(m_depth_frame1, depth3);
			 }
		 }

		 VideoStream* streams_RGB1 = &color1;
		 int readyStream_RGB1 = -1;
		 rc = OpenNI::waitForAnyStream(&streams_RGB1, 1, &readyStream_RGB1, SAMPLE_READ_WAIT_TIMEOUT);
		 if (rc == STATUS_OK)
		 {
			 color1.readFrame(&m_RGB_frame1);
			 if (m_RGB_frame1.isValid())
			 {
				 FrameToRGB(m_RGB_frame1, BGRcolor1);
			 }
		 }
	 }
	 else
	 {
		 openni::Status rc = openni::STATUS_OK;
		 VideoStream* streams_Depth2 = &depth2;
		 int readyStream_Depth2 = -1;
		 rc = OpenNI::waitForAnyStream(&streams_Depth2, 1, &readyStream_Depth2, SAMPLE_READ_WAIT_TIMEOUT);
		 if (rc == STATUS_OK)
		 {
			 depth2.readFrame(&m_depth_frame2);
			 if (m_depth_frame2.isValid())
			 {
				 FrameToDepth(m_depth_frame2, depth3);
			 }
		 }

		 VideoStream* streams_RGB2 = &color2;
		 int readyStream_RGB2 = -1;
		 rc = OpenNI::waitForAnyStream(&streams_RGB2, 1, &readyStream_RGB2, SAMPLE_READ_WAIT_TIMEOUT);
		 if (rc == STATUS_OK)
		 {
			 color2.readFrame(&m_RGB_frame2);
			 if (m_RGB_frame2.isValid())
			 {
				 FrameToRGB(m_RGB_frame2, BGRcolor1);
			 }
		 }
	 }
	 return 0;
 }

 //Back-project a depth pixel (u, v) with depth z to a 3D point in the depth camera frame.
 //The hard-coded numbers are this camera's calibrated intrinsics (fx, fy, cx, cy) and
 //distortion coefficients; replace them with your own calibration results.
 void CameraOperate::ConvertProjectiveToWorld(int u, int v, int z, float& px, float& py, float& pz)
 {
	 float ifx, ify;
	 ifx = 1. / 550.31324;                      // 1/fx
	 ify = 1. / 530.95258;                      // 1/fy

	 float tx = (u - 308.68595) * ifx;          // (u - cx) / fx
	 float ty = (v - 239.44117) * ify;          // (v - cy) / fy
	 float x0 = tx;
	 float y0 = ty;

	 //Iteratively undo the lens distortion: radial k1, k2 (k3 = 0) and tangential
	 //p1, p2, using the same fixed-point scheme as cv::undistortPoints.
	 for (int j = 0; j < 5; j++)
	 {
		 double r2 = tx * tx + ty * ty;
		 double icdist = 1. / (1 + ((0.00000000 * r2 + 0.31352) * r2 - 0.07364) * r2);
		 double deltaX = 2 * (-0.01153) * tx * ty + (-0.00525) * (r2 + 2 * tx * tx);
		 double deltaY = (-0.01153) * (r2 + 2 * ty * ty) + 2 * (-0.00525) * tx * ty;
		 tx = (x0 - deltaX) * icdist;
		 ty = (y0 - deltaY) * icdist;
	 }

	 px = z * tx;
	 py = z * ty;
	 pz = z;
 }

 //Apply the rigid extrinsic transform dst = R * src + t that maps a point from the
 //depth camera frame into the color camera frame (values from stereo calibration).
 void CameraOperate::TransformPointToPoint(float dst[3], const float src[3])
 {
	 dst[0] = 0.99990161 * src[0] + -0.00319713 * src[1] + -0.01365809 * src[2] + 0.02504946;
	 dst[1] = 0.00327611 * src[0] + 0.99997802 * src[1] + 0.00576428 * src[2] + 0.00006017;
	 dst[2] = 0.01363936 * src[0] + -0.00580846 * src[1] + 0.99989011 * src[2] + -0.00260149;
 }

 //Project a 3D point in the color camera frame onto the color image plane,
 //applying the color camera's distortion model and intrinsics.
 void CameraOperate::ConvertWorldToProjective(float x, float y, float z, float& u, float& v)
 {
	 float tx = x / z;
	 float ty = y / z;
	 float r2 = tx * tx + ty * ty;
	 float f = 1 + -0.01595141 * r2 + 0.02629677 * r2 * r2 + 0.00000000 * r2 * r2 * r2;   //radial distortion
	 tx *= f;
	 ty *= f;
	 float dx = tx + 2 * 0.00000000 * tx * ty + 0.00000000 * (r2 + 2 * tx * tx);          //tangential distortion (zero here)
	 float dy = ty + 2 * 0.00000000 * tx * ty + 0.00000000 * (r2 + 2 * ty * ty);
	 tx = dx;
	 ty = dy;
	 u = tx * 524.18651391 + 320.79175117;   // u = fx' * x + cx'
	 v = ty * 486.50458757 + 240.99268524;   // v = fy' * y + cy'
 }
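
Chained together, the three functions above register a single depth pixel into the color image; MappingDepth2Color() below simply runs this chain for every pixel. A minimal sketch for one pixel (the camera instance and the depthImg Mat are placeholder names):

 //Sketch: register one depth pixel into the color image.
 int u = 320, v = 240;                          //example pixel
 int d = depthImg.at<uint16_t>(v, u);           //raw depth value (mm)
 float point[3], to_point[3], pixel[2];
 camera.ConvertProjectiveToWorld(u, v, d, point[0], point[1], point[2]); //pixel -> 3D (depth frame)
 camera.TransformPointToPoint(to_point, point);                          //depth frame -> color frame
 camera.ConvertWorldToProjective(to_point[0], to_point[1], to_point[2],
                                 pixel[0], pixel[1]);                    //3D -> color pixel (u', v')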

 //Copy a 16-bit OpenNI depth frame into a CV_16UC1 Mat (assumes 640x480 streams).
 int CameraOperate::FrameToDepth(const VideoFrameRef& frame, Mat& depth)
 {
	 uint16_t *pPixel;
	 for (int y = 0; y < 480; y++)
	 {
		 //getStrideInBytes() accounts for any row padding in the frame buffer
		 pPixel = (uint16_t*)((char*)frame.getData() + (int)y * frame.getStrideInBytes());
		 uint16_t* data = (uint16_t*)depth.ptr<uchar>(y);
		 for (int x = 0; x < 640; x++)
		 {
			 *data++ = *pPixel++;
		 }
	 }
	 return 0;
 }

 //Register the depth image (src) to the color camera viewpoint (dst): for each
 //depth pixel, back-project to 3D, transform into the color camera frame,
 //re-project onto the color image, and write the depth value there.
 void CameraOperate::MappingDepth2Color(cv::Mat &src, cv::Mat &dst)
 {
	 double z;
	 float pixel[2];
	 float point[3], to_point[3];
	 uint16_t u, v, d;
	 int u_rgb = 0, v_rgb = 0;   //int, so the < 0 bounds check below can actually fail
	 cv::Mat newdepth(dst.rows, dst.cols, CV_16UC1, cv::Scalar(0));
	 for (v = 0; v < src.rows; v++)
	 {
		 for (u = 0; u < src.cols; u++)
		 {
			 d = src.at<uint16_t>(v, u);
			 if (d == 0) continue;   //no depth measurement at this pixel
			 z = (double)d;

			 ConvertProjectiveToWorld(u, v, z, point[0], point[1], point[2]);
			 TransformPointToPoint(to_point, point);
			 ConvertWorldToProjective(to_point[0], to_point[1], to_point[2], pixel[0], pixel[1]);

			 u_rgb = (int)pixel[0];
			 v_rgb = (int)pixel[1];

			 if (u_rgb < 0 || u_rgb >= newdepth.cols || v_rgb < 0 || v_rgb >= newdepth.rows) continue;
			 newdepth.at<uint16_t>(v_rgb, u_rgb) = d;
		 }
	 }

	 dst = newdepth;
 }

 //Back-project the whole depth map into the vertex[][] array of 3D points
 //(colorMap is kept in the signature for a future colored point cloud; it is unused here).
 int CameraOperate::MatToPointCloud(Mat depthMap, Mat colorMap)
 {
	 int depthW = depthMap.cols;
	 int depthH = depthMap.rows;
	 mWidth = depthW;
	 mHeight = depthH;

	 // update vertex
#pragma omp parallel for
	 for (int v = 0; v < depthH; v++)
	 {
		 unsigned short* pDepth = (unsigned short*)depthMap.ptr<uchar>(v);

		 for (int u = 0; u < depthW; u++)
		 {
			 int z = *pDepth++;
			 float px = 0;
			 float py = 0;
			 float pz = 0;
			 ConvertProjectiveToWorld(u, v, z, px, py, pz);            //depth pixel -> world coordinates
			 vertex[v][u][0] = px;
			 vertex[v][u][1] = py;
			 vertex[v][u][2] = (float)z;
		 }
	 }

	 return 0;
 }

 int CameraOperate::SavePointCloud(const char* fileName)
 {

	 ofstream ouF;
	 ouF.open(fileName, ofstream::out);
	 if (!ouF)
	 {
		 cerr << "failed to open the file : " << fileName << endl;
		 return -1;
	 }
	 /*
	 ouF << "ply" << std::endl;
	 ouF << "format ascii 1.0" << std::endl;
	 ouF << "comment made by Orbbec " << std::endl;
	 ouF << "comment Orbbec Co.,Ltd." << std::endl;
	 ouF << "element vertex " << mWidth * mHeight << std::endl;
	 ouF << "property float32 x" << std::endl;
	 ouF << "property float32 y" << std::endl;
	 ouF << "property float32 z" << std::endl;
	 ouF << "property uint8 red" << std::endl;
	 ouF << "property uint8 green" << std::endl;
	 ouF << "property uint8 blue" << std::endl;
	 ouF << "element face 0" << std::endl;
	 ouF << "property list uint8 int32 vertex_index" << std::endl;
	 ouF << "end_header" << std::endl;
	 */

	 for (int v = 0; v < mHeight; v++)
	 {
		 for (int u = 0; u < mWidth; u++)
		 {
			 float x = vertex[v][u][0];
			 float y = vertex[v][u][1];
			 float z = vertex[v][u][2];

			 ouF << x << " " << y << " " << z << std::endl;
			 //ouF << x << " " << y << " " << z << " " << r << " " << g << " " << b << std::endl;
		 }
	 }

	 ouF.close();

	 return 0;
 }
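
A note on the commented-out block above: it is a complete PLY header, so the function can be switched from plain "x y z" text to a viewer-friendly .ply file by uncommenting it. Be aware that the header also declares red, green and blue properties, so the commented-out write line in the loop would then need real per-point colors (e.g. sampled from the colorMap passed to MatToPointCloud) before tools like MeshLab will parse the file.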

 //Convert an OpenNI RGB frame into a CV_8UC3 (BGR) Mat, scaling into 640x480 by
 //nearest-neighbour lookup and swapping OpenNI's RGB order to OpenCV's BGR order.
 int CameraOperate::FrameToRGB(const VideoFrameRef &frame, Mat &color)
 {
	 double resizeFactor = std::min((640 / (double)frame.getWidth()), (480 / (double)frame.getHeight()));
	 unsigned int texture_x = (unsigned int)(640 - (resizeFactor * frame.getWidth())) / 2;
	 unsigned int texture_y = (unsigned int)(480 - (resizeFactor * frame.getHeight())) / 2;
	 OniBGR888Pixel ss;
	 for (unsigned int y = 0; y < (480 - 2 * texture_y); ++y)
	 {
		 uint8_t* data = (uint8_t*)color.ptr<uchar>(y);

		 for (unsigned int x = 0; x < (640 - 2 * texture_x); ++x)
		 {
			 OniRGB888Pixel* streamPixel = (OniRGB888Pixel*)((char*)frame.getData() + ((int)(y / resizeFactor) * frame.getStrideInBytes())) + (int)(x / resizeFactor);

			 ss.b = streamPixel->b;
			 ss.g = streamPixel->g;
			 ss.r = streamPixel->r;
			 memcpy(data, &ss, 3);
			 data = data + 3;
		 }
	 }

	 return 0;
 }
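
For a quick visual check of what the class delivers, here is a small sketch (not part of the original project) using OpenCV's highgui; the 16-bit depth map is scaled to 8 bits for display, assuming a working range of about 4 m in millimetres:

	//Sketch: preview one grabbed frame pair; assumes the streams are already started.
	cv::Mat bgr(480, 640, CV_8UC3), depth16(480, 640, CV_16UC1), depth8;
	camera.ReadRGBDepth_Frame(bgr, depth16, 1);
	depth16.convertTo(depth8, CV_8U, 255.0 / 4096.0);   //map 0..4096 mm to 0..255
	cv::imshow("color", bgr);
	cv::imshow("depth", depth8);
	cv::waitKey(1);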

The main program:

/*****************************************************************************
*                                                                            *
*  OpenNI 2.x Alpha                                                          *
*  Copyright (C) 2012 PrimeSense Ltd.                                        *
*                                                                            *
*  This file is part of OpenNI.                                              *
*                                                                            *
*  Licensed under the Apache License, Version 2.0 (the "License");           *
*  you may not use this file except in compliance with the License.          *
*  You may obtain a copy of the License at                                   *
*                                                                            *
*      http://www.apache.org/licenses/LICENSE-2.0                            *
*                                                                            *
*  Unless required by applicable law or agreed to in writing, software       *
*  distributed under the License is distributed on an "AS IS" BASIS,         *
*  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  *
*  See the License for the specific language governing permissions and       *
*  limitations under the License.                                            *
*                                                                            *
*****************************************************************************/
#include <OpenNI.h>
#include <iostream>
#include <ctime>
#include "opencv/highgui.h"
#include "opencv2/imgproc/imgproc_c.h"
#include "opencv2/core/core.hpp"
#include "opencv2/imgproc/imgproc.hpp"
#include "opencv2/highgui/highgui.hpp"
#include "opencv2/calib3d/calib3d.hpp"
#include <fstream>
#include <omp.h>
#include <windows.h>   //SYSTEMTIME, GetLocalTime, GetLastError
#include "CCameraOperate.h"

#define SAMPLE_READ_WAIT_TIMEOUT 2000 //2000ms
#define REC_WIDTH   640
#define REC_HEIGHT  480
#define CONFIG_INI          "./config.ini"
#define CAMERA_PARAMS_INI   "./camera_params.ini"

using namespace openni;
using namespace std;
using namespace cv;


void WriteLog(double RunTime, double NotStartStream1, double WaitFailed1, double NotStartStream2, double WaitFailed2, double RunNum)
{
	SYSTEMTIME st;
	GetLocalTime(&st);
	FILE *fp = fopen("log.txt", "at");
	if (fp == NULL) return;
	fprintf(fp, "MyLogInfo: %d:%d:%d:%d\n ", st.wHour, st.wMinute, st.wSecond, st.wMilliseconds);
	fprintf(fp, "Run time: %f\n", RunTime);
	fprintf(fp, "Run count: %f\n", RunNum);
	fprintf(fp, "Device 1 stream-start failures: %f  Device 1 wait timeouts: %f\n", NotStartStream1, WaitFailed1);
	fprintf(fp, "Device 2 stream-start failures: %f  Device 2 wait timeouts: %f\n", NotStartStream2, WaitFailed2);
	fclose(fp);
}

int main(int argc, char** argv)
{
	openni::Status rc = openni::STATUS_OK;
	openni::Status rc1 = openni::STATUS_OK;
	openni::Status rc2 = openni::STATUS_OK;
	//matrices for camera 1
	Mat     cvRGBImg1(480, 640, CV_8UC3);
	Mat     cvBGRImg1(480, 640, CV_8UC3);
	Mat     cvIRImg1(480, 640, CV_16UC1);
	Mat     cvDepthImg1(480, 640, CV_16UC1);
	Mat     CaliDepth1(480, 640, CV_16UC1);
	//matrices for camera 2
	Mat     cvRGBImg2(480, 640, CV_8UC3);
	Mat     cvIRImg2(480, 640, CV_16UC1);
	Mat     cvDepthImg2(480, 640, CV_16UC1);
	Mat     CaliDepth2(480, 640, CV_16UC1);
	Mat     cvBGRImg2(480, 640, CV_8UC3);

	CameraOperate camera;
//	CameraOperate camera2;

	rc = openni::OpenNI::initialize();//initialize the OpenNI environment
	if (rc != openni::STATUS_OK)
	{
		printf("%s: Initialize failed\n%s\n", argv[0], openni::OpenNI::getExtendedError());
		return 1;
	}

	openni::Array<openni::DeviceInfo> deviceList;
	openni::OpenNI::enumerateDevices(&deviceList);//enumerate the connected devices

	const char* device1Uri;//device URIs
	const char* device2Uri;

	switch (argc)
	{
	case 1:
		if (deviceList.getSize() < 2)
		{
			printf("Missing devices\n");
			openni::OpenNI::shutdown();
			return 1;
		}
		device1Uri = deviceList[1].getUri();
		device2Uri = deviceList[0].getUri();
		break;
	case 2:
		if (deviceList.getSize() < 1)
		{
			printf("Missing devices\n");
			openni::OpenNI::shutdown();
			return 1;
		}
		device1Uri = argv[1];
		if (strcmp(deviceList[0].getUri(), device1Uri) != 0)
			device2Uri = deviceList[0].getUri();
		else
			device2Uri = deviceList[1].getUri();
		break;
	default:
		device1Uri = argv[1];
		device2Uri = argv[2];
	}


	SYSTEMTIME sys;
	double Time0 = 0;
	double Time1 = 0;
	double Time11 = 0;
	double Time2 = 0;
	double Time22 = 0;
	double Time33 = 0;
	double Sum = 0;
	GetLocalTime(&sys);
	time_t t1;
	time_t t2;
	time(&t1);


	double NotStartStream1 = 0;                       //number of times device 1 failed to start its stream
	double NotStartStream2 = 0;                       //number of times device 2 failed to start its stream
	double WaitFailed1 = 0;                           //number of times device 1 timed out waiting
	double WaitFailed2 = 0;                           //number of times device 2 timed out waiting

	int OpenStatus1 = camera.OpenDevice(device1Uri, 1);
	int CreateColorStatus1 = camera.CreateRGBStream(1);
	int CreateDepthStatus1 = camera.CreateDepthStream(1);
	int StartRGBStatus1 = camera.StartRGBStream(1);

	if (OpenStatus1 || CreateColorStatus1 || CreateDepthStatus1 || StartRGBStatus1)
	{
		printf("Device 1 failed to initialize!\n");
	}


	int OpenStatus2 = camera.OpenDevice(device2Uri, 2);
	int CreateColorStatus2 = camera.CreateRGBStream(2);
	int CreateDepthStatus2 = camera.CreateDepthStream(2);
	int StartRGBStatus2 = camera.StartRGBStream(2);
	if (OpenStatus2 || CreateColorStatus2 || CreateDepthStatus2 || StartRGBStatus2)
	{
		printf("Device 2 failed to initialize!\n");
	}
  
	while (1)
	{
		if (rc1 != openni::STATUS_OK)
		{
			//device 1 failed earlier: reopen it and recreate its depth stream
			camera.OpenDevice(device1Uri, 1);
			camera.CreateDepthStream(1);
		}

		if (rc2 != openni::STATUS_OK)
		{
			//device 2 failed earlier: reopen it and recreate its depth stream
			camera.OpenDevice(device2Uri, 2);
			camera.CreateDepthStream(2);
		}
	
		while (1)
		{
			GetLocalTime(&sys);                                                                          //current system time
			Time0 = sys.wMinute * 60 * 1000 + sys.wSecond * 1000 + sys.wMilliseconds;

			/*******************Device 1: start the stream and read********************************************/

			int StartDepthStatus1 = camera.StartDepthStream(1);
			if (StartDepthStatus1 != 0)
			{
				printf("%s: Couldn't start depth1 stream %d on device %s\n%s\n", argv[0], openni::SENSOR_DEPTH, device1Uri, openni::OpenNI::getExtendedError());
				NotStartStream1++;
				rc1 = openni::STATUS_ERROR;                           //mark device 1 as failed so the outer loop reopens it
				camera.StopDepthStream(1);
				camera.DestroyDepthStream(1);
				camera.CloseDevice(1);
				break;                                                //jump back to the outer loop to restart device 1
			}

			int ReadFrameStatus1 = camera.ReadRGBDepth_Frame(cvBGRImg1, cvDepthImg1, 1);

			if (ReadFrameStatus1 == 0)
			{
				cv::imwrite("Image1.jpg", cvBGRImg1);                  //save the color frame to disk
				camera.MappingDepth2Color(cvDepthImg1, CaliDepth1);    //register depth to the color viewpoint
				camera.MatToPointCloud(CaliDepth1, cvRGBImg1);         //back-project to 3D
				camera.SavePointCloud("PointCloud1.txt");
				rc1 = openni::STATUS_OK;
			}

			int StopStatus1 = camera.StopDepthStream(1);
			if (StopStatus1 != 0)
			{
				printf("Stop failed! (timeout is %d ms)\n%s\n", SAMPLE_READ_WAIT_TIMEOUT, OpenNI::getExtendedError());
			}
			GetLocalTime(&sys);
			Time1 = sys.wMinute * 60 * 1000 + sys.wSecond * 1000 + sys.wMilliseconds;
			Time11 = Time1 - Time0;
			cout << "Device 1 took: " << Time11 << " ms" << endl;

			/*******************Device 2: start the stream and read********************************************/

			int StartDepthStatus2 = camera.StartDepthStream(2);
			if (StartDepthStatus2 != 0)
			{
				printf("%s: Couldn't start depth2 stream %d on device %s\n%s\n", argv[0], openni::SENSOR_DEPTH, device2Uri, openni::OpenNI::getExtendedError());
				NotStartStream2++;
				rc2 = openni::STATUS_ERROR;                           //mark device 2 as failed so the outer loop reopens it
				camera.StopDepthStream(2);
				camera.DestroyDepthStream(2);
				camera.CloseDevice(2);
				break;                                                //jump back to the outer loop to restart device 2
			}

			int ReadFrameStatus2 = camera.ReadRGBDepth_Frame(cvBGRImg2, cvDepthImg2, 2);

			if (ReadFrameStatus2 == 0)
			{
				cv::imwrite("Image2.jpg", cvBGRImg2);                  //save the color frame to disk
				camera.MappingDepth2Color(cvDepthImg2, CaliDepth2);    //register depth to the color viewpoint
				camera.MatToPointCloud(CaliDepth2, cvRGBImg2);         //back-project to 3D
				camera.SavePointCloud("PointCloud2.txt");
				rc2 = openni::STATUS_OK;
			}
			int StopStatus2 = camera.StopDepthStream(2);
			if (StopStatus2 != 0)
			{
				printf("Stop failed! (timeout is %d ms)\n%s\n", SAMPLE_READ_WAIT_TIMEOUT, OpenNI::getExtendedError());
			}

			GetLocalTime(&sys);
			Time2 = sys.wMinute * 60 * 1000 + sys.wSecond * 1000 + sys.wMilliseconds;
			Time22 = Time2 - Time1;
			cout << "Device 2 took: " << Time22 << " ms" << endl;
			Time33 = Time2 - Time0;
			cout << "Devices 1 and 2 together took: " << Time33 << " ms" << endl;
			cout << "Iteration count: " << Sum << endl;
			Sum++;
			time(&t2);
			cout << "Total run time: " << t2 - t1 << " s" << endl;
			cout << endl;

			if ((int)Sum % 100 == 0)
			{
				WriteLog(t2 - t1, NotStartStream1, WaitFailed1, NotStartStream2, WaitFailed2, Sum);
			}
		}
	}
}
