// Demo: use WebRTC's DirectShow video-capture module to grab camera frames
// and store them as a raw YUV (I420) file.

#include <cstdint>
#include <cstdio>

#include <memory>
#include <string>
#include <vector>

#include "webrtc/base/arraysize.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/refcount.h"
#include "webrtc/base/scoped_ref_ptr.h"
#include "webrtc/media/base/device.h"
#include "webrtc/media/base/videocommon.h"
#include "webrtc/modules/video_capture/video_capture_factory.h"
#include "webrtc/modules/video_capture/video_capture_impl.h"
#include "webrtc/modules/video_capture/windows/video_capture_ds.h"

using namespace webrtc;
using namespace webrtc::videocapturemodule;
using namespace cricket;

// Maps a cricket FOURCC code to the equivalent webrtc raw-video pixel type.
// Consulted by DSFormatToCapability when translating a capture format.
struct kDSVideoFourCCEntry {
	uint32_t fourcc;                   // cricket::FOURCC_* code.
	webrtc::RawVideoType webrtc_type;  // matching webrtc::RawVideoType.
};

// Lookup table of the FOURCC formats this demo accepts, roughly ordered from
// cheapest (no conversion) to most expensive (compressed / slow conversion).
kDSVideoFourCCEntry kDSSupportedFourCCs[] = {
	{ FOURCC_I420, webrtc::kVideoI420 },   // 12 bpp, no conversion.
	{ FOURCC_YV12, webrtc::kVideoYV12 },   // 12 bpp, no conversion.
	{ FOURCC_YUY2, webrtc::kVideoYUY2 },   // 16 bpp, fast conversion.
	{ FOURCC_UYVY, webrtc::kVideoUYVY },   // 16 bpp, fast conversion.
	{ FOURCC_NV12, webrtc::kVideoNV12 },   // 12 bpp, fast conversion.
	{ FOURCC_NV21, webrtc::kVideoNV21 },   // 12 bpp, fast conversion.
	{ FOURCC_MJPG, webrtc::kVideoMJPEG },  // compressed, slow conversion.
	{ FOURCC_ARGB, webrtc::kVideoARGB },   // 32 bpp, slow conversion.
	{ FOURCC_24BG, webrtc::kVideoRGB24 },  // 24 bpp, slow conversion.
};

// Frame sink: webrtc invokes OnIncomingCapturedFrame on the capture thread
// for every delivered frame; we append the frame's I420 planes to "video.yuv".
class DSImageData : public VideoCaptureDataCallback {
public:
	DSImageData() {};
	virtual ~DSImageData() {};

	// Called once per captured frame. Writes the Y, U and V planes row by row
	// (honoring each plane's stride) so the output file is tightly packed
	// I420: w*h luma bytes, then ((w+1)/2)*((h+1)/2) bytes each of U and V.
	virtual void OnIncomingCapturedFrame(const int32_t id, const VideoFrame& videoFrame){
		LOG(INFO) << "OnIncomingCapturedFrame==>>" << "height:" << videoFrame.height() << " width:" << videoFrame.width()
			<< " timestamp:" << videoFrame.render_time_ms() << " ";
		rtc::scoped_refptr<webrtc::VideoFrameBuffer> vfb = videoFrame.video_frame_buffer();
		// Demo shortcut: the file stays open for the whole process lifetime.
		static FILE *fp = fopen("video.yuv", "wb+");
		if (fp == NULL)
			return;
		const int width = videoFrame.width();
		const int height = videoFrame.height();
		// Y plane: exactly `width` bytes per row. The original code wrote
		// width*height*3/2 bytes straight from DataY(), reading past the end
		// of the luma plane and then duplicating the chroma planes on top.
		const uint8_t* y = vfb->DataY();
		for (int row = 0; row < height; ++row)
			fwrite(y + row * vfb->StrideY(), 1, width, fp);
		// U and V planes are subsampled by 2 in both dimensions.
		const int chroma_width = (width + 1) / 2;
		const int chroma_height = (height + 1) / 2;
		const uint8_t* u = vfb->DataU();
		for (int row = 0; row < chroma_height; ++row)
			fwrite(u + row * vfb->StrideU(), 1, chroma_width, fp);
		const uint8_t* v = vfb->DataV();
		for (int row = 0; row < chroma_height; ++row)
			fwrite(v + row * vfb->StrideV(), 1, chroma_width, fp);
		fflush(fp);
	};

	// Informational only: logged, nothing else to do in this demo.
	virtual void OnCaptureDelayChanged(const int32_t id, const int32_t delay) {
		LOG(INFO) << "OnCaptureDelayChanged";
	};
};


std::string DSGetDeviceName(const Device& device)
{
	DeviceInfoDS* info = DeviceInfoDS::Create(0);
	int num_cams = info->NumberOfDevices();
	char vcm_id[256] = "";
	bool found = false;
	for (int index = 0; index < num_cams; ++index) {
		char vcm_name[256];
		if (info->GetDeviceName(index, vcm_name, arraysize(vcm_name), vcm_id, arraysize(vcm_id), 0, 0) != -1) {
			if (device.name == reinterpret_cast<char*>(vcm_name)) {
				found = true;
				return vcm_id;
				break;
			}
		}
	}
	if (!found) {
		LOG(LS_WARNING) << "Failed to find capturer for id: " << device.id;
		delete info;
		return "";
	}
	return "";
}

bool DSFormatToCapability(const VideoFormat& format, webrtc::VideoCaptureCapability* cap) 
{
	webrtc::RawVideoType webrtc_type = webrtc::kVideoUnknown;
	for (size_t i = 0; i < arraysize(kDSSupportedFourCCs); ++i) {
		if (kDSSupportedFourCCs[i].fourcc == format.fourcc) {
			webrtc_type = kDSSupportedFourCCs[i].webrtc_type;
			break;
		}
	}
	if (webrtc_type == webrtc::kVideoUnknown) {
		return false;
	}

	cap->width = format.width;
	cap->height = format.height;
	cap->maxFPS = VideoFormat::IntervalToFps(format.interval);
	cap->expectedCaptureDelay = 0;
	cap->rawType = webrtc_type;
	cap->codecType = webrtc::kVideoCodecUnknown;
	cap->interlaced = false;
	return true;
}

int main(int argc, char* argv[])
{
	std::vector<std::string> device_names;
	{
		std::unique_ptr<webrtc::VideoCaptureModule::DeviceInfo> info(webrtc::VideoCaptureFactory::CreateDeviceInfo(0));
		if (!info)
		{
			return -1;
		}
		int num_devices = info->NumberOfDevices();
		for (int i = 0; i < num_devices; ++i)
		{
			const uint32_t kSize = 256;
			char name[kSize] = { 0 };
			char id[kSize] = { 0 };
			if (info->GetDeviceName(i, name, kSize, id, kSize) != -1)
			{
				device_names.push_back(name);
			}
		}
	}

#if 0
	for (const auto& name : device_names)
	{
		//rtc::scoped_refptr<VideoCaptureDS> capture(new rtc::RefCountedObject<VideoCaptureDS>(0));
		std::string strName = DSGetDeviceName(Device(name, 0));
		if(strName != "")
		if (capture->Init(0, strName.c_str()) != 0) {
			return -1;
		}
		if (capture) 
		{
			LOG(INFO) << "Get CanUsed Device";
			webrtc::VideoCaptureCapability cap;
			VideoFormat capture_format;
			capture_format.height = 720;
			capture_format.width = 1280;
			capture_format.interval = 33333333;
			capture_format.fourcc = 1196444237;
			if (!DSFormatToCapability(capture_format, &cap)) {
				LOG(LS_ERROR) << "Invalid capture format specified";
				return -1;
			}
			capture->StartCapture(cap);
			while (true)
			{
				Sleep(1);
			}
		}
	}
#endif
	for (const auto& name : device_names)
	{
		std::string strName = DSGetDeviceName(Device(name, 0));
		rtc::scoped_refptr<VideoCaptureModule> cam;
		
		if (strName != "")
			cam = VideoCaptureImpl::Create(0, strName.c_str());
		if (cam)
		{
			LOG(INFO) << "Get CanUsed Device";
			webrtc::VideoCaptureCapability cap;
			VideoFormat capture_format;
			capture_format.height = 720;
			capture_format.width = 1280;
			capture_format.interval = 33333333;
			capture_format.fourcc = 1196444237;
			if (!DSFormatToCapability(capture_format, &cap)) {
				LOG(LS_ERROR) << "Invalid capture format specified";
				return -1;
			}
			cam->StartCapture(cap);
			//int32_t VideoCaptureImpl::DeliverCapturedFrame(VideoFrame& captureFrame)
			//這個函數用於取數據,但是有圖void VideoCaptureImpl::RegisterCaptureDataCallback(
			//沒有註冊相關的取數據的類,所以一層沒辦法獲取攝像頭數據
			//註冊callback用於獲取數據
			DSImageData dsi;
			cam->RegisterCaptureDataCallback(dsi);

			while (true)
			{
				Sleep(1);
			}
		}
	}

	//webrtc::videocapturemodule::VideoCaptureDS * vcds = new VideoCaptureDS();
	return 0;
}

// End of example.