Analysis of a live555-based real-time streaming server

#define READ_FROM_FILES_SYNCHRONOUSLY 1
void ByteFrameLiveVideoSource::doGetNextFrame()
{
	if(fLimitNumBytesToStream && fNumBytesToStream == 0) {
		handleClosure(this);
		return;
	}
//	printf("ByteFrameLiveVideoSource doGetNextFrame 1: \r\n");


#ifdef READ_FROM_FILES_SYNCHRONOUSLY
	// Get the frame directly from the encoder
	doGetNextFrameFormEncoder();
#else
	   
	if (!fHaveStartedReading) {
	// Await readable data from the file:
		envir().taskScheduler().turnOnBackgroundReadHandling(GETFRAME_HANDLER_ID,
		(TaskScheduler::BackgroundHandlerProc*)&getFrameableHandler, this);
		fHaveStartedReading = True;
	}
#endif	  
}
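The asynchronous #else branch above registers getFrameableHandler, which the post never shows. A minimal sketch, modeled on live555's ByteStreamFileSource::fileReadableHandler and assuming GETFRAME_HANDLER_ID is a descriptor that becomes readable when the encoder has a frame ready:

void ByteFrameLiveVideoSource::getFrameableHandler(ByteFrameLiveVideoSource* source, int /*mask*/) {
	if (!source->isCurrentlyAwaitingData()) {
		// The sink is not ready for data yet; pause background reads until the next doGetNextFrame():
		source->envir().taskScheduler().turnOffBackgroundReadHandling(GETFRAME_HANDLER_ID);
		source->fHaveStartedReading = False;
		return;
	}
	source->doGetNextFrameFormEncoder();
}
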
void ByteFrameLiveVideoSource::doGetNextFrameFormEncoder() {

	// Try to read as many bytes as will fit in the buffer provided (or "fPreferredFrameSize" if less)
	
	if (fLimitNumBytesToStream && fNumBytesToStream < (u_int64_t)fMaxSize) {
		fMaxSize = (unsigned)fNumBytesToStream;
	}
	if (fPreferredFrameSize > 0 && fPreferredFrameSize < fMaxSize) {
		fMaxSize = fPreferredFrameSize;
	}
	else srcId = 0;

	fFrameSize = 0;

	if (getFrame != NULL) {
		// Callback: external code feeds the stream into live555
		fFrameSize = getFrame(chId, srcId, fTo, fMaxSize);
		if (fFrameSize > fMaxSize) {
			/// This is critical: if fNumTruncatedBytes is not set, fTo is never reclaimed.
			/// It means the buffer cannot hold a whole frame; the excess is dropped,
			/// so an IDR frame should be requested from the encoder via getFrame.
			fNumTruncatedBytes = fFrameSize - fMaxSize;
			fFrameSize = fMaxSize;
		}
		else fNumTruncatedBytes = 0;
	}
 
 
	if (fFrameSize == 0) {
		handleClosure(this);
		return;
	} 
 
 
	//fNumBytesToStream -= fFrameSize;
	// Set the 'presentation time':
	if (fPlayTimePerFrame > 0 && fPreferredFrameSize > 0) {
		if (fPresentationTime.tv_sec == 0 && fPresentationTime.tv_usec == 0) {
			// This is the first frame, so use the current time:
			gettimeofday(&fPresentationTime, NULL);
		} else {
			// Increment by the play time of the previous data:
			unsigned uSeconds = fPresentationTime.tv_usec + fLastPlayTime;
			fPresentationTime.tv_sec += uSeconds/1000000;
			fPresentationTime.tv_usec = uSeconds%1000000;
		}
 
 
		// Remember the play time of this data:
		fLastPlayTime = (fPlayTimePerFrame*fFrameSize)/fPreferredFrameSize;
		fDurationInMicroseconds = fLastPlayTime;
	} else {
		// We don't know a specific play time duration for this data,
		// so just record the current time as being the 'presentation time':
		gettimeofday(&fPresentationTime, NULL);
	}
 
 
	// Inform the reader that he has data:
#ifdef READ_FROM_FILES_SYNCHRONOUSLY
	// To avoid possible infinite recursion, we need to return to the event loop to do this:
	nextTask() = envir().taskScheduler().scheduleDelayedTask(0,
	(TaskFunc*)FramedSource::afterGetting, this);
#else
	// Because the file read was done from the event loop, we can call the
	// 'after getting' function directly, without risk of infinite recursion:
	FramedSource::afterGetting(this);
#endif
}
	

Next, rewrite H264LiveVideoServerMediaSubsession, adapted from H264FileServerMediaSubsession. In H264FileServerMediaSubsession, createNewStreamSource builds its source from live555's ByteStreamFileSource.

In H264LiveVideoServerMediaSubsession, the source comes from ByteFrameLiveVideoSource instead.

The key pieces are two callback functions, tempCb and stopCb: tempCb fills in ByteFrameLiveVideoSource::getFrame, and stopCb is executed when the RTSP server receives a TEARDOWN command. Their assumed signatures are sketched below.
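The post never shows the callback types themselves. Inferred from the call site getFrame(chId, srcId, fTo, fMaxSize) above, they would look roughly like this; the exact parameter types are assumptions:

// Hypothetical callback signatures, inferred from how they are used in this post.
// GetFrameCB: copy one encoded frame for (chId, srcId) into 'buf' (at most
// 'maxSize' bytes) and return the frame's full size in bytes.
typedef unsigned (*GetFrameCB)(int chId, int srcId, unsigned char* buf, unsigned maxSize);

// StopPlayCB: invoked when the client sends an RTSP TEARDOWN for (chId, srcId).
typedef void (*StopPlayCB)(int chId, int srcId);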

 

FramedSource* H264LiveVideoServerMediaSubsession::
	createNewStreamSource(unsigned clientSessionId, unsigned& estBitrate) {
	estBitrate = 500; // kbps, estimate

	// Create the video source:
	// if(tempCb != NULL) cout << "create new stream source------------------>" << endl;
	ByteFrameLiveVideoSource* liveVideoSource = ByteFrameLiveVideoSource::createNew(envir(), tempCb, stopCb, chId, srcId);
	// printf("createNewStreamSource--------------====> %#x\n", liveVideoSource);

	if (liveVideoSource != NULL) {
		cout << "create liveVideoSource OK \n";
		return H264VideoStreamFramer::createNew(envir(), liveVideoSource);
	}
	else return NULL;
}
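The subsession also needs the matching createNewRTPSink override, which the post omits. Assuming it is unchanged from live555's H264VideoFileServerMediaSubsession, it would be:

// Sketch of the companion sink-side override, assumed identical to the one in
// live555's H264VideoFileServerMediaSubsession:
RTPSink* H264LiveVideoServerMediaSubsession::
	createNewRTPSink(Groupsock* rtpGroupsock,
	                 unsigned char rtpPayloadTypeIfDynamic,
	                 FramedSource* /*inputSource*/) {
	return H264VideoRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic);
}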
Finally, the interface layer:

class liveVideoRTSPServer: public RTSPServerSupportingHTTPStreaming {
public:
  static liveVideoRTSPServer* createNew(Port ourPort,UserAuthenticationDatabase* authDatabase,
  	GetFrameCB cb,StopPlayCB stopCb,unsigned reclamationTestSeconds = 65);

protected:
  liveVideoRTSPServer(UsageEnvironment& env, int ourSocket, Port ourPort,
		    UserAuthenticationDatabase* authDatabase, unsigned reclamationTestSeconds);
  // called only by createNew();
  virtual ~liveVideoRTSPServer();

protected: // redefined virtual functions
  virtual ServerMediaSession* lookupServerMediaSession(char const* streamName);
private:
	GetFrameCB readFreamCb;
	StopPlayCB stopPlayCb;
public:
	static UsageEnvironment* s_env;
	static UsageEnvironment* getEnv();
};
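The post does not show createNew or getEnv. Modeled on RTSPServer::createNew (whose protected setUpOurSocket helper is assumed available here), they would set up the environment and listening socket and stash the two callbacks, roughly:

UsageEnvironment* liveVideoRTSPServer::s_env = NULL;

UsageEnvironment* liveVideoRTSPServer::getEnv() {
	// Lazily create the scheduler/environment pair used by liveVideoServerStart() below:
	if (s_env == NULL) {
		TaskScheduler* scheduler = BasicTaskScheduler::createNew();
		s_env = BasicUsageEnvironment::createNew(*scheduler);
	}
	return s_env;
}

liveVideoRTSPServer* liveVideoRTSPServer::createNew(Port ourPort, UserAuthenticationDatabase* authDatabase,
		GetFrameCB cb, StopPlayCB stopCb, unsigned reclamationTestSeconds) {
	int ourSocket = setUpOurSocket(*getEnv(), ourPort); // inherited from RTSPServer
	if (ourSocket == -1) return NULL;
	liveVideoRTSPServer* server =
		new liveVideoRTSPServer(*getEnv(), ourSocket, ourPort, authDatabase, reclamationTestSeconds);
	server->readFreamCb = cb; // member names as declared above
	server->stopPlayCb = stopCb;
	return server;
}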
lookupServerMediaSession is where you can define your own access rules. Here the rule is rtsp://IP:554/ch0/main to reach the main stream: ch0 maps to channel 0, main to the main stream, and sub to the sub stream. A sketch of the override follows.
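A minimal sketch of that override, assuming it mirrors live555's DynamicRTSPServer: look up an existing session by name, otherwise build one with createNewSMS (shown further below) and install it:

ServerMediaSession* liveVideoRTSPServer::lookupServerMediaSession(char const* streamName) {
	// First, check whether we already have a session for this stream name:
	ServerMediaSession* sms = RTSPServer::lookupServerMediaSession(streamName);
	if (sms == NULL) {
		// Not found: create one according to the "chN/main|sub" naming rule,
		// passing along the two callbacks stored at construction time:
		sms = createNewSMS(envir(), streamName, readFreamCb, stopPlayCb);
		if (sms != NULL) addServerMediaSession(sms);
	}
	return sms;
}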

 

The C interface:

void* liveVideoServerStart(myGetFrameCB cb, myStopPlayCB stopCb) {
	RTSPServer* rtspServer;
	portNumBits rtspServerPortNum = 554;

	// Begin by setting up our usage environment:
	rtspServer = liveVideoRTSPServer::createNew(rtspServerPortNum, NULL, (GetFrameCB)cb, (StopPlayCB)stopCb);
	if (rtspServer == NULL) {
		// Port 554 may require privileges; fall back to the alternative RTSP port:
		rtspServerPortNum = 8554;
		rtspServer = liveVideoRTSPServer::createNew(rtspServerPortNum, NULL, (GetFrameCB)cb, (StopPlayCB)stopCb);
	}
	if (rtspServer == NULL) {
		*liveVideoRTSPServer::getEnv() << "Failed to create RTSP server: " << liveVideoRTSPServer::getEnv()->getResultMsg() << "\n";
		exit(1);
	}

	char* urlPrefix = rtspServer->rtspURLPrefix();
	fprintf(stdout, "use like this: %s", urlPrefix);
	fprintf(stdout, "channel/srcch \n");

	if (rtspServer->setUpTunnelingOverHTTP(80) || rtspServer->setUpTunnelingOverHTTP(8000) || rtspServer->setUpTunnelingOverHTTP(8080)) {
		*liveVideoRTSPServer::getEnv() << "(We use port " << rtspServer->httpServerPortNum() << " for optional RTSP-over-HTTP tunneling, or for HTTP live streaming (for indexed Transport Stream files only).)\n";
	}
	else {
		*liveVideoRTSPServer::getEnv() << "(RTSP-over-HTTP tunneling is not available.)\n";
	}

	liveVideoRTSPServer::getEnv()->taskScheduler().doEventLoop(); // does not return
	return NULL;
}
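For reference, a hypothetical caller might look like the sketch below. All "my_"-prefixed names are placeholders, and the callback signatures follow the GetFrameCB/StopPlayCB guesses sketched earlier:

static unsigned my_get_frame(int chId, int srcId, unsigned char* buf, unsigned maxSize) {
	// Copy one encoded H.264 frame for (chId, srcId) into buf and return its
	// full size. Returning 0 makes ByteFrameLiveVideoSource close the stream,
	// so a real implementation would block or retry until a frame is ready.
	(void)chId; (void)srcId; (void)buf; (void)maxSize;
	return 0;
}

static void my_stop_play(int chId, int srcId) {
	// Called on RTSP TEARDOWN: release any per-stream encoder state here.
	(void)chId; (void)srcId;
}

int main() {
	liveVideoServerStart((myGetFrameCB)my_get_frame, (myStopPlayCB)my_stop_play);
	return 0; // never reached: doEventLoop() does not return
}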

Defining your own access rule:

static ServerMediaSession* createNewSMS(UsageEnvironment& env,
					char const* streamName, GetFrameCB cb,StopPlayCB stopCb) {
  // Parse the stream name as "<channel>/<source>" (e.g. "ch0/main"):
  int chId,SrcId;
  int i;
  ServerMediaSession* sms = NULL;
  Boolean const reuseSource = False;
  char const* extension = strrchr(streamName, '/');
  char const* pstr = streamName;
  char chStr[10]={0} ;
  //pstr = streamName;
  if (extension == NULL) return NULL;
  for (i = 0; i < (int)strlen(streamName) && i < (int)sizeof(chStr) - 1; i++) {
  	if (*pstr == '/') {
		break;
	}
	chStr[i] = *pstr;
	pstr++;
  }
  chStr[i] = '\0';
  // Note: strcmp() returns 0 on a match, so the tests must compare against 0:
  if (strcmp(chStr, "ch0") == 0) {
  	chId = 0;
  } else if (strcmp(chStr, "ch1") == 0) {
  	chId = 1;
  } else return NULL;
  
 
  if (strcmp(extension, "/main") == 0) {
  	SrcId = 0;
  } else if (strcmp(extension, "/sub") == 0) {
  	SrcId = 1;
  } else
  	return NULL;
  cout << "create H264LiveVideoServerMediaSubsession" << endl;
  NEW_SMS("H.264 Video");
  OutPacketBuffer::maxSize = 1920*1080*3/2; // WIDTH*HEIGHT*3/2 (YUV 4:2:0) -- allow for some possibly large H.264 frames
  sms->addSubsession(H264LiveVideoServerMediaSubsession::createNew(env, cb, stopCb, chId, SrcId, reuseSource));

  return sms;
}
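NEW_SMS is a helper macro carried over from live555's DynamicRTSPServer.cpp; adapted to this function's streamName parameter, it is assumed to expand roughly to:

// Assumed form of the NEW_SMS helper, adapted from live555's DynamicRTSPServer.cpp:
#define NEW_SMS(description) do {\
  char const* descStr = description\
    ", streamed by the LIVE555 Media Server";\
  sms = ServerMediaSession::createNew(env, streamName, streamName, descStr);\
} while(0)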

 

Viewing in VLC:

 

Summary:

With these modifications, the encoder can push its stream through live555 over RTSP and be viewed in VLC.

Remaining issues: real-time performance is still relatively poor, playback can stutter when several channels are open at once, and the picture occasionally shows corruption. To be fixed.


 
