轉自https://blog.csdn.net/freezlz/article/details/21044315
2014-03-08 22:05:58
描述live555 client即openRTSP的流程,簡單點說,playCommon.cpp,流爲h264和g726。在實際項目中已成功應用。
以下爲我所見所得,有錯誤之處請指正,謝謝!
1、live555的三種任務
socket handler,event handler,delay task。
這三種任務的特點是,前兩個加入執行隊列後會一直存在,而delay task在執行完一次後會立即棄掉。
/*** socket handler ***/
//定義
// For handling socket operations in the background (from the event loop):
typedef void BackgroundHandlerProc(void* clientData, int mask);
//註冊
// Registration: associates handlerProc/clientData with socketNum for the
// conditions in conditionSet (readable/writable/exception). Body elided in
// this excerpt; SingleStep() later invokes the handler when select() fires.
void BasicTaskScheduler
::setBackgroundHandling(int socketNum, int conditionSet, BackgroundHandlerProc* handlerProc, void* clientData) {}
//執行
BasicTaskScheduler::SingleStep(unsigned maxDelayTime)
{
(*handler->handlerProc)(handler->clientData, resultConditionSet);
}
/*** event handler ***/
//定義
typedef void TaskFunc(void* clientData);
//註冊
// Registration: returns an EventTriggerId for the supplied event handler.
// (The original transcription duplicated the return type token
// "EventTriggerId EventTriggerId"; the real live555 signature has one.)
EventTriggerId BasicTaskScheduler0
::createEventTrigger(TaskFunc* eventHandlerProc) {}
//執行
BasicTaskScheduler::SingleStep(unsigned maxDelayTime)
{
(*fTriggeredEventHandlers[i])(fTriggeredEventClientDatas[i]);
}
/*** delay task ***/
//定義
typedef void TaskFunc(void* clientData);//跟event handler一樣。
//註冊
// Registration: queues proc/clientData to run once after `microseconds`.
// Body elided in this excerpt; the entry is consumed by handleAlarm() below.
TaskToken BasicTaskScheduler0::
scheduleDelayedTask(int64_t microseconds,TaskFunc* proc, void* clientData) {}
//執行
BasicTaskScheduler::SingleStep(unsigned maxDelayTime)
{
fDelayQueue.handleAlarm();
}
// Fires the delay task at the head of the queue once its delay has elapsed.
void DelayQueue::handleAlarm()
{
// Re-sync the head entry's remaining delta with wall-clock time if it has
// not yet counted down to zero.
if (head()->fDeltaTimeRemaining != DELAY_ZERO) synchronize();
if (head()->fDeltaTimeRemaining == DELAY_ZERO)
{
// This event is due to be handled:
DelayQueueEntry* toRemove = head();
removeEntry(toRemove); // do this first, in case handler accesses queue
toRemove->handleTimeout(); // runs exactly once, then the entry is discarded (delay tasks are one-shot)
}
}
2、rtsp交互
//OPTIONS—>DESCRIBE—>SETUP—>PLAY,這是最通用的交互了。
getOptions()—>continueAfterOPTIONS()—>
getSDPDescription()—>continueAfterDESCRIBE()
{
session = MediaSession::createNew(*env, sdpDescription);
while()
{
//音視頻子會話
subsession->initiate();
}
setupStreams();
}
—>
//setupStreams爲遞歸函數(setupStreams–>continueAfterSETUP–>setupStreams)
//setupSubsession所有的子會話
setupStreams()
{
while()
{
setupSubsession(subsession, streamUsingTCP, forceMulticastOnUnspecified, continueAfterSETUP);
}
startPlayingSession(session, initialSeekTime, endTime, scale, continueAfterPLAY);
}
3、以getOptions舉例
getOptions(continueAfterOPTIONS),getOptions後怎麼調用到continueAfterOPTIONS的,如下:
//responseHandler* afterFunc 都由handler() 執行
getOptions(continueAfterOPTIONS)—>sendOptionsCommand()—>sendRequest()
{
—>openConnection()
{
—>connectToServer()
{
setBackgroundHandling(,SOCKET_WRITABLE|SOCKET_EXCEPTION,connectionHandler,);
}
//連接server ok
{
setBackgroundHandling(,SOCKET_READABLE|SOCKET_EXCEPTION,incomingDataHandler,);
}
}
if (connectionIsPending) {
fRequestsAwaitingConnection.enqueue(request);
return request->cseq();
}
}
—>doEventLoop—>SingleStep()
{
//socket狀態符合,就執行註冊好的函數,例如connectionHandler/incomingDataHandler等
(*handler->handlerProc)(handler->clientData, resultConditionSet);
}
SingleStep() //1th step,執行connectionHandler,SOCKET_WRITABLE
{
handler->handlerProc = connectionHandler;
}
SingleStep() //2th step,執行incomingDataHandler,SOCKET_READABLE
{
handler->handlerProc = incomingDataHandler;
}
//incomingDataHandler會調用到continueAfterOPTIONS
// Static trampoline registered via setBackgroundHandling(): recovers the
// RTSPClient instance from clientData and forwards to the member handler.
// NOTE: the original transcription showed "int /mask/" — markdown rendering
// stripped the asterisks from the C comment "/*mask*/"; restored here.
void RTSPClient::incomingDataHandler(void* instance, int /*mask*/) {
RTSPClient* client = (RTSPClient*)instance;
client->incomingDataHandler1();
}
// Reads whatever response bytes are pending on the RTSP control socket and
// hands them to handleResponseBytes(). The braced body written after the
// handleResponseBytes(bytesRead) call is the author's inline sketch of what
// happens INSIDE that callee — it is not literal C++.
void RTSPClient::incomingDataHandler1() {
struct sockaddr_in dummy; // 'from' address - not used
int bytesRead = readSocket(envir(), fInputSocketNum, (unsigned char*)&fResponseBuffer[fResponseBytesAlreadySeen], fResponseBufferBytesLeft, dummy);
handleResponseBytes(bytesRead)
{
// Dispatches the stored response handler, e.g. continueAfterOPTIONS().
(*foundRequest->handler())(this, resultCode, resultString);
}
}
4、client get rtp_packet
● 先從setupStreams先入手吧
void setupStreams()
{
createOutputFiles()
{
while ((subsession = iter.next()) != NULL)
{
//h264
fileSink = H264VideoFileSink::createNew(*env, outFileName,
subsession->fmtp_spropparametersets(),
fileSinkBufferSize, oneFilePerFrame);
//g726
// Normal case:
fileSink = FileSink::createNew(*env, outFileName,
fileSinkBufferSize, oneFilePerFrame);
subsession->sink->startPlaying(*(subsession->readSource()),
subsessionAfterPlaying,
subsession);
}
}
}
//------->
// Records the data source and the completion callback, then starts delivery
// by deferring to the subclass's continuePlaying() (e.g. FileSink's).
Boolean MediaSink::startPlaying(MediaSource& source,
afterPlayingFunc* afterFunc,
void* afterClientData)
{
fSource = (FramedSource*)&source;
fAfterFunc = afterFunc;   // invoked when the stream ends
fAfterClientData = afterClientData;
return continuePlaying();
}
//------->
// Requests the next complete frame from the source; afterGettingFrame() is
// called back once a whole frame has been assembled (see section 4 trace).
Boolean FileSink::continuePlaying()
{
if (fSource == NULL) return False;
fSource->getNextFrame(fBuffer, fBufferSize,
afterGettingFrame, this,
onSourceClosure, this);
return True;
}
● 再從FileSink::continuePlaying入手
FileSink::continuePlaying()
FramedSource::getNextFrame()
MultiFramedRTPSource::doGetNextFrame()
MultiFramedRTPSource::doGetNextFrame1()
//以下::僅表示static func聲明所在的類
static void FramedSource::afterGetting(FramedSource* source);
static void FileSink::afterGettingFrame(void* clientData, unsigned frameSize,unsigned numTruncatedBytes,
struct timeval presentationTime,unsigned durationInMicroseconds);
MultiFramedRTPSource::doGetNextFrame1()
是遞歸函數,退出條件爲
while (fNeedDelivery) //正常測試接收時,fNeedDelivery == 1
{
if (nextPacket == NULL)
{
break;
}
}
(Enter->Exit):即時
(Enter,) :和最近的(,Exit)配對
(,Exit):和最近的(Enter,)配對
static afterGetting::nth(Enter,) <—>static afterGetting::n+1th(,Exit)
從第一次調用continuePlaying()跟蹤。可以直接跳到3th。
4.1 step1th
continuePlaying()—>getNextFrame()—>doGetNextFrame()—>
doGetNextFrame1(Enter->Exit[nextPacket == NULL])—>…自己可以trace—>
startPlayingSession()—>setupStreams()—>SingleStep()::1th—>
networkReadHandler1(Enter,)—>doGetNextFrame1(Enter,)—>
static afterGetting(Enter,)—>static afterGettingFrame(Enter,)—>H264or5VideoFileSink::afterGettingFrame()–>
FileSink::afterGettingFrame(){addData();continuePlaying()}–>
4.2 step2th
continuePlaying()–>getNextFrame()—>doGetNextFrame()—>
doGetNextFrame1(Enter->Exit[nextPacket == NULL])—>static afterGettingFrame(,Exit)—>static afterGetting(,Exit)—>
doGetNextFrame1(,Exit[nextPacket == NULL])—>networkReadHandler1(,Exit)—>
SingleStep()::2th—>incomingReportHandler1()—>
SingleStep()::3th—>incomingDataHandler1()—>continueAfterPLAY()—>
SingleStep()::4th—>
networkReadHandler1(Enter,)—>doGetNextFrame1(Enter,)—>
static afterGetting(Enter,)—>static afterGettingFrame(Enter,)—>H264or5VideoFileSink::afterGettingFrame()–>
FileSink::afterGettingFrame(){addData();continuePlaying()}–>
//上述1/2th還有rtsp交互和一些初始化以及h264 SPS的處理,從3th就純粹了。
//當接收到一幀完整的幀纔會調用static afterGetting()
//FileSink::afterGettingFrame(){addData();continuePlaying()}
//addData():保存一幀h264到文件。continuePlaying()繼續幹活吧。
4.3 step3th
continuePlaying()–>getNextFrame()—>doGetNextFrame()—>
doGetNextFrame1(Enter->Exit[nextPacket == NULL])—>static afterGettingFrame(,Exit)—>static afterGetting(,Exit)—>
doGetNextFrame1(,Exit[nextPacket == NULL])—>networkReadHandler1(,Exit)—>
SingleStep()::5th—>
networkReadHandler1(Enter,)—>doGetNextFrame1(Enter,)—>
static afterGetting(Enter,)—>static afterGettingFrame(Enter,)—>H264or5VideoFileSink::afterGettingFrame()–>
FileSink::afterGettingFrame(){addData();continuePlaying()}–>
4.4 step4th
continuePlaying()–>getNextFrame()—>doGetNextFrame()—>
doGetNextFrame1(Enter->Exit[nextPacket == NULL])—>static afterGettingFrame(,Exit)—>static afterGetting(,Exit)—>
doGetNextFrame1( , Exit[nextPacket == NULL])—>networkReadHandler1(,Exit)—>
//以上爲針對3th的Exit,結合networkReadHandler1::3th(Enter,)和networkReadHandler1::4th(,Exit)可以看出doGetNextFrame1爲遞歸函數。
//networkReadHandler1—>doGetNextFrame1—>static afterGetting—>continuePlaying()—>doGetNextFrame1
// SingleStep()生生不息,networkReadHandler1()進進出出
SingleStep()::6th—>
networkReadHandler1(Enter,)—>doGetNextFrame1(Enter,)—>
static afterGetting(Enter,)—>static afterGettingFrame(Enter,)—>H264or5VideoFileSink::afterGettingFrame()–>
FileSink::afterGettingFrame(){addData();continuePlaying()}–>
//nth :multi-packet frame,會有多次SingleStep(),但不會調用static afterGetting()
//只有在收完所有的sliece,即一個完整的frame後,纔會調用static afterGetting()
continuePlaying()–>getNextFrame()—>doGetNextFrame()—>
doGetNextFrame1(Enter->Exit[nextPacket == NULL])—>static afterGettingFrame(,Exit)—>static afterGetting(,Exit)—>
doGetNextFrame1(,Exit[nextPacket == NULL])—>networkReadHandler1(,Exit)—>
SingleStep()::nth—>
networkReadHandler1(Enter,)—>doGetNextFrame1(Enter,Exit)—>
networkReadHandler1(,Exit)—>
SingleStep()::n+1th—>
networkReadHandler1(Enter,)—>doGetNextFrame1(Enter,Exit)—>
networkReadHandler1(,Exit)—>
。。。。。。。。。。。。。
//最後一個packet
SingleStep()::n+mth—>
networkReadHandler1(Enter,)—>doGetNextFrame1(Enter,)—>
//此處纔會調用static afterGetting
static afterGetting(Enter,)—>static afterGettingFrame(Enter,)—>H264or5VideoFileSink::afterGettingFrame()–>
FileSink::afterGettingFrame(){addData();continuePlaying()}–>
//n+1th
continuePlaying()–>getNextFrame()—>doGetNextFrame()—>
doGetNextFrame1(Enter->Exit[nextPacket == NULL])—>static afterGettingFrame(,Exit)—>static afterGetting(,Exit)—>
doGetNextFrame1(,Exit[nextPacket == NULL])—>networkReadHandler1(,Exit)—>
SingleStep()::n+m+1th—>
networkReadHandler1(Enter,)—>doGetNextFrame1(Enter,Exit)—>
networkReadHandler1(,Exit)—>
。。。。。。。。。。。。。。。。
作者:無風也流
來源:CSDN
原文:https://blog.csdn.net/freezlz/article/details/21044315
版權聲明:本文爲博主原創文章,轉載請附上博文鏈接!