GB28181 Learning Notes: Demuxing H264 and AAC from a PS Stream

The previous post covered extracting video from a PS stream. This time I have added audio extraction as well and tidied up the code structure along the way.

Audio is extracted in exactly the same way as video; for the video side, please see the earlier post: GB28181 Learning Notes: Parsing H264 from a PS Stream.

Without further ado, here is the code. It has also been uploaded to CSDN: https://download.csdn.net/download/qq_39805297/12569841
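
Before the listing, here is the one piece of arithmetic that both getVideoFrame and getAudioFrame rely on: a PES packet starts with the prefix 0x000001, a stream_id byte (0xe0 for video, 0xc0 for audio), a 16-bit PES_packet_length, two flag bytes, and a PES_header_data_length byte at offset 8. The payload therefore starts at offset 9 + PES_header_data_length and is PES_packet_length - 3 - PES_header_data_length bytes long. A minimal sketch of that calculation (this helper is illustrative only and is not part of the project files):

#include <stdint.h>
#include <stdio.h>

//Illustrative helper: locate the elementary-stream payload inside one PES packet.
//Returns false if the buffer does not start with the 0x000001 prefix.
static bool locatePesPayload(const uint8_t* pes, int len, int* offset, int* size)
{
    if (len < 9 || pes[0] != 0x00 || pes[1] != 0x00 || pes[2] != 0x01)
        return false;
    uint8_t  stream_id       = pes[3];                 //0xe0 video, 0xc0 audio
    uint16_t pes_packet_len  = pes[4] << 8 | pes[5];   //number of bytes following this field
    uint8_t  header_data_len = pes[8];                 //optional fields such as PTS/DTS
    *offset = 9 + header_data_len;                     //prefix(3)+id(1)+len(2)+flags(2)+hdr_len(1)
    *size   = pes_packet_len - 3 - header_data_len;    //strip the 3 flag bytes and the optional fields
    printf("stream_id=0x%02x payload offset=%d size=%d\n", stream_id, *offset, *size);
    return true;
}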

streamresolver.h:

#ifndef STREAMRESOLVER_H
#define STREAMRESOLVER_H
#include <iostream>
#include "common.h"

class StreamResolver
{
public:
    StreamResolver(int, int);
    ~StreamResolver();
    void executeProcess();                  //receive RTP packets and drive the demuxing
    void accessPsPacket(uint8_t*, int);     //dispatch an RTP payload by its start code
    void resolvePsPacket(uint8_t*, int);    //skip PS pack/system headers, then parse PES
    void resolvePesPacket(uint8_t*, int);   //walk the PES packets in one payload
    void getVideoFrame(uint8_t*, int);      //parse a video (0xe0) PES packet
    void getAudioFrame(uint8_t*, int);      //parse an audio (0xc0) PES packet
    void appendFrameData(uint8_t*, int);    //append a continuation payload to the current frame
    void writeVideoFrame();                 //flush the assembled frame to gb.h264
    void writeAudioFrame();                 //flush the assembled frame to gb.aac
private:
    int m_portListen;                       //RTP listening port
    int m_secsWait;                         //how many seconds to receive
    FILE* fp_ps;
    FILE* fp_video;
    FILE* fp_audio;
    StreamType m_curStream;                 //type of the frame currently being assembled
};

#endif //STREAMRESOLVER_H
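
streamresolver.h includes common.h, which the post does not show. For the class above to compile it only needs the StreamType enum that m_curStream uses, plus the standard C headers the resolver relies on. The following is a guess at a minimal version, with the enumerator names taken from how streamresolver.cpp uses them:

common.h (a minimal guess at its contents):

#ifndef COMMON_H
#define COMMON_H

#include <stdint.h>
#include <stdio.h>
#include <string.h>

//Which elementary stream the frame currently being reassembled belongs to.
enum StreamType
{
    VIDEO_STREAM = 0,
    AUDIO_STREAM = 1
};

#endif //COMMON_H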

streamresolver.cpp:

#include "streamresolver.h"
#include <jrtplib3/rtpsession.h>
#include <jrtplib3/rtppacket.h>
#include <jrtplib3/rtpudpv4transmitter.h>
#include <jrtplib3/rtpipv4address.h>
#include <jrtplib3/rtpsessionparams.h>
#include <jrtplib3/rtperrors.h>
#include <jrtplib3/rtpsourcedata.h>
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <iostream>
#include <string>

using namespace jrtplib;

#define PS_BUFF_SIZE 4096000
#define SAVE_PS_FILE 1
#define SAVE_VIDEO_FILE 1
#define SAVE_AUDIO_FILE 1

//
// This function checks if there was an RTP error. If so, it displays an error
// message and exits.
//

// Reassembly state shared across RTP packets: the frame buffer, the expected
// frame size taken from the PES header, and how many bytes of it are buffered.
uint8_t *_frameBuff;
int _frameSize;
int _buffLen;

void checkerror(int rtperr)
{
	if (rtperr < 0)
	{
		std::cout << "ERROR: " << RTPGetErrorString(rtperr) << std::endl;
		exit(-1);
	}
}

StreamResolver::StreamResolver(int port, int secs)
:m_portListen(port),m_secsWait(secs)
{

}
StreamResolver::~StreamResolver()
{
    
}

void StreamResolver::writeVideoFrame()
{
	printf("write video frame size=%d\n", _buffLen);
	if (_frameSize != _buffLen)
		printf("error:frameSize=%d bufflen=%d\n", _frameSize, _buffLen);
#if SAVE_VIDEO_FILE
	fwrite(_frameBuff, 1, _buffLen, fp_video);
#endif
	memset(_frameBuff, 0, PS_BUFF_SIZE);
	_buffLen = 0;
	_frameSize = 0;
}

void StreamResolver::writeAudioFrame()
{
    printf("write audio frame size=%d\n", _buffLen);
	if (_frameSize != _buffLen)
		printf("error:frameSize=%d bufflen=%d\n", _frameSize, _buffLen);
#if SAVE_AUDIO_FILE
	fwrite(_frameBuff, 1, _buffLen, fp_audio);
#endif
	memset(_frameBuff, 0, PS_BUFF_SIZE);
	_buffLen = 0;
	_frameSize = 0;
}

void StreamResolver::resolvePesPacket(uint8_t* payloadData, int payloadLength)
{
	/******  walk the PES packets contained in this payload  ******/
	while (true)
	{
        if(payloadData[0] == 0x00 && payloadData[1] == 0x00 && 
            payloadData[2] == 0x01 && payloadData[3] == 0xe0)
        {
		    getVideoFrame(payloadData, payloadLength);
		}
        else if(payloadData[0] == 0x00 && payloadData[1] == 0x00 && 
            payloadData[2] == 0x01 && payloadData[3] == 0xc0)
        {
            getAudioFrame(payloadData, payloadLength);
        }
        else
        {
            printf("no valid stream c0 or e0\n");
            break;
        }
		uint16_t stream_size = payloadData[4] << 8 | payloadData[5];
        payloadData += 6 + stream_size;
        payloadLength -= 6 + stream_size;
        if(payloadLength < 4)
            break;
	}
}

void StreamResolver::getVideoFrame(uint8_t* payloadData, int payloadLength)
{
	memset(_frameBuff, 0, PS_BUFF_SIZE);
	_buffLen = 0;
	_frameSize = 0;

    m_curStream = VIDEO_STREAM;
    int pos = 0;
    uint16_t h264_size = payloadData[4] << 8 | payloadData[5];
	uint8_t expand_size = payloadData[8];//PES_header_data_length
	_frameSize = h264_size - 3 - expand_size;
	pos += 9 + expand_size;
	//the whole frame fits inside this payload: copy it and write it out
	if (_frameSize <= payloadLength - pos)
	{
	    memcpy(_frameBuff, payloadData + pos, _frameSize);
	    _buffLen += _frameSize;
		pos += _frameSize;
		writeVideoFrame();
	}
	else//the frame spans several RTP packets; the rest arrives via appendFrameData()
	{
	    memcpy(_frameBuff, payloadData + pos, payloadLength - pos);
	    _buffLen += payloadLength - pos;
	    printf("Video frame size:%d\n", _frameSize);
    }
}

void StreamResolver::getAudioFrame(uint8_t* payloadData, int payloadLength)
{
    memset(_frameBuff, 0, PS_BUFF_SIZE);
	_buffLen = 0;
	_frameSize = 0;

    m_curStream = AUDIO_STREAM;
    int pos = 0;
    uint16_t stream_size = payloadData[4] << 8 | payloadData[5];
	uint8_t expand_size = payloadData[8];//PES_header_data_length
	_frameSize = stream_size - 3 - expand_size;
	pos += 9 + expand_size;
	//the whole frame fits inside this payload: copy it and write it out
	if (_frameSize <= payloadLength - pos)
	{
	    memcpy(_frameBuff, payloadData + pos, _frameSize);
	    _buffLen += _frameSize;
		pos += _frameSize;
		writeAudioFrame();
	}
	else//the frame spans several RTP packets; the rest arrives via appendFrameData()
	{
	    memcpy(_frameBuff, payloadData + pos, payloadLength - pos);
	    _buffLen += payloadLength - pos;
	    printf("Audio frame size:%d\n", _frameSize);
    }
}

void StreamResolver::resolvePsPacket(uint8_t* payloadData, int payloadLength)
{
	int pos = 0;
    if (payloadData[0] == 0x00 && payloadData[1] == 0x00 &&
		payloadData[2] == 0x01 && payloadData[3] == 0xba)
    {
        uint8_t expand_size = payloadData[13] & 0x07;//pack_stuffing_length: extra stuffing bytes
        pos += 14 + expand_size;//the PS pack header is 14 bytes plus stuffing
        /******  pack carrying a key frame: system header and program stream map come first  ******/
        if (payloadData[pos] == 0x00 && payloadData[pos + 1] == 0x00 && 
            payloadData[pos + 2] == 0x01 && payloadData[pos + 3] == 0xbb)//0x000001bb ps system header
        {
	        uint16_t psh_size = payloadData[pos + 4] << 8 | payloadData[pos + 5];//system header length
	        pos += 6 + psh_size;
	        if (payloadData[pos] == 0x00 && payloadData[pos + 1] == 0x00 && 
                payloadData[pos + 2] == 0x01 && payloadData[pos + 3] == 0xbc)//0x000001bc program stream map (PSM)
	        {
		        uint16_t psm_size = payloadData[pos + 4] << 8 | payloadData[pos + 5];
		        pos += 6 + psm_size;
		    }
	        else
	        {
		        printf("no system map and no video stream\n");
		        return;
	        }
	    }
    }
    resolvePesPacket(payloadData + pos, payloadLength - pos);
}


void StreamResolver::executeProcess()
{
#ifdef RTP_SOCKETTYPE_WINSOCK
	WSADATA dat;
	WSAStartup(MAKEWORD(2, 2), &dat);
#endif // RTP_SOCKETTYPE_WINSOCK

#if SAVE_PS_FILE
	fp_ps = fopen("gb.ps", "wb");
#endif // SAVE_PS_FILE
#if SAVE_VIDEO_FILE
	fp_video = fopen("gb.h264", "wb");
#endif 
#if SAVE_AUDIO_FILE
	fp_audio = fopen("gb.aac", "wb");
#endif 

	RTPSession sess;
	std::string ipstr;
	int status, j;

	// Now, we'll create an RTP session and poll for incoming data.

	RTPUDPv4TransmissionParams transparams;
	RTPSessionParams sessparams;

	// IMPORTANT: The local timestamp unit MUST be set, otherwise
	//            RTCP Sender Report info will be calculated wrong
	// In this case, we'll just use 8000 samples per second.
	sessparams.SetOwnTimestampUnit(1.0 / 8000.0);

	sessparams.SetAcceptOwnPackets(true);
	transparams.SetPortbase(m_portListen);
	status = sess.Create(sessparams, &transparams);
	checkerror(status);
	_frameBuff = new uint8_t[PS_BUFF_SIZE];
	memset(_frameBuff, 0, PS_BUFF_SIZE);
	_frameSize = 0;
	_buffLen = 0;

	for (j = 1; j <= m_secsWait; j++)
	{
		sess.BeginDataAccess();
		printf("secs gone %d\n", j);
		// check incoming packets
		if (sess.GotoFirstSourceWithData())
		{
			do
			{
				RTPPacket *pack;
				while ((pack = sess.GetNextPacket()) != NULL)
				{
					printf("Got packet:%d\n", pack->GetSequenceNumber());
					// You can examine the data here
					if (pack->GetPayloadType() == 96)//PS stream (GB28181 dynamic payload type 96)
					{
                        accessPsPacket(pack->GetPayloadData(), pack->GetPayloadLength());
					}
					// we don't longer need the packet, so
					// we'll delete it
					sess.DeletePacket(pack);
				}
			} while (sess.GotoNextSourceWithData());
		}

		sess.EndDataAccess();

#ifndef RTP_SUPPORT_THREAD
		status = sess.Poll();
		checkerror(status);
#endif // RTP_SUPPORT_THREAD

		RTPTime::Wait(RTPTime(1, 0));
	}
	sess.BYEDestroy(RTPTime(10, 0), 0, 0);

#ifdef RTP_SOCKETTYPE_WINSOCK
	WSACleanup();
#endif // RTP_SOCKETTYPE_WINSOCK
#if SAVE_PS_FILE
	fclose(fp_ps);
#endif
#if SAVE_VIDEO_FILE
	fclose(fp_video);
#endif
#if SAVE_AUDIO_FILE
	fclose(fp_audio);
#endif
	printf("StreamReciever exits\n");
}

void StreamResolver::accessPsPacket(uint8_t* payloadData, int payloadLength)
{

#if SAVE_PS_FILE
	fwrite(payloadData, 1, payloadLength, fp_ps);
#endif
	//look for the PS pack header start code 0x000001BA
	if (payloadData[0] == 0x00 && payloadData[1] == 0x00 &&
		payloadData[2] == 0x01 && payloadData[3] == 0xba)
	{
		resolvePsPacket(payloadData, payloadLength);
	}
    else if (payloadData[0] == 0x00 && payloadData[1] == 0x00 &&
		payloadData[2] == 0x01 && payloadData[3] == 0xe0)//0x000001e0 video PES
    {
        resolvePesPacket(payloadData, payloadLength);
    }
    else if (payloadData[0] == 0x00 && payloadData[1] == 0x00 &&
		payloadData[2] == 0x01 && payloadData[3] == 0xc0)//0x000001c0 audio PES
	{ 
        resolvePesPacket(payloadData, payloadLength);
	}
	else if (payloadData[0] == 0x00 && payloadData[1] == 0x00 &&
        payloadData[2] == 0x01 && payloadData[3] == 0xbd)//0x000001bd private stream data, ignored here
	{ 
	}
	else  //if the payload does not start with 0x000001BA, it is assumed to be the middle of a frame and is appended after the data already buffered
	{
		appendFrameData(payloadData, payloadLength);
	}
}

void StreamResolver::appendFrameData(uint8_t* payloadData, int payloadLength)
{
    //the buffered data plus this payload completes the current frame
    if (payloadLength + _buffLen >= _frameSize)
	{
		int len = _frameSize - _buffLen;//bytes still missing from the frame
		memcpy(_frameBuff + _buffLen, payloadData, len);
		_buffLen += len;
        if(VIDEO_STREAM == m_curStream)
    		writeVideoFrame();
        else if (AUDIO_STREAM == m_curStream)
            writeAudioFrame();
        
		//any bytes left over start a new PS pack, so hand them back to the parser
		if (payloadLength > len)
			resolvePsPacket(payloadData + len, payloadLength - len);
	}
	else
	{
		memcpy(_frameBuff + _buffLen, payloadData, payloadLength);
		_buffLen += payloadLength;
	}
}
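
The post does not include an entry point. A minimal driver, assuming the RTP receive port and the listening duration are passed on the command line (the defaults of 30000 and 60 below are arbitrary), could look like this:

main.cpp (illustrative only):

#include <stdlib.h>
#include "streamresolver.h"

//Illustrative entry point: listen on the given RTP port for the given number of
//seconds; gb.ps, gb.h264 and gb.aac are written to the working directory.
int main(int argc, char* argv[])
{
    int port = (argc > 1) ? atoi(argv[1]) : 30000;//RTP listening port (arbitrary default)
    int secs = (argc > 2) ? atoi(argv[2]) : 60;   //how long to receive, in seconds
    StreamResolver resolver(port, secs);
    resolver.executeProcess();
    return 0;
}

Build it together with streamresolver.cpp and link against jrtplib (typically -ljrtp, plus -ljthread if your jrtplib build depends on it). The extracted gb.h264 stream can then be played back with ffplay to verify the demuxing.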

 
