一、環境介紹
操作系統: windows系統 、 win10 X64
使用的FFMPEG庫版本下載地址:https://download.csdn.net/download/xiaolong1126626497/12304729
在windows下使用FFMPEG庫時,爲了方便程序運行,記得把庫的路徑加到系統的環境變量裏。
二、程序功能介紹
目前第一版本代碼沒有優化,實現拉流解碼播放的基本功能,目前聲音播放有輕微的卡頓,聽起來不順暢,因爲程序裏解碼之後就直接播放,沒有加隊列緩存,下一篇博客再繼續優化。
代碼裏有兩個線程:主線程進行UI界面顯示,子線程負責拉流解碼,子線程裏解碼視頻之後,將圖像數據通過信號發送給主UI界面進行刷新顯示。
代碼裏的目前支持解碼的視頻編碼爲H264、音頻是AAC,其他的編碼暫時沒有加入支持,如果有需求,修改增加代碼即可。
代碼裏的視頻解碼流程:獲取一幀H264編碼的視頻幀-->解碼成YUV420P格式數據->轉換爲RGB24格式->加載到QImage裏-->通過標籤控件進行顯示。
代碼裏的音頻解碼流程:獲取一幀AAC編碼的音頻幀--->解碼成PCM格式--->進行音頻重採樣轉換成自己需要的PCM格式-->通過QT的音頻接口輸出到聲卡進行播放。
打包的成品軟件下載地址:https://download.csdn.net/download/xiaolong1126626497/12317449
三、程序運行效果
下面是播放流媒體服務器視頻的效果,視頻+聲音是OK的。
上面的RTMP地址欄裏,也可以填本地電腦上的視頻路徑,只要視頻是H264+AAC編碼的,也可以播放;只是目前沒有加時間戳同步處理,播放速度會非常快。
四、程序代碼
xxx.pro工程文件代碼:
QT += core gui
QT += multimediawidgets
QT += xml
QT += multimedia
QT += network
QT += widgets
QT += serialport
greaterThan(QT_MAJOR_VERSION, 4): QT += widgets
CONFIG += c++11
# The following define makes your compiler emit warnings if you use
# any Qt feature that has been marked deprecated (the exact warnings
# depend on your compiler). Please consult the documentation of the
# deprecated API in order to know how to port your code away from it.
DEFINES += QT_DEPRECATED_WARNINGS
# You can also make your code fail to compile if it uses deprecated APIs.
# In order to do so, uncomment the following line.
# You can also select to disable deprecated APIs only up to a certain version of Qt.
#DEFINES += QT_DISABLE_DEPRECATED_BEFORE=0x060000 # disables all the APIs deprecated before Qt 6.0.0
SOURCES += \
Thread_FFMPEG_LaLiu.cpp \
main.cpp \
widget.cpp
HEADERS += \
Thread_FFMPEG_LaLiu.h \
widget.h
FORMS += \
widget.ui
# Default rules for deployment.
qnx: target.path = /tmp/$${TARGET}/bin
else: unix:!android: target.path = /opt/$${TARGET}/bin
!isEmpty(target.path): INSTALLS += target
# qmake scopes require the opening brace on the SAME line as the condition.
# With "win32" on its own line (as before), the braced block is applied
# unconditionally on every platform instead of only on Windows.
win32 {
message('運行win32版本')
INCLUDEPATH+=C:/FFMPEG_WIN32_LIB/include
LIBS+=C:/FFMPEG_WIN32_LIB/bin/av*
LIBS+=C:/FFMPEG_WIN32_LIB/bin/sw*
LIBS+=C:/FFMPEG_WIN32_LIB/bin/pos*
}
RESOURCES += \
image.qrc
RC_ICONS=log.ico
Thread_FFMPEG_LaLiu.cpp代碼:
#include "Thread_FFMPEG_LaLiu.h"
// Upper bound (in bytes of samples per channel) for one resampled audio buffer.
#define MAX_AUDIO_FRAME_SIZE 192000
// Global decode-thread instance (declared extern in the header, used by the UI).
class Thread_FFMPEG_LaLiu thread_laliu;
// Global parameters (pull URL, run flag) shared between UI and decode thread.
class VideoAudioDecode video_audio_decode;
// Thread entry point: initialize the audio output device, release any frame
// buffers left over from a previous run, then enter the pull/decode loop.
void Thread_FFMPEG_LaLiu::run()
{
    Audio_Out_Init();
    // The buffers are allocated with new[] in ffmpeg_rtmp_client(), so they
    // must be released with delete[] (the original used scalar delete — UB).
    if(rgb24_data)
    {
        delete [] rgb24_data;
        rgb24_data=nullptr;
    }
    if(yuv420p_data)
    {
        delete [] yuv420p_data;
        yuv420p_data=nullptr;
    }
    LogSend("開始拉流.\n");
    //qDebug()<<"AV_CODEC_ID_H264="<<AV_CODEC_ID_H264; //27
    //qDebug()<<"AV_CODEC_ID_AAC="<<AV_CODEC_ID_AAC; //86018
    ffmpeg_rtmp_client();
}
//拉流
int Thread_FFMPEG_LaLiu::ffmpeg_rtmp_client()
{
int video_width=0;
int video_height=0;
// Allocate an AVFormatContext
AVFormatContext* format_ctx = avformat_alloc_context();
// 打開rtsp:打開輸入流並讀取標題。 編解碼器未打開
const char* url =video_audio_decode.rtmp_url;// "rtmp://193.112.142.152:8888/live/abcd";
LogSend(tr("拉流地址: %1\n").arg(url));
int ret = -1;
ret = avformat_open_input(&format_ctx, url, nullptr, nullptr);
if(ret != 0)
{
LogSend(tr("無法打開網址: %1, return value: %2 \n").arg(url).arg(ret));
return -1;
}
// 讀取媒體文件的數據包以獲取流信息
ret = avformat_find_stream_info(format_ctx, nullptr);
if(ret < 0)
{
LogSend(tr("無法獲取流信息: %1\n").arg(ret));
return -1;
}
AVCodec *video_pCodec;
AVCodec *audio_pCodec;
// audio/video stream index
int video_stream_index = -1;
int audio_stream_index = -1;
LogSend(tr("視頻中流的數量: %1\n").arg(format_ctx->nb_streams));
for(int i = 0; i < format_ctx->nb_streams; ++i)
{
const AVStream* stream = format_ctx->streams[i];
LogSend(tr("編碼數據的類型: %1\n").arg(stream->codecpar->codec_id));
if(stream->codecpar->codec_type == AVMEDIA_TYPE_VIDEO)
{
//判斷視頻流是否是H264格式
if(stream->codecpar->codec_id!=AV_CODEC_ID_H264)
{
LogSend("當前視頻編碼格式暫時不支持. 目前只支持:H264\n");
return 0;
}
//查找解碼器
video_pCodec=avcodec_find_decoder(AV_CODEC_ID_H264);
//打開解碼器
int err = avcodec_open2(stream->codec,video_pCodec, NULL);
if(err!=0)
{
LogSend(tr("H264解碼器打開失敗.\n"));
return 0;
}
video_stream_index = i;
//得到視頻幀的寬高
video_width=stream->codecpar->width;
video_height=stream->codecpar->height;
LogSend(tr("視頻幀的尺寸(以像素爲單位): (寬X高)%1x%2 像素格式: %3\n").arg(
stream->codecpar->width).arg(stream->codecpar->height).arg(stream->codecpar->format));
}
else if(stream->codecpar->codec_type == AVMEDIA_TYPE_AUDIO)
{
audio_stream_index = i;
qDebug()<<tr("音頻樣本格式: %1").arg(stream->codecpar->format);
//判斷音頻流是否是AAC格式
if(stream->codecpar->codec_id!=AV_CODEC_ID_AAC)
{
LogSend("當前音頻編碼格式暫時不支持. 目前只支持:AAC\n");
return 0;
}
//查找解碼器
audio_pCodec=avcodec_find_decoder(AV_CODEC_ID_AAC);
//打開解碼器
int err = avcodec_open2(stream->codec,audio_pCodec, nullptr);
if(err!=0)
{
LogSend(tr("AAC解碼器打開失敗.\n"));
return 0;
}
}
}
if (video_stream_index == -1)
{
LogSend("沒有檢測到視頻流.\n");
return -1;
}
if (audio_stream_index == -1)
{
LogSend("沒有檢測到音頻流.\n");
}
//初始化解碼相關的參數
AVFrame *yuv420p_pFrame = nullptr;
AVFrame *PCM_pFrame = nullptr;
yuv420p_pFrame = av_frame_alloc();// 存放解碼後YUV數據的緩衝區
PCM_pFrame = av_frame_alloc();// 存放解碼後PCM數據的緩衝區
//創建packet,用於存儲解碼前音頻的數據
AVPacket *packet = (AVPacket *)malloc(sizeof(AVPacket));
av_init_packet(packet);
//設置音頻轉碼後輸出相關參數
//採樣的佈局方式
uint64_t out_channel_layout = AV_CH_LAYOUT_MONO;
//採樣個數
int out_nb_samples = 1024;
//採樣格式
enum AVSampleFormat sample_fmt = AV_SAMPLE_FMT_S16;
//採樣率
int out_sample_rate = 44100;
//通道數
int out_channels = av_get_channel_layout_nb_channels(out_channel_layout);
printf("%d\n",out_channels);
//創建buffer
int buffer_size = av_samples_get_buffer_size(nullptr, out_channels, out_nb_samples, sample_fmt, 1);
//注意要用av_malloc
uint8_t *buffer = (uint8_t *)av_malloc(MAX_AUDIO_FRAME_SIZE * 2);
int64_t in_channel_layout = av_get_default_channel_layout(format_ctx->streams[audio_stream_index]->codec->channels);
//打開轉碼器
struct SwrContext *convert_ctx = swr_alloc();
//設置轉碼參數
convert_ctx = swr_alloc_set_opts(convert_ctx, out_channel_layout, sample_fmt, out_sample_rate, \
in_channel_layout, format_ctx->streams[audio_stream_index]->codec->sample_fmt, format_ctx->streams[audio_stream_index]->codec->sample_rate, 0, nullptr);
//初始化轉碼器
swr_init(convert_ctx);
//申請存放yuv420p數據的空間
yuv420p_data=new unsigned char[video_width*video_height*3/2];
//申請存放rgb24數據的空間
rgb24_data=new unsigned char[video_width*video_height*3];
int y_size=video_width*video_height;
AVPacket pkt;
int re;
bool send_flag=1;
while(video_audio_decode.run_flag)
{
//讀取一幀數據
ret=av_read_frame(format_ctx, &pkt);
if(ret < 0)
{
continue;
}
//得到視頻包
if(pkt.stream_index == video_stream_index)
{
//解碼視頻 frame
re = avcodec_send_packet(format_ctx->streams[video_stream_index]->codec,&pkt);//發送視頻幀
if (re != 0)
{
av_packet_unref(&pkt);//不成功就釋放這個pkt
continue;
}
re = avcodec_receive_frame(format_ctx->streams[video_stream_index]->codec, yuv420p_pFrame);//接受後對視頻幀進行解碼
if (re != 0)
{
av_packet_unref(&pkt);//不成功就釋放這個pkt
continue;
}
//將YUV數據拷貝到緩衝區
memcpy(yuv420p_data,(const void *)yuv420p_pFrame->data[0],y_size);
memcpy(yuv420p_data+y_size,(const void *)yuv420p_pFrame->data[1],y_size/4);
memcpy(yuv420p_data+y_size+y_size/4,(const void *)yuv420p_pFrame->data[2],y_size/4);
//將yuv420p轉爲RGB24格式
YUV420P_to_RGB24(yuv420p_data,rgb24_data,video_width,video_height);
//加載圖片數據
QImage image(rgb24_data,video_width,video_height,QImage::Format_RGB888);
VideoDataOutput(image); //發送信號
}
//得到音頻包
if(pkt.stream_index == audio_stream_index)
{
//解碼聲音
re = avcodec_send_packet(format_ctx->streams[audio_stream_index]->codec,&pkt);//發送視頻幀
if (re != 0)
{
av_packet_unref(&pkt);//不成功就釋放這個pkt
continue;
}
re = avcodec_receive_frame(format_ctx->streams[audio_stream_index]->codec, PCM_pFrame);//接受後對視頻幀進行解碼
if (re != 0)
{
av_packet_unref(&pkt);//不成功就釋放這個pkt
continue;
}
//只發送一次
if(send_flag)
{
send_flag=0;
//得到PCM數據的配置信息
LogSend(tr("nb_samples=%1\n").arg(PCM_pFrame->nb_samples)); //此幀描述的音頻樣本數(每通道
LogSend(tr("音頻數據聲道=%1\n").arg(PCM_pFrame->channels)); //聲道數量
LogSend(tr("音頻數據採樣率=%1\n").arg(PCM_pFrame->sample_rate)); //採樣率
LogSend(tr("channel_layout=%1\n").arg(PCM_pFrame->channel_layout)); //通道佈局
}
//轉碼
swr_convert(convert_ctx, &buffer, MAX_AUDIO_FRAME_SIZE, (const uint8_t **)PCM_pFrame->data, PCM_pFrame->nb_samples);
//播放音頻
audio_out_streamIn->write((const char *)buffer,buffer_size);
}
av_packet_unref(&pkt);
}
avformat_free_context(format_ctx);
avformat_close_input(&format_ctx);//釋放解封裝器的空間,以防空間被快速消耗完
return 0;
}
// Convert a planar YUV420P buffer to packed RGB24 (BT.601 full-range
// coefficients). `data` holds Y (w*h bytes), then U and V (w/2*h/2 each);
// `rgb` receives w*h*3 bytes of interleaved R,G,B.
void Thread_FFMPEG_LaLiu::YUV420P_to_RGB24(unsigned char *data, unsigned char *rgb, int width, int height)
{
    int index = 0;
    const unsigned char *ybase = data;
    const unsigned char *ubase = &data[width * height];
    const unsigned char *vbase = &data[width * height * 5 / 4];
    for (int y = 0; y < height; y++) {
        for (int x = 0; x < width; x++) {
            // One U/V sample covers a 2x2 block of Y samples.
            unsigned char Y = ybase[x + y * width];
            unsigned char U = ubase[y / 2 * width / 2 + (x / 2)];
            unsigned char V = vbase[y / 2 * width / 2 + (x / 2)];
            // fix: compute in int and clamp to [0,255]; the original assigned
            // straight into unsigned char, so out-of-range results wrapped
            // around and produced corrupted colors on saturated frames.
            int r = (int)(Y + 1.402 * (V - 128));
            int g = (int)(Y - 0.34413 * (U - 128) - 0.71414 * (V - 128));
            int b = (int)(Y + 1.772 * (U - 128));
            rgb[index++] = (unsigned char)(r < 0 ? 0 : (r > 255 ? 255 : r)); //R
            rgb[index++] = (unsigned char)(g < 0 ? 0 : (g > 255 ? 255 : g)); //G
            rgb[index++] = (unsigned char)(b < 0 ? 0 : (b > 255 ? 255 : b)); //B
        }
    }
}
//音頻輸出初始化
void Thread_FFMPEG_LaLiu::Audio_Out_Init()
{
QAudioFormat auido_out_format;
//設置錄音的格式
auido_out_format.setSampleRate(44100); //設置採樣率以對赫茲採樣。 以秒爲單位,每秒採集多少聲音數據的頻率.
auido_out_format.setChannelCount(1); //將通道數設置爲通道。
auido_out_format.setSampleSize(16); /*將樣本大小設置爲指定的sampleSize(以位爲單位)通常爲8或16,但是某些系統可能支持更大的樣本量。*/
auido_out_format.setCodec("audio/pcm"); //設置編碼格式
auido_out_format.setByteOrder(QAudioFormat::LittleEndian); //樣本是小端字節順序
auido_out_format.setSampleType(QAudioFormat::SignedInt); //樣本類型
QAudioDeviceInfo info(QAudioDeviceInfo::defaultOutputDevice());
if(audio_out)
{
delete audio_out;
audio_out=nullptr;
}
audio_out = new QAudioOutput(auido_out_format);
audio_out_streamIn=audio_out->start();
LogSend("音頻輸出初始化成功.\n");
}
Thread_FFMPEG_LaLiu.h代碼:
#ifndef THREAD_FFMPEG_LALIU_H
#define THREAD_FFMPEG_LALIU_H
#include <QAbstractVideoSurface>
#include <QVideoProbe>
#include <QThread>
#include <QApplication>
#include <QDebug>
#include <QObject>
#include <QMutex>
#include <QMutexLocker>
#include <QWaitCondition>
#include <QQueue>
#include <QCamera>
#include <QPen>
#include <QPainter>
#include <QRgb>
#include <QAudio> //這五個是QT處理音頻的庫
#include <QAudioFormat>
#include <QAudioInput>
#include <QAudioOutput>
#include <QIODevice>
#include <QPlainTextEdit>
#include <QScrollBar>
//聲明引用C的頭文件
extern "C"
{
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <math.h>
#include <libavutil/avassert.h>
#include <libavutil/channel_layout.h>
#include <libavutil/opt.h>
#include <libavutil/mathematics.h>
#include <libavutil/timestamp.h>
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
#include <libswresample/swresample.h>
#include "libavfilter/avfilter.h"
#include "libavutil/avassert.h"
#include "libavutil/channel_layout.h"
#include "libavutil/common.h"
#include "libavutil/opt.h"
}
// Video/audio decode thread: pulls an RTMP stream, decodes H264/AAC and
// hands decoded frames and log text to the UI thread via queued signals.
class Thread_FFMPEG_LaLiu: public QThread
{
Q_OBJECT
public:
unsigned char *yuv420p_data; // decoded YUV420P frame buffer (owned, new[] in decode loop)
unsigned char *rgb24_data;   // RGB24 conversion buffer backing the emitted QImage (owned)
QAudioOutput *audio_out;            // PCM output device (owned, created in Audio_Out_Init)
QIODevice* audio_out_streamIn;      // writable stream returned by audio_out->start(); not owned
Thread_FFMPEG_LaLiu()
{
// All resources start null; they are (re)created when the thread runs.
rgb24_data=nullptr;yuv420p_data=nullptr;
audio_out=nullptr;
audio_out_streamIn=nullptr;
}
int ffmpeg_rtmp_client();  // pull + decode loop; blocks until run_flag is cleared
void Audio_Out_Init();     // open the 44.1 kHz mono S16 PCM output device
void YUV420P_to_RGB24(unsigned char *data, unsigned char *rgb, int width, int height);
protected:
void run();                // QThread entry point
signals:
void LogSend(QString text);     // log line for the UI's text pane
void VideoDataOutput(QImage);   // one decoded video frame for display
};
// Global parameters shared between the UI thread and the decode thread.
class VideoAudioDecode
{
public:
char rtmp_url[1024]; // pull address: RTMP URL or local file path
bool run_flag; // true (1) = decode loop keeps running, false (0) = stop
};
extern class Thread_FFMPEG_LaLiu thread_laliu;
extern class VideoAudioDecode video_audio_decode;
#endif // THREAD_FFMPEG_LALIU_H
widget.h代碼:
#ifndef WIDGET_H
#define WIDGET_H
#include <QWidget>
#include "Thread_FFMPEG_LaLiu.h"
QT_BEGIN_NAMESPACE
namespace Ui { class Widget; }
QT_END_NAMESPACE
// Main (UI-thread) window: hosts the video display label, the log pane and
// the start button; receives frames/log text from the decode thread.
class Widget : public QWidget
{
Q_OBJECT
public:
Widget(QWidget *parent = nullptr);
~Widget();
void SetStyle(const QString &qssFile); // load a QSS file and apply it application-wide
void Log_Text_Display(QPlainTextEdit *plainTextEdit_log,QString text); // append text and autoscroll
private slots:
void Log_Display(QString text);        // connected to Thread_FFMPEG_LaLiu::LogSend
void VideoDataDisplay(QImage image);   // connected to Thread_FFMPEG_LaLiu::VideoDataOutput
void on_pushButton_start_clicked();    // "start pulling" button handler
private:
Ui::Widget *ui;  // uic-generated UI, owned (deleted in the destructor)
};
#endif // WIDGET_H
widget.cpp代碼:
#include "widget.h"
#include "ui_widget.h"
/*
 * Load a QSS style sheet file and apply it application-wide; also derives
 * the application palette color from the sheet. Falls back to the default
 * (empty) style sheet when the file cannot be opened.
 */
void Widget::SetStyle(const QString &qssFile) {
QFile file(qssFile);
if (file.open(QFile::ReadOnly)) {
QString qss = QLatin1String(file.readAll());
qApp->setStyleSheet(qss);
// NOTE(review): assumes a 7-character color code (e.g. "#112233") sits at
// byte offset 20 of the QSS file — fragile; verify against blue.css layout.
QString PaletteColor = qss.mid(20,7);
qApp->setPalette(QPalette(QColor(PaletteColor)));
file.close();
}
else
{
// Style file missing/unreadable: revert to the default style.
qApp->setStyleSheet("");
}
}
// Constructor: build the UI, apply styling, seed the default pull URL and
// wire the decode thread's signals to the UI slots (cross-thread queued
// connections, since thread_laliu runs in its own thread).
Widget::Widget(QWidget *parent)
: QWidget(parent)
, ui(new Ui::Widget)
{
ui->setupUi(this);
/* basic window setup */
this->SetStyle(":/images/blue.css"); // apply the bundled style sheet
this->setWindowIcon(QIcon(":/log.ico")); // window icon from resources
this->setWindowTitle("RTMP拉流客戶端");
// default pull address shown in the URL line edit
ui->lineEdit_rtmp_url->setText("rtmp://193.112.142.152:8888/live/abcd");
// decoded video frames from the worker thread -> display slot
connect(&thread_laliu,SIGNAL(VideoDataOutput(QImage )),this,SLOT(VideoDataDisplay(QImage )));
// log text from the worker thread -> log pane slot
connect(&thread_laliu,SIGNAL(LogSend(QString)),this,SLOT(Log_Display(QString)));
}
// Destructor: release the uic-generated UI.
Widget::~Widget()
{
delete ui;
}
// Slot: show a freshly decoded frame on the video display label.
void Widget::VideoDataDisplay(QImage image)
{
    ui->label_ImageDisplay->setPixmap(QPixmap::fromImage(image));
}
/* Append text to the given log widget and keep the newest line visible. */
void Widget::Log_Text_Display(QPlainTextEdit *plainTextEdit_log,QString text)
{
    plainTextEdit_log->insertPlainText(text);
    // Pin the view to the bottom so the latest entry stays on screen.
    QScrollBar *bar = plainTextEdit_log->verticalScrollBar();
    if(bar != nullptr)
    {
        bar->setSliderPosition(bar->maximum());
    }
}
// Slot: forward a log message from the decode thread to the log pane.
void Widget::Log_Display(QString text)
{
Log_Text_Display(ui->plainTextEdit_log,text);
}
// Slot: "start" button — record the URL from the line edit, raise the run
// flag and launch the decode thread.
void Widget::on_pushButton_start_clicked()
{
    video_audio_decode.run_flag=1; // let the decode loop run
    // fix: strncpy does not null-terminate when the source fills the buffer,
    // so copy at most size-1 bytes and force a terminator; otherwise later
    // reads of rtmp_url could run past the 1024-byte array.
    strncpy(video_audio_decode.rtmp_url,
            ui->lineEdit_rtmp_url->text().toLocal8Bit().data(),
            sizeof(video_audio_decode.rtmp_url)-1);
    video_audio_decode.rtmp_url[sizeof(video_audio_decode.rtmp_url)-1]='\0';
    // start the worker thread (QThread::start is a no-op if already running)
    thread_laliu.start();
}
ui界面設計圖:
下面公衆號裏有全套QT\C++\C、單片機基礎教程,歡迎關注: