一、環境介紹
ubuntu版本: VM虛擬機運行ubuntu18.04 64位
OpenCV版本: 3.4.9
QT版本: 5.12
OpenCV在ubuntu下的編譯方法:https://blog.csdn.net/xiaolong1126626497/article/details/105278882
二、程序思路說明
程序功能: 在子線程裏打開攝像頭,獲取攝像頭的數據,通過信號與槽的方式,將攝像頭數據傳遞給主UI界面實時顯示,再採用定時器每100ms取一次標籤上的數據進行人臉檢測處理,將處理後的數據再顯示到另一個標籤上。人臉檢測分類器採用OpenCV自帶的分類器,程序主要目的是介紹OpenCV配合QT如何進行開發。
三、實現效果
四、核心代碼
xxx.pro工程文件:
QT += core gui
QT += multimediawidgets
QT += xml
QT += multimedia
QT += network
QT += serialport
# 'widgets' is added exactly once via the version guard below (this project
# requires Qt 5 anyway); the original also listed an unconditional
# "QT += widgets", duplicating the module.
greaterThan(QT_MAJOR_VERSION, 4): QT += widgets
CONFIG += c++11
# The following define makes your compiler emit warnings if you use
# any Qt feature that has been marked deprecated (the exact warnings
# depend on your compiler). Please consult the documentation of the
# deprecated API in order to know how to port your code away from it.
DEFINES += QT_DEPRECATED_WARNINGS
# You can also make your code fail to compile if it uses deprecated APIs.
# In order to do so, uncomment the following line.
# You can also select to disable deprecated APIs only up to a certain version of Qt.
#DEFINES += QT_DISABLE_DEPRECATED_BEFORE=0x060000 # disables all the APIs deprecated before Qt 6.0.0
SOURCES += \
main.cpp \
widget.cpp
HEADERS += \
widget.h
FORMS += \
widget.ui
# Default rules for deployment.
qnx: target.path = /tmp/$${TARGET}/bin
else: unix:!android: target.path = /opt/$${TARGET}/bin
!isEmpty(target.path): INSTALLS += target
# Linux-only build settings (OpenCV paths)
linux {
# OpenCV header search paths - adjust to match your own installation prefix
INCLUDEPATH+=/home/wbyq/work_pc/opencv-3.4.9/_install/install/include\
/home/wbyq/work_pc/opencv-3.4.9/_install/install/include/opencv\
/home/wbyq/work_pc/opencv-3.4.9/_install/install/include/opencv2
# OpenCV shared libraries from the same installation prefix
LIBS+=/home/wbyq/work_pc/opencv-3.4.9/_install/install/lib/libopencv_calib3d.so
LIBS+=/home/wbyq/work_pc/opencv-3.4.9/_install/install/lib/libopencv_core.so
LIBS+=/home/wbyq/work_pc/opencv-3.4.9/_install/install/lib/libopencv_dnn.so
LIBS+=/home/wbyq/work_pc/opencv-3.4.9/_install/install/lib/libopencv_features2d.so
LIBS+=/home/wbyq/work_pc/opencv-3.4.9/_install/install/lib/libopencv_flann.so
LIBS+=/home/wbyq/work_pc/opencv-3.4.9/_install/install/lib/libopencv_highgui.so
LIBS+=/home/wbyq/work_pc/opencv-3.4.9/_install/install/lib/libopencv_imgcodecs.so
LIBS+=/home/wbyq/work_pc/opencv-3.4.9/_install/install/lib/libopencv_imgproc.so
LIBS+=/home/wbyq/work_pc/opencv-3.4.9/_install/install/lib/libopencv_ml.so
LIBS+=/home/wbyq/work_pc/opencv-3.4.9/_install/install/lib/libopencv_objdetect.so
LIBS+=/home/wbyq/work_pc/opencv-3.4.9/_install/install/lib/libopencv_photo.so
LIBS+=/home/wbyq/work_pc/opencv-3.4.9/_install/install/lib/libopencv_shape.so
LIBS+=/home/wbyq/work_pc/opencv-3.4.9/_install/install/lib/libopencv_stitching.so
LIBS+=/home/wbyq/work_pc/opencv-3.4.9/_install/install/lib/libopencv_superres.so
LIBS+=/home/wbyq/work_pc/opencv-3.4.9/_install/install/lib/libopencv_videoio.so
LIBS+=/home/wbyq/work_pc/opencv-3.4.9/_install/install/lib/libopencv_video.so
}
RESOURCES += \
image.qrc
widget.cpp文件代碼:
#include "widget.h"
#include "ui_widget.h"
// Global shared state: holds the QCameraInfo the user picked in the UI.
// Defined here and declared extern in widget.h so the capture worker
// (VideoReadThread_0::Camear_Init) can read it.
class VideoAudioEncode videoaudioencode_0;
// Constructor: wires up the capture worker thread, the signal/slot plumbing
// between worker and UI, and the 100 ms face-detection timer.
Widget::Widget(QWidget *parent)
    : QWidget(parent)
    , ui(new Ui::Widget)
{
    ui->setupUi(this);
    // Cab camera: the capture object lives in its own QThread and talks to
    // the UI thread exclusively through queued signals/slots.
    videoRead_WorkClass_0 = new VideoReadThread_0;
    videoRead_Workthread_0 = new QThread;
    // Frames produced by the worker are displayed on the main UI thread.
    connect(videoRead_WorkClass_0,SIGNAL(VideoDataOutput(QImage )),this,SLOT(VideoDataDisplay_0(QImage )));
    // Camera initialisation request (queued so run() executes in the worker thread).
    connect(this,SIGNAL(Init_VideoAudio_WorkClass_0()),videoRead_WorkClass_0,SLOT(run()));
    // Stop-capture request.
    connect(this,SIGNAL(Stop_AudioVideo0_work_0()),videoRead_WorkClass_0,SLOT(stop()));
    // Move the worker object into the capture thread.
    videoRead_WorkClass_0->moveToThread(videoRead_Workthread_0);
    // Populate the camera device list.
    UpdateVideoAudiodDevice(ui->comboBox_video_0,ui->plainTextEdit_log_0);
    // Run face detection on the displayed frame every 100 ms.
    // (The original called timer.start(100) twice, once before the connect;
    // starting it once, after the connection exists, is sufficient.)
    connect(&timer,SIGNAL(timeout()), this, SLOT(update()));
    timer.start(100);
}
// Destructor: stop the face-detection timer and shut the capture thread down
// cleanly before the UI is destroyed. The original only deleted `ui`, leaving
// the worker thread running and leaking the worker/thread objects.
Widget::~Widget()
{
    timer.stop();
    if(videoRead_Workthread_0)
    {
        videoRead_Workthread_0->quit(); // ask the thread's event loop to exit
        videoRead_Workthread_0->wait(); // block until it has finished
        delete videoRead_Workthread_0;
    }
    delete videoRead_WorkClass_0;
    delete ui;
}
// Path of the Haar cascade classifier (ships with OpenCV).
#define source_xml_addr "/home/wbyq/work_pc/opencv-3.4.9/data/haarcascades_cuda/haarcascade_frontalface_alt2.xml"
// Path of a still picture usable for offline testing (kept for reference).
#define source_pix_addr "/mnt/hgfs/linux-share-dir/1.jpg"
// Face detection: run the Haar cascade over one frame, draw a rectangle and a
// circle around every hit, and show the annotated frame on label_display.
// Called from Widget::update() every 100 ms.
void Widget::opencv_face(QImage qImage)
{
    QTime time;
    time.start();
    // Classifier and scratch storage are created once and reused across
    // calls. The original re-ran cvLoad()/cvCreateMemStorage() on every
    // invocation (10x per second), leaking the previous allocations.
    static CvMemStorage* storage = 0;
    static CvHaarClassifierCascade* cascade = 0;
    if( !cascade )
    {
        const char* cascade_name = source_xml_addr;
        cascade = (CvHaarClassifierCascade*)cvLoad( cascade_name, 0, 0, 0 );
    }
    if( !cascade )
    {
        Log_Display_0("分類器加載錯誤.\n");
        return ;
    }
    if( !storage )
        storage = cvCreateMemStorage(0);
    // Convert the incoming frame to a BGR IplImage (we own the result).
    IplImage* img = QImageToIplImage(&qImage);
    if( img == nullptr )
    {
        Log_Display_0("圖片加載錯誤.\n");
        return;
    }
    double scale = 1.2;
    // A few distinct colours so adjacent detections are distinguishable.
    static CvScalar colors[] = {
        {{0,0,255}},{{0,128,255}},{{0,255,255}},{{0,255,0}},
        {{255,128,0}},{{255,255,0}},{{255,0,0}},{{255,0,255}}
    };
    // Detect on a grayscale, down-scaled, histogram-equalised copy for speed.
    IplImage* gray = cvCreateImage(cvSize(img->width,img->height),8,1);
    IplImage* small_img = cvCreateImage(cvSize(cvRound(img->width/scale),cvRound(img->height/scale)),8,1);
    cvCvtColor(img, gray, CV_BGR2GRAY);
    cvResize(gray, small_img, CV_INTER_LINEAR);
    cvEqualizeHist(small_img, small_img); // histogram equalisation
    cvClearMemStorage(storage);           // reuse the storage instead of leaking it
    double t = (double)cvGetTickCount();
    CvSeq* objects = cvHaarDetectObjects(small_img,
                                         cascade,
                                         storage,
                                         1.1, // scale step between pyramid levels
                                         2,   // min neighbours required to accept a hit
                                         0/*CV_HAAR_DO_CANNY_PRUNING*/,
                                         cvSize(30,30));
    t = (double)cvGetTickCount() - t;
    // Rectangle around every detection (coordinates scaled back to full size).
    for(int i = 0; i < (objects ? objects->total : 0); ++i)
    {
        CvRect* r = (CvRect*)cvGetSeqElem(objects, i);
        cvRectangle(img, cvPoint(r->x*scale,r->y*scale), cvPoint((r->x+r->width)*scale,(r->y+r->height)*scale), colors[i%8]);
    }
    // Circle centred on every detection.
    for( int i = 0; i < (objects ? objects->total : 0); i++ )
    {
        CvRect* r = (CvRect*)cvGetSeqElem( objects, i );
        CvPoint center;
        int radius;
        center.x = cvRound((r->x + r->width*0.5)*scale);
        center.y = cvRound((r->y + r->height*0.5)*scale);
        radius = cvRound((r->width + r->height)*0.25*scale);
        cvCircle(img, center, radius, colors[i%8], 3, 8, 0 );
    }
    show_face(img); // display the annotated frame
    cvReleaseImage(&gray);
    cvReleaseImage(&small_img);
    cvReleaseImage(&img);
    Log_Display_0(tr("耗時:%1 ms\n").arg(time.elapsed()));
}
/* Convert a QImage into a newly allocated 3-channel BGR IplImage.
   The caller owns the returned image and must release it with cvReleaseImage(). */
IplImage *Widget::QImageToIplImage(const QImage * qImage)
{
    const int w = qImage->width();
    const int h = qImage->height();
    CvSize dims;
    dims.width = w;
    dims.height = h;
    IplImage *converted = cvCreateImage(dims, IPL_DEPTH_8U, 3);
    // Copy pixel by pixel, swapping into OpenCV's B,G,R channel order.
    for (int row = 0; row < h; ++row)
    {
        for (int col = 0; col < w; ++col)
        {
            const QRgb px = qImage->pixel(col, row);
            CV_IMAGE_ELEM( converted, uchar, row, col*3+0 ) = qBlue(px);
            CV_IMAGE_ELEM( converted, uchar, row, col*3+1 ) = qGreen(px);
            CV_IMAGE_ELEM( converted, uchar, row, col*3+2 ) = qRed(px);
        }
    }
    return converted;
}
/* Convert an OpenCV BGR IplImage into a heap-allocated QImage in RGB order.
   The caller owns the returned QImage and must delete it. */
QImage *Widget::IplImageToQImage(IplImage *img)
{
    uchar *imgData=(uchar *)img->imageData;
    // Pass widthStep explicitly: IplImage rows are padded to 4-byte
    // boundaries, which the stride-less QImage constructor cannot know about
    // (the original relied on width*3 happening to match the row stride).
    QImage *qmg = new QImage(imgData, img->width, img->height, img->widthStep, QImage::Format_RGB888);
    // rgbSwapped() makes a deep copy, so the result no longer aliases img.
    *qmg = qmg->rgbSwapped(); // BGR -> RGB
    return qmg;
}
// Show the annotated detection result on label_display.
void Widget::show_face(IplImage* img)
{
    uchar *imgData=(uchar *)img->imageData;
    // widthStep is passed explicitly: IplImage rows are padded to 4-byte
    // boundaries, which the stride-less QImage constructor cannot know about.
    QImage my_image = QImage(imgData, img->width, img->height, img->widthStep, QImage::Format_RGB888);
    my_image = my_image.rgbSwapped(); // BGR -> RGB (deep copies, so img may be released afterwards)
    QPixmap my_pix;
    my_pix.convertFromImage(my_image);
    /* Display on the label */
    ui->label_display->setPixmap(my_pix);
}
//開始採集
void Widget::on_pushButton_Start_clicked()
{
//設置當前選擇的攝像頭
videoaudioencode_0.camera=video_dev_list.at(ui->comboBox_video_0->currentIndex());
Stop_VideoAudioEncode_0(true);
Start_VideoAudioEncode_Thread_0();
}
// Destructor: camera/probe teardown happens in stop(); nothing to do here.
// NOTE(review): if the object is destroyed without stop() having been
// signalled, `camera` and `m_pProbe` leak - confirm stop() always runs first.
VideoReadThread_0::~VideoReadThread_0()
{
}
// Stop capturing: shut the camera down and release both the camera object
// and the frame probe. Safe to call when nothing is open (pointers are null).
void VideoReadThread_0::stop()
{
    qDebug()<<"停止視頻採集--stop";
    if(camera != nullptr)
    {
        camera->stop();
        delete camera;
        camera = nullptr;
    }
    if(m_pProbe != nullptr)
    {
        delete m_pProbe;
        m_pProbe = nullptr;
    }
}
// Worker entry slot: (re)initialise the camera. Invoked through the
// Init_VideoAudio_WorkClass_0() signal so it executes in the capture thread,
// not the GUI thread.
void VideoReadThread_0::run()
{
stop(); // release any previous camera/probe before opening again
Camear_Init();
qDebug()<<"攝像頭開始採集數據";
}
// Open the camera the user selected and attach a QVideoProbe so every
// captured frame is delivered to slotOnProbeFrame(). Runs in the worker thread.
void VideoReadThread_0::Camear_Init()
{
/* Create the camera object for the device chosen in the UI
   (videoaudioencode_0.camera is set by Widget::on_pushButton_Start_clicked). */
camera = new QCamera(videoaudioencode_0.camera);
m_pProbe = new QVideoProbe;
if(m_pProbe != nullptr)
{
m_pProbe->setSource(camera); // Returns true, hopefully.
connect(m_pProbe, SIGNAL(videoFrameProbed(QVideoFrame)),this, SLOT(slotOnProbeFrame(QVideoFrame)), Qt::QueuedConnection);
}
/* Configure the capture mode (the original comment here was garbled by a
   stray paste of two member declarations). */
//camera->setCaptureMode(QCamera::CaptureStillImage); // per the original author: use this on Linux
camera->setCaptureMode(QCamera::CaptureVideo);// per the original author: use this on Android
/* Start the camera */
camera->start();
/* Request pixel format and resolution.
   NOTE(review): the settings are applied after start() - confirm the driver
   honours viewfinder settings set on an already-started camera. */
QCameraViewfinderSettings settings;
settings.setPixelFormat(QVideoFrame::Format_YUYV); // pixel format; Android reportedly only supports NV21
settings.setResolution(QSize(VIDEO_WIDTH,VIDEO_HEIGHT)); // capture resolution
camera->setViewfinderSettings(settings);
}
/**
 * Convert one NV21 frame (the default Android camera format) to packed
 * 24-bit pixels stored in B,G,R byte order, rows top-to-bottom.
 *
 * The original carried an unused `index` computation and three blocks of
 * commented-out alternative stores; both removed. Output is byte-for-byte
 * identical to the original's active path.
 *
 * @param yuyv   source NV21 buffer: width*height luma bytes followed by the
 *               interleaved chroma plane (one U/V pair per 2x2 pixel block)
 * @param rgb    destination buffer, width*height*3 bytes
 * @param width  frame width in pixels (expected even)
 * @param height frame height in pixels (expected even)
 */
void NV21_TO_RGB24(unsigned char *yuyv, unsigned char *rgb, int width, int height)
{
    const int nv_start = width * height; // offset of the interleaved chroma plane
    for(int i = 0; i < height; i++){
        for(int j = 0; j < width; j++){
            // Each 2x2 block of pixels shares one chroma pair.
            const int nv_index = i / 2 * width + j - j % 2;
            const int y = yuyv[i * width + j];
            const int u = yuyv[nv_start + nv_index];
            const int v = yuyv[nv_start + nv_index + 1];
            // Integer YUV->RGB conversion with coefficients scaled by 100.
            int r = y + (140 * (v - 128)) / 100;
            int g = y - (34 * (u - 128)) / 100 - (71 * (v - 128)) / 100;
            int b = y + (177 * (u - 128)) / 100;
            if(r > 255) r = 255;
            if(g > 255) g = 255;
            if(b > 255) b = 255;
            if(r < 0) r = 0;
            if(g < 0) g = 0;
            if(b < 0) b = 0;
            // Bytes are stored B,G,R - the downstream pipeline swaps to RGB.
            rgb[(i * width + j) * 3 + 0] = b;
            rgb[(i * width + j) * 3 + 1] = g;
            rgb[(i * width + j) * 3 + 2] = r;
        }
    }
}
/*
 * Convert a YUYV (YUY2) buffer to packed RGB24.
 *
 * yuv_buffer : source YUYV data (4 bytes describe 2 horizontally adjacent pixels)
 * rgb_buffer : destination, iWidth*iHeight*3 bytes, stored R,G,B per pixel
 * iWidth, iHeight : frame dimensions in pixels
 */
void yuyv_to_rgb(unsigned char *yuv_buffer,unsigned char *rgb_buffer,int iWidth,int iHeight)
{
    unsigned char *out = rgb_buffer;
    const unsigned char *in = yuv_buffer;
    const int pixel_count = iWidth * iHeight;
    int second = 0; // 0 -> first pixel of a YUYV pair, 1 -> second
    for (int px = 0; px < pixel_count; ++px)
    {
        // Y0 and Y1 each get their own luma; the U/V pair is shared.
        const int y = (second ? in[2] : in[0]) << 8;
        const int u = in[1] - 128;
        const int v = in[3] - 128;
        int r = (y + 359 * v) >> 8;
        int g = (y - 88 * u - 183 * v) >> 8;
        int b = (y + 454 * u) >> 8;
        // Clamp each channel into [0, 255].
        if (r < 0) r = 0; else if (r > 255) r = 255;
        if (g < 0) g = 0; else if (g > 255) g = 255;
        if (b < 0) b = 0; else if (b > 255) b = 255;
        *out++ = (unsigned char)r;
        *out++ = (unsigned char)g;
        *out++ = (unsigned char)b;
        // Advance to the next 4-byte macropixel after its second pixel.
        if (second) { in += 4; second = 0; }
        else        { second = 1; }
    }
}
void VideoReadThread_0::slotOnProbeFrame(const QVideoFrame &frame)
{
QVideoFrame cloneFrame(frame);
cloneFrame.map(QAbstractVideoBuffer::ReadOnly);
//qDebug()<<"height:"<<cloneFrame.height();
//qDebug()<<"width:"<<cloneFrame.width();
//qDebug()<<"bytesPerLine:"<<cloneFrame.bytesPerLine();
//qDebug()<<"mappedBytes:"<<cloneFrame.mappedBytes();
//qDebug()<<"pixelFormat:"<<cloneFrame.pixelFormat();
unsigned char rgb_buffer[VIDEO_WIDTH*VIDEO_HEIGHT*3];
if(cloneFrame.pixelFormat()==QVideoFrame::Format_NV21)
{
NV21_TO_RGB24(cloneFrame.bits(),rgb_buffer,cloneFrame.width(),cloneFrame.height());
}
else if(cloneFrame.pixelFormat()==QVideoFrame::Format_YUYV)
{
yuyv_to_rgb(cloneFrame.bits(),rgb_buffer,cloneFrame.width(),cloneFrame.height());
}
else
{
qDebug("當前格式編碼爲%1,暫時不支持轉換.\n");
}
cloneFrame.unmap();
//加載圖片數據
QImage image(rgb_buffer,
cloneFrame.width(),
cloneFrame.height(),
QImage::Format_RGB888);
//繪製圖片水印
QDateTime dateTime(QDateTime::currentDateTime());
//時間效果: 2020-03-05 16:25::04 週一
QString qStr="";
qStr+=dateTime.toString("yyyy-MM-dd hh:mm:ss ddd");
QPainter pp(&image);
QPen pen = QPen(Qt::white);
pp.setPen(pen);
pp.drawText(QPointF(0,20),qStr);
//提取RGB數據
unsigned char *p=rgb_buffer;
for(int i=0;i<image.height();i++)
{
for(int j=0;j<image.width();j++)
{
QRgb rgb=image.pixel(j,i);
*p++=qRed(rgb);
*p++=qGreen(rgb);
*p++=qBlue(rgb);
}
}
emit VideoDataOutput(image); //發送信號
}
// Cab camera: GUI-thread slot showing the latest captured frame.
void Widget::VideoDataDisplay_0(QImage image)
{
    // Convert and display on the preview label.
    ui->label_ImageDisplay_0->setPixmap(QPixmap::fromImage(image));
}
// Cab camera: append text to the cab log widget.
void Widget::Log_Display_0(QString text)
{
Log_Text_Display(ui->plainTextEdit_log_0,text);
}
/* Append text to the given log widget and keep it scrolled to the bottom. */
void Widget::Log_Text_Display(QPlainTextEdit *plainTextEdit_log,QString text)
{
    plainTextEdit_log->insertPlainText(text);
    // Keep the most recent line visible.
    QScrollBar *const bar = plainTextEdit_log->verticalScrollBar();
    if(bar != nullptr)
        bar->setSliderPosition(bar->maximum());
}
// Cab camera: start the capture thread and ask the worker to open the camera.
void Widget::Start_VideoAudioEncode_Thread_0()
{
videoRead_Workthread_0->start(); //start the capture thread's event loop
Init_VideoAudio_WorkClass_0(); //emit the init signal (queued into the worker thread)
}
// Cab camera: stop the capture thread. `flag` selects whether to actually
// stop (true) or do nothing (false).
void Widget::Stop_VideoAudioEncode_0(bool flag)
{
if(flag==true)
{
Stop_AudioVideo0_work_0(); //emit the signal asking the worker to stop the camera
// NOTE(review): the fixed 10 ms sleep is meant to let the queued stop() slot
// run before the event loop quits - that is a race; a blocking connection or
// an acknowledgement signal would be deterministic. Confirm before shipping.
QThread::msleep(10);
//quit the capture thread
videoRead_Workthread_0->quit(); //ask the thread's event loop to exit with return code 0
videoRead_Workthread_0->wait(); //block until the thread has finished
}
}
// "Refresh devices" button: re-scan the available cameras.
void Widget::on_pushButton_update_clicked()
{
UpdateVideoAudiodDevice(ui->comboBox_video_0,ui->plainTextEdit_log_0);
}
/* Refresh the list of usable camera devices on this machine into the combo
   box, logging a message when none are found. */
void Widget::UpdateVideoAudiodDevice(
QComboBox *comboBox_video,
QPlainTextEdit *plainTextEdit_log)
{
    /* Query the cameras known to Qt and rebuild the UI list. */
    video_dev_list.clear();
    comboBox_video->clear();
    video_dev_list = QCameraInfo::availableCameras();
    for (const QCameraInfo &info : video_dev_list)
    {
        comboBox_video->addItem(info.deviceName());
    }
    /* Tell the user when no camera device is available. */
    if (video_dev_list.isEmpty())
    {
        Log_Text_Display(plainTextEdit_log,"未查詢到可用的攝像頭設備.\n");
    }
}
// "Stop capture" button.
void Widget::on_pushButton_stop_clicked()
{
Stop_VideoAudioEncode_0(true);
}
// Timer slot (fires every 100 ms): run face detection on whatever frame is
// currently shown on label_ImageDisplay_0.
// NOTE(review): this slot shadows QWidget::update(); it is reached via the
// string-based SLOT(update()) connection in the constructor, so renaming it
// would require updating that connect() too.
void Widget::update()
{
if(ui->label_ImageDisplay_0->pixmap())
opencv_face(ui->label_ImageDisplay_0->pixmap()->toImage());
}
widget.h文件代碼:
#ifndef WIDGET_H
#define WIDGET_H
#include <QWidget>
//opencv include
#include <cv.h>
#include <cxcore.h>
#include <highgui.h>
#include <QCamera>
#include <QVideoProbe>
#include <QAbstractVideoSurface>
#include <QVideoProbe>
#include <QThread>
#include <QApplication>
#include <QDebug>
#include <QObject>
#include <QMutex>
#include <QMutexLocker>
#include <QWaitCondition>
#include <QQueue>
#include <QCamera>
#include <QPen>
#include <QPainter>
#include <QRgb>
#include <QDateTime>
#include <QCameraInfo>
#include <QPlainTextEdit>
#include <QScrollBar>
#include <QComboBox>
#include <QTimer>
QT_BEGIN_NAMESPACE
namespace Ui { class Widget; }
QT_END_NAMESPACE
// Main window: owns the UI, the 100 ms face-detection timer, and the camera
// capture worker/thread pair.
class Widget : public QWidget
{
Q_OBJECT
public:
Widget(QWidget *parent = nullptr);
// Run Haar-cascade face detection on one frame and display the result.
void opencv_face(QImage qImage);
~Widget();
QList<QCameraInfo> video_dev_list; // cameras found by the last device scan
// Show an annotated detection result on label_display.
void show_face(IplImage* img);
class VideoReadThread_0 *videoRead_WorkClass_0; //capture worker object
QThread *videoRead_Workthread_0; //thread the worker runs in
/* Cab camera control */
void Stop_VideoAudioEncode_0(bool flag);//stop the capture thread
void Start_VideoAudioEncode_Thread_0(); //start the capture thread
// Append text to a log widget and keep it scrolled to the bottom.
void Log_Text_Display(QPlainTextEdit *plainTextEdit_log,QString text);
// Re-scan the available cameras into the combo box.
void UpdateVideoAudiodDevice(QComboBox *comboBox_video, QPlainTextEdit *plainTextEdit_log);
// QImage -> new BGR IplImage (caller releases).
IplImage *QImageToIplImage(const QImage * qImage);
// IplImage -> new heap-allocated QImage (caller deletes).
QImage *IplImageToQImage(IplImage *img);
QTimer timer; // fires every 100 ms to trigger face detection
signals:
void Init_VideoAudio_WorkClass_0(); // ask the worker to open the camera
void Stop_AudioVideo0_work_0(); // ask the worker to stop capturing
private slots:
// Timer slot; shadows QWidget::update() (connected by name in the ctor).
void update();
void on_pushButton_Start_clicked();
void Log_Display_0(QString text);
void VideoDataDisplay_0(QImage );
void on_pushButton_update_clicked();
void on_pushButton_stop_clicked();
private:
Ui::Widget *ui;
};
// Capture worker: lives in its own QThread, owns the QCamera and the
// QVideoProbe, and emits each converted frame through VideoDataOutput().
class VideoReadThread_0:public QObject
{
Q_OBJECT
public:
QCamera *camera; // owned; created in Camear_Init(), freed in stop()
QVideoProbe *m_pProbe; // owned; taps the camera's frame stream
VideoReadThread_0(QObject* parent=nullptr):QObject(parent){camera=nullptr;m_pProbe=nullptr;}
~VideoReadThread_0();
void Camear_Init(void);
public slots:
void stop(); // release the camera and the probe
void run(); // (re)initialise the camera; executes in the worker thread
void slotOnProbeFrame(const QVideoFrame &frame); // one captured frame arrived
signals:
void VideoDataOutput(QImage); // converted, watermarked frame for the GUI
};
// Shared settings between the GUI and the capture worker.
class VideoAudioEncode
{
public:
QCameraInfo camera; // camera currently selected in the UI
};
// Video capture size requested from the camera (also sizes the conversion buffer).
#define VIDEO_WIDTH 640
#define VIDEO_HEIGHT 480
extern class VideoAudioEncode videoaudioencode_0;
#endif // WIDGET_H
main.cpp文件代碼:
#include "widget.h"
#include <QApplication>
// Application entry point: create the Qt application, show the main window,
// and enter the event loop.
int main(int argc, char *argv[])
{
QApplication a(argc, argv);
Widget w;
w.show();
return a.exec();
}
UI界面:
下面公衆號裏有全套單片機、QT\C\C++教程(歡迎關注):