-
在搭建完成opencv 的環境之後,我們開始記錄一些簡單的圖像處理的過程:
以上是合成之後的效果,先看效果圖。
接下來是具體實現的步驟,下面的代碼可以直接拿來使用。
//讀取三張圖片
QString imgqstrR = QStringLiteral("%1%2%3.bmp").arg(QCoreApplication::applicationDirPath()).arg("\\FrameDirec\\")
.arg(137);
QString imgqstrG = QStringLiteral("%1%2%3.bmp").arg(QCoreApplication::applicationDirPath()).arg("\\FrameDirec\\")
.arg(138);
QString imgqstrB = QStringLiteral("%1%2%3.bmp").arg(QCoreApplication::applicationDirPath()).arg("\\FrameDirec\\")
.arg(139);
//圖像處理步驟,色彩還原步驟
//1 提取圖像的亮度 r g b 的各個亮度.
double brightness_r = _qimage2MatInteroperate->getAvg(imgqstrR);
double brightness_g = _qimage2MatInteroperate->getAvg(imgqstrG);
double brightness_b = _qimage2MatInteroperate->getAvg(imgqstrB);
//求取亮度平均數值
double ageRgb = (brightness_r + brightness_g + brightness_b) / 3;
cv::Mat dst_r;
cv::Mat dst_g;
cv::Mat dst_b;
//重新設置亮度.
_qimage2MatInteroperate->setAvg(imgqstrR, dst_r, ageRgb);
_qimage2MatInteroperate->setAvg(imgqstrG, dst_g, ageRgb);
_qimage2MatInteroperate->setAvg(imgqstrB, dst_b, ageRgb);
//查看其亮度直方圖.
cv::Mat histogram_R, histogram_g, histogram_b;
_qimage2MatInteroperate->DrawHistogramRectangular(dst_r, histogram_R, Scalar(255), true, "hisogramR");
_qimage2MatInteroperate->DrawHistogramRectangular(dst_g, histogram_g, Scalar(255), true, "hisogramG");
_qimage2MatInteroperate->DrawHistogramRectangular(dst_b, histogram_b, Scalar(255), true, "hisogramB");
//測試平均之後的亮度. 測試,RGB三色灰度圖經過平均之後,其整體亮度差距不是很大
brightness_r = _qimage2MatInteroperate->getAvg(dst_r);
brightness_g = _qimage2MatInteroperate->getAvg(dst_g);
brightness_b = _qimage2MatInteroperate->getAvg(dst_b);
qDebug() << QString("%1 %2 %3").arg(brightness_r).arg(brightness_g).arg(brightness_b);
//3 將亮度平衡之後的圖片,合成一張圖片.
cv::Mat outDst, outDst2;
_qimage2MatInteroperate->mergeRGB(dst_r, dst_g, dst_b, outDst, outDst2, true);
//////////////////////////////////////////////////接下來是上面 _qimage2MatInteroperate 這個對象所調用的一些封裝好的算法
////可以直接使用
/////繪製直方圖
///Draws a 256-bin grayscale histogram of srcMat into dst (a size x size 8-bit image).
///@param srcMat    input image; channel 0 is used for the histogram
///@param dst       output image containing the rendered histogram bars
///@param color     line color for the bars
///@param isShowWnd when true, also shows the source image and histogram in HighGUI windows
///@param title     window title (also prefixes the source-image window name)
///@return 0 in all cases (kept for backward compatibility; the empty-input path logs and returns)
int Qimage2MatInteroperate::DrawHistogramRectangular(cv::Mat & srcMat, cv::Mat &dst, Scalar color, bool isShowWnd, QString title)
{
	std::string titlestrPath = std::string((const char*)title.toLocal8Bit());
	cv::Mat srcImage = srcMat.clone();
	if (!srcImage.data)
	{
		std::cout << "fail to load image" << endl;
		return 0;
	}
	//Histogram parameters: 1-D, 256 bins over [0, 256), computed on channel 0.
	Mat dstHist;
	int dims = 1;
	float hranges[] = { 0, 256 };
	const float *ranges[] = { hranges }; //calcHist requires a const float* array
	int size = 256;
	int channels = 0;
	calcHist(&srcImage, 1, &channels, Mat(), dstHist, dims, &size, ranges);
	Mat dstImage(size, size, CV_8U, Scalar(0));
	//Find the tallest bin so the plot can be scaled to the image height.
	double minValue = 0;
	double maxValue = 0;
	minMaxLoc(dstHist, &minValue, &maxValue, 0, 0);
	//Guard: an all-zero histogram would otherwise divide by zero below.
	if (maxValue <= 0)
	{
		maxValue = 1;
	}
	//Scale bars to at most 90% of the image height; saturate_cast clamps negatives
	//to 0 and values above the int range to its maximum.
	int hpt = saturate_cast<int>(0.9 * size);
	//Iterate over the actual bin count instead of a hard-coded 256.
	for (int i = 0; i < size; i++)
	{
		float binValue = dstHist.at<float>(i); //calcHist stores bin counts as float
		//Stretch bin count to [0, hpt].
		int realValue = saturate_cast<int>(binValue * hpt / maxValue);
		line(dstImage, Point(i, size - 1), Point(i, size - realValue), color);
	}
	if (isShowWnd == true)
	{
		imshow("原圖" + titlestrPath, srcImage);
		imshow(titlestrPath, dstImage);
	}
	dst = dstImage.clone();
	return 0;
}
///Loads an image from disk and linearly rescales its pixel values so the mean
///gray level equals `avg`.
///@param scr  path to the image file (forward slashes are normalized to backslashes)
///@param dst  output image, same type as the source, scaled by avg / currentMean
///@param avg  target mean brightness
///Returns silently (dst untouched) if the file cannot be read.
void Qimage2MatInteroperate::setAvg(QString scr, Mat &dst, double avg)
{
	scr.replace("/", "\\");
	std::string path = std::string((const char*)scr.toLocal8Bit());
	cv::Mat source = imread(path);
	if (!source.data)
	{
		return;
	}
	double fpreAvg = getAvg(source);
	//Guard: an all-black image has mean 0 and would divide by zero below.
	if (fpreAvg <= 0)
	{
		dst = source.clone();
		return;
	}
	//Linear gain so that mean(dst) == avg; convertTo saturates to the valid range.
	source.convertTo(dst, source.type(), avg / fpreAvg);
}
///Returns the mean brightness of an image (mean of its grayscale version).
///Single-channel input is averaged directly; an empty image yields 0.
double Qimage2MatInteroperate::getAvg(Mat img)
{
	if (img.empty())
	{
		return 0.0;
	}
	Mat gray;
	if (img.channels() == 1)
	{
		//Already grayscale; cvtColor would assert on a 1-channel input.
		gray = img;
	}
	else
	{
		//imread loads pixels in BGR order, so BGR2GRAY is the correct conversion.
		//(The original used CV_RGB2GRAY, which swaps the R and B luminance weights.)
		cvtColor(img, gray, CV_BGR2GRAY);
	}
	Scalar scalar = mean(gray);
	return scalar.val[0];
}
///Evens out non-uniform illumination by subtracting a smoothed per-block
///brightness deviation from the image, in place (adapted from a public reference).
///@param image     input/output image; replaced by the compensated 8-bit gray image
///@param blockSize side length in pixels of the averaging blocks
void Qimage2MatInteroperate::unevenLightCompensate(Mat & image, int blockSize)
{
	//Guard against inputs that would crash the block loop below.
	if (image.empty() || blockSize <= 0) return;
	//Work on a single-channel image. CV_RGB2GRAY keeps the original behavior
	//(the magic number 7); NOTE(review): for imread-loaded BGR data CV_BGR2GRAY
	//would be the strictly correct code — confirm against callers.
	if (image.channels() == 3) cvtColor(image, image, CV_RGB2GRAY);
	double average = mean(image)[0];
	//Number of blocks in each direction; the last block may be truncated at the edge.
	int rows_new = ceil(double(image.rows) / double(blockSize));
	int cols_new = ceil(double(image.cols) / double(blockSize));
	//Per-block mean brightness.
	Mat blockImage;
	blockImage = Mat::zeros(rows_new, cols_new, CV_32FC1);
	for (int i = 0; i < rows_new; i++)
	{
		for (int j = 0; j < cols_new; j++)
		{
			int rowmin = i * blockSize;
			int rowmax = (i + 1) * blockSize;
			if (rowmax > image.rows) rowmax = image.rows;
			int colmin = j * blockSize;
			int colmax = (j + 1) * blockSize;
			if (colmax > image.cols) colmax = image.cols;
			Mat imageROI = image(Range(rowmin, rowmax), Range(colmin, colmax));
			double temaver = mean(imageROI)[0];
			blockImage.at<float>(i, j) = temaver;
		}
	}
	//Deviation of each block from the global mean, upsampled back to full size.
	blockImage = blockImage - average;
	Mat blockImage2;
	//The original passed "(0, 0), (0, 0)" here: the comma operator collapsed each
	//to 0, which happened to be the intended fx/fy defaults. Pass 0, 0 explicitly.
	resize(blockImage, blockImage2, image.size(), 0, 0, INTER_CUBIC);
	Mat image2;
	image.convertTo(image2, CV_32FC1);
	//Subtract the local brightness deviation, then saturate back to 8-bit.
	Mat dst = image2 - blockImage2;
	dst.convertTo(image, CV_8UC1);
}
///Merges three grayscale channel images into one color image and, when IsWhite
///is set, additionally runs uneven-light compensation, per-channel color
///balance, median filtering and a sharpening mask to build a second result.
///Both results are shown in windows and written to "balance1.bmp"/"balance2.bmp".
///@param imgqR/imgqG/imgqB  single-channel source images for the three channels
///@param outMergeDst        receives the plain merged image
///@param outMergeBalance    receives the balanced/sharpened image (garbage-filled
///                          buffer copy when IsWhite is false, matching the display)
///@param IsWhite            when true, run the color-balance/sharpen pipeline
void Qimage2MatInteroperate::mergeRGB(cv::Mat imgqR, cv::Mat imgqG, cv::Mat imgqB, cv::Mat &outMergeDst, cv::Mat &outMergeBalance, bool IsWhite)
{
	//Round-trip the inputs through disk so the legacy C API can reload them as
	//8-bit grayscale regardless of their original depth.
	const char* str_R = ".\\imgqr.bmp";
	const char* str_G = ".\\imgqg.bmp";
	const char* str_B = ".\\imgqb.bmp";
	imwrite(str_R, imgqR);
	imwrite(str_G, imgqG);
	imwrite(str_B, imgqB);
	IplImage *srcImgR = cvLoadImage(str_R, 0); //0 => force grayscale load
	IplImage *srcImgG = cvLoadImage(str_G, 0);
	IplImage *srcImgB = cvLoadImage(str_B, 0);
	if (!srcImgR || !srcImgG || !srcImgB)
	{
		//Free whatever did load (cvReleaseImage tolerates a null target) and bail
		//out instead of dereferencing a null pointer below.
		cvReleaseImage(&srcImgR);
		cvReleaseImage(&srcImgG);
		cvReleaseImage(&srcImgB);
		return;
	}
	IplImage *dstImg = cvCreateImage(cvGetSize(srcImgR), 8, 3);
	IplImage *dstImg_Balance = cvCreateImage(cvGetSize(srcImgR), 8, 3);
	cvNamedWindow("src_R", CV_WINDOW_NORMAL);
	cvShowImage("src_R", srcImgR);
	cvNamedWindow("src_G", CV_WINDOW_NORMAL);
	cvShowImage("src_G", srcImgG);
	cvNamedWindow("src_B", CV_WINDOW_NORMAL);
	cvShowImage("src_B", srcImgB);
	//Plain merge of the three channels (could also be done with cvAddWeighted).
	cvMerge(srcImgR, srcImgG, srcImgB, NULL, dstImg);
	//Uneven-light compensation on cv::Mat views of the loaded channels.
	cv::Mat mat_r, mat_g, mat_b, mat_dstImg_Balance;
	mat_r = cv::cvarrToMat(srcImgR);
	mat_g = cv::cvarrToMat(srcImgG);
	mat_b = cv::cvarrToMat(srcImgB);
	int blocksize = 100; //empirically 75-100 works well
	unevenLightCompensate(mat_r, 90); //75/80/80 looked yellowish; 92/100/100 better
	unevenLightCompensate(mat_g, blocksize); //reddish otherwise
	unevenLightCompensate(mat_b, blocksize); //greenish / cold otherwise
	//Named IplImage headers over the compensated Mats. The original wrote
	//"srcImgR = &IplImage(mat_r)" — the address of a temporary, i.e. a dangling
	//pointer that only appeared to work; named locals keep the headers alive.
	IplImage iplR = IplImage(mat_r);
	IplImage iplG = IplImage(mat_g);
	IplImage iplB = IplImage(mat_b);
	if (IsWhite == true)
	{
		color_balance(&iplR, Banlece_RGB::R, 0.00366); //balance the R channel
		color_balance(&iplG, Banlece_RGB::G, 0.00334); //balance the G channel
		color_balance(&iplB, Banlece_RGB::B, 0.003);   //balance the B channel
		cvMerge(&iplR, &iplG, &iplB, NULL, dstImg_Balance); //merged, balanced result
		//Post-process the merged result to increase contrast.
		mat_dstImg_Balance = cv::cvarrToMat(dstImg_Balance);
		//Median filter suppresses speckle noise before sharpening.
		medianBlur(mat_dstImg_Balance, mat_dstImg_Balance, 3);
		//Sharpening (Laplacian-style) mask. NOTE(review): the char kernel truncates
		//the 5.5 center weight to 5 — confirm whether a float kernel was intended.
		cv::Mat kernel = (Mat_<char>(3, 3) << 0, -1, 0, -1, 5.5, -1, 0, -1, 0);
		filter2D(mat_dstImg_Balance, mat_dstImg_Balance, -1, kernel, Point(-1, -1));
	}
	cvNamedWindow("dst", CV_WINDOW_NORMAL);
	cvShowImage("dst", dstImg);
	//Use cvarrToMat (consistent with above) instead of the deprecated Mat(IplImage*)
	//constructor; clone so the outputs survive the releases below.
	outMergeDst = cv::cvarrToMat(dstImg).clone();
	cv::imwrite("balance1.bmp", outMergeDst);
	cvNamedWindow("dstImg_Balance", CV_WINDOW_NORMAL);
	cvShowImage("dstImg_Balance", dstImg_Balance);
	outMergeBalance = cv::cvarrToMat(dstImg_Balance).clone();
	cv::imwrite("balance2.bmp", outMergeBalance);
	//Release everything the C API allocated (the original leaked all five images).
	cvReleaseImage(&srcImgR);
	cvReleaseImage(&srcImgG);
	cvReleaseImage(&srcImgB);
	cvReleaseImage(&dstImg);
	cvReleaseImage(&dstImg_Balance);
}