// NiirsCpp/NiirsLinux/niirs.cpp
#include "niirs.h"
NIIRS::NIIRS(QWidget* parent)
: QMainWindow(parent)
{
ui.setupUi(this);
connect(ui.radioVisible, &QRadioButton::clicked, this, &NIIRS::onRadioBtn_Visi);
connect(ui.radioMidInfrared, &QRadioButton::clicked, this, &NIIRS::onRadioBtn_Infra);
connect(ui.radioLonInfrared, &QRadioButton::clicked, this, &NIIRS::onRadioBtn_Infra);
ui.textFormula->setText(QString::fromLocal8Bit("可见光数据计算公式:\n") +
"NIIRS = 10.251 - a*lgGSD + b*lgRER - 0.656*H - 0.334*(G/SNR) | RER>=0.9:a=3.32,b=1.559;RER<0.9:a=3.16,b=2.817");
connect(ui.pbtCalculate, &QPushButton::clicked, this, &NIIRS::calculateNiirsScore);
connect(ui.pbtObjectivePara, &QPushButton::clicked, this, &NIIRS::calculateObjectivePara);
//connect(ui.pbtObjectivePara, &QPushButton::clicked, this, &NIIRS::calculateEvaluationImage);
//connect(ui.pbtRER, &QPushButton::clicked, this, &NIIRS::calculateRER);
ui.lineEdit_GSDX->setText("1.0");
ui.lineEdit_GSDY->setText("1.0");
//ui.lineEdit_RERX->setText("0.986");
//ui.lineEdit_RERY->setText("0.965");
//ui.lineEdit_MTFC_H->setText("0.9");
ui.lineEdit_RER->setText("0.986");
ui.lineEdit_H->setText("0.9");
ui.lineEdit_MTFC_G->setText("1");
ui.lineEdit_SNR->setText("20");
ui.lineEdit_PIQE->setText("0.986");
QString inputImgPath = mXmlImgPath;
GDALAllRegister();
CPLSetConfigOption("GDAL_FILENAME_IS_UTF8", "NO");
string tiffEdgePath = inputImgPath.toStdString();
GDALDataset* poDataset;
poDataset = (GDALDataset*)GDALOpen(tiffEdgePath.c_str(), GA_ReadOnly);
if (NULL != poDataset)
{
for (int i = 0; i < poDataset->GetRasterCount(); i++)
ui.comboBoxBand->addItem(QString("band:%1").arg(i + 1));
GDALClose(poDataset);
}
}
QStringList NIIRS::getAllFiles(QString path, QString fileType)
{
// make sure the directory exists
QDir dir(path);
if (!dir.exists())
{
QMessageBox::warning(this, "warning", "no such path:\n" + path);
return QStringList();
}
dir.setFilter(QDir::Files | QDir::NoSymLinks);
QFileInfoList infoList = dir.entryInfoList();
int fileCount = infoList.count();
if (fileCount <= 0)
{
QMessageBox::warning(this, "warning", "No files in folder:\n" + path);
return QStringList();
}
QStringList files;
for (int i = 0; i < fileCount; i++)
{
QFileInfo fileInfo = infoList.at(i);
QString suffix = fileInfo.suffix();
if (QString::compare(suffix, fileType, Qt::CaseInsensitive) == 0)
{
QString absoluteFilePath = fileInfo.absoluteFilePath();
files.append(absoluteFilePath);
}
}
return files;
}
void NIIRS::onRadioBtn_Visi()
{
ui.textFormula->setText(QString::fromLocal8Bit("可见光数据计算公式:\n") +
"NIIRS = 10.251 - a*lgGSD + b*lgRER - 0.656*H - 0.334*(G/SNR) | RER>=0.9:a=3.32,b=1.559;RER<0.9:a=3.16,b=2.817");
}
void NIIRS::onRadioBtn_Infra()
{
if (ui.radioMidInfrared->isChecked())
ui.textFormula->setText(QString::fromLocal8Bit("中红外数据计算公式:\n")
+ "NIIRS = 10.751 - a*lgGSD + b*lgRER - 0.656*H - 0.334*(G/SNR) | RER>=0.9:a=3.32,b=1.559;RER<0.9:a=3.16,b=2.817");
else if (ui.radioLonInfrared->isChecked())
ui.textFormula->setText(QString::fromLocal8Bit("长波红外数据计算公式:\n")
+ "NIIRS = 10.751 - a*lgGSD + b*lgRER - 0.656*H - 0.334*(G/SNR) | RER>=0.9:a=3.32,b=1.559;RER<0.9:a=3.16,b=2.817");
}
void NIIRS::on_pbtOpenImg_clicked()
{
QString openImg = QFileDialog::getOpenFileName(this, "", "", "*.tif *.tiff");
if (!openImg.isEmpty())
ui.lineEdit_img->setText(openImg);
else
ui.lineEdit_INFO->setText(QString::fromLocal8Bit("未打开数据"));
//QString openImg = QFileDialog::getExistingDirectory(this, "", "");
//if (!openImg.isEmpty())
// ui.lineEdit_img->setText(openImg);
//else
// ui.lineEdit_INFO->setText(QString::fromLocal8Bit("未打开路径"));
}
//void NIIRS::on_pbtOpenEdgeX_clicked()
//{
// QString edgeX = QFileDialog::getOpenFileName(this, "", "", "*.tif *.tiff");
// if (!edgeX.isEmpty())
// ui.lineEdit_EdgeX->setText(edgeX);
// else
// ui.lineEdit_INFO->setText(QString::fromLocal8Bit("未打开数据"));
// //QString edgeX= QFileDialog::getOpenFileName(this, "", "", "*.jpg");
// //if (!edgeX.isEmpty())
// // ui.lineEdit_EdgeX->setText(edgeX);
// //else
// // ui.lineEdit_INFO->setText(QString::fromLocal8Bit("未打开数据"));
//}
//void NIIRS::on_pbtOpenEdgeY_clicked()
//{
// QString edgeY = QFileDialog::getOpenFileName(this, "", "", "*.tif *.tiff");
// if (!edgeY.isEmpty())
// ui.lineEdit_EdgeY->setText(edgeY);
// else
// ui.lineEdit_INFO->setText(QString::fromLocal8Bit("未打开数据"));
// //QString edgeY = QFileDialog::getOpenFileName(this, "", "", "*.jpg");
// //if (!edgeY.isEmpty())
// // ui.lineEdit_EdgeY->setText(edgeY);
// //else
// // ui.lineEdit_INFO->setText(QString::fromLocal8Bit("未打开数据"));
//}
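//Read basic metadata of a raster (data type, band count, size, projection WKT, geotransform, nodata value) into ImgInfo.
//Returns 0 on success, -1 if the file cannot be opened.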
int NIIRS::getImageInfo(const char* srcImgPath, struct ImgInfo* imgInfo)
{
GDALAllRegister();
CPLSetConfigOption("GDAL_FILENAME_IS_UTF8", "NO"); //allow non-ASCII (e.g. Chinese) paths
GDALDataset* hSrcDS;
hSrcDS = (GDALDataset*)GDALOpen(srcImgPath, GA_ReadOnly);
if (NULL == hSrcDS)
return -1;
GDALRasterBand* rasterBand;
rasterBand = hSrcDS->GetRasterBand(1);
imgInfo->dataType = rasterBand->GetRasterDataType();
imgInfo->bandCount = hSrcDS->GetRasterCount();
imgInfo->xSize = hSrcDS->GetRasterXSize();
imgInfo->ySize = hSrcDS->GetRasterYSize();
imgInfo->xOffset = 0;
imgInfo->yOffset = 0;
strcpy(imgInfo->projWkt, hSrcDS->GetProjectionRef());
hSrcDS->GetGeoTransform(imgInfo->geoTransform);
imgInfo->noData = rasterBand->GetNoDataValue();
GDALClose(hSrcDS);
return 0;
}
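//Read image data into imgArray: bandIndex == 0 reads all bands, otherwise only the requested band is read.
//Returns the RasterIO error code (0 == CE_None), or -1 on open/parameter errors.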
int NIIRS::readTiffFile(const char* srcImgPath, ImgInfo imgInfo, void* imgArray, int bandIndex)
{
GDALAllRegister();
CPLSetConfigOption("GDAL_FILENAME_IS_UTF8", "NO"); //allow non-ASCII (e.g. Chinese) paths
GDALDataset* poSrcDS = (GDALDataset*)GDALOpen(srcImgPath, GA_ReadOnly);
//error code returned by RasterIO
int errInfo = 0;
if (NULL == poSrcDS)
return -1;
//the requested band index exceeds the band count
if (bandIndex > imgInfo.bandCount)
{
GDALClose(poSrcDS);
return -1;
}
//bandIndex == 0: read all bands
if (0 == bandIndex)
{
errInfo = poSrcDS->RasterIO(GF_Read, imgInfo.xOffset, imgInfo.yOffset, imgInfo.xSize, imgInfo.ySize,
imgArray, imgInfo.xSize, imgInfo.ySize, imgInfo.dataType, imgInfo.bandCount, 0, 0, 0, 0);
}
//otherwise read only the requested band
else
{
GDALRasterBand* poBand = poSrcDS->GetRasterBand(bandIndex);
errInfo = poBand->RasterIO(GF_Read, imgInfo.xOffset, imgInfo.yOffset, imgInfo.xSize, imgInfo.ySize,
imgArray, imgInfo.xSize, imgInfo.ySize, imgInfo.dataType, 0, 0);
}
GDALClose((GDALDatasetH)poSrcDS);
return errInfo;
}
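//Prepare the result table: one column per band, one row per objective parameter
//(brightness, std dev, contrast, entropy, SNR, gradient, definition, gray max/min, PIQE, four ASM directions, RER).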
void NIIRS::initTableWidget(int rowNum, int colNum)
{
ui.tableWidget->clear();
ui.tableWidget->setColumnCount(colNum);
ui.tableWidget->setRowCount(rowNum);
QStringList header;
for (int i = 0; i < colNum; i++)
header << "band " + QString::number(i + 1);
//header << "band 1" << "band 2" << "band 3" << "band 4";
ui.tableWidget->setHorizontalHeaderLabels(header);
header.clear();
header << QString::fromLocal8Bit("亮度") << QString::fromLocal8Bit("标准差") << QString::fromLocal8Bit("对比度") << QString::fromLocal8Bit("")
<< QString::fromLocal8Bit("信噪比") << QString::fromLocal8Bit("陡度") << QString::fromLocal8Bit("清晰度") << QString::fromLocal8Bit("灰度最大值")
<< QString::fromLocal8Bit("灰度最小值") << QString::fromLocal8Bit("PIQE")
<< QString::fromLocal8Bit("0°二阶矩") << QString::fromLocal8Bit("90°二阶矩")
<< QString::fromLocal8Bit("45°二阶矩") << QString::fromLocal8Bit("135°二阶矩")
<< QString::fromLocal8Bit("RER");
ui.tableWidget->setVerticalHeaderLabels(header);
ui.tableWidget->setVerticalScrollMode(QAbstractItemView::ScrollPerPixel);
ui.tableWidget->setHorizontalScrollMode(QAbstractItemView::ScrollPerPixel);
}
void NIIRS::setTableWidgetValue(double val, int row, int col)
{
ui.tableWidget->setItem(row, col, new QTableWidgetItem(QString::number(val, 'f', 6)));
}
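//Parse the interface XML passed in by the caller: input image path, crop rectangle (top-left / bottom-right),
//product output directory and result XML path, then push the values into the UI widgets.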
void NIIRS::setXmlInterface(QString strXmlInterface)
{
mXmlInterface = strXmlInterface;
//qDebug() << "setXmlInterface:" << mXmlInterface;
std::cout << "setXmlInterface:" << mXmlInterface.toStdString();
//parse the interface XML
if (mXmlInterface != "")
{
QFile fileXML(mXmlInterface);
if (fileXML.open(QIODevice::ReadOnly))
{
QDomDocument dom;
QString errorStr;
int errorLine = 0;
int errorCol = 0;
if (!dom.setContent(&fileXML, true, &errorStr, &errorLine, &errorCol))
{
ui.lineEdit_INFO->setText(errorStr + ", line: " + QString::number(errorLine) + ", col: " + QString::number(errorCol));
fileXML.close();
return;
}
else
{
QDomElement root = dom.documentElement();
QDomNode current = root;
QStack<QDomNode> stack;
//walk the XML tree (iterative depth-first traversal using a stack)
while (1)
{
if (!current.isNull())
{
//pick out the parameters of interest
QDomElement element = current.toElement();
if (element.tagName() == "InputImg")
mXmlImgPath = element.firstChild().toCharacterData().data();
else if (element.tagName() == "TopLeftX")
mXmlTopLeftX = element.firstChild().toCharacterData().data();
else if (element.tagName() == "TopLeftY")
mXmlTopLeftY = element.firstChild().toCharacterData().data();
else if (element.tagName() == "BottomRightX")
mXmlBottomRightX = element.firstChild().toCharacterData().data();
else if (element.tagName() == "BottomRightY")
mXmlBottomRightY = element.firstChild().toCharacterData().data();
else if (element.tagName() == "ProductDir")
mXmlProductDir = element.firstChild().toCharacterData().data();
else if (element.tagName() == "ProductResultXML")
mResultXmlDir = element.firstChild().toCharacterData().data();
stack.push(current);
current = current.firstChild();
}
else if (!stack.count())
break;
else
current = stack.pop().nextSibling();
}
}
fileXML.close();
}
}
if (mXmlImgPath != "")
ui.lineEdit_img->setText(mXmlImgPath);
qDebug() << mXmlImgPath;
qDebug() << mXmlTopLeftX;
qDebug() << mXmlTopLeftY;
qDebug() << mXmlBottomRightX;
qDebug() << mXmlBottomRightY;
qDebug() << mXmlProductDir;
ui.lineEditLTx->setText(mXmlTopLeftX);
ui.lineEditLTy->setText(mXmlTopLeftY);
ui.lineEditRBx->setText(mXmlBottomRightX);
ui.lineEditRBy->setText(mXmlBottomRightY);
}
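//Compute the per-band objective parameters of the image in lineEdit_img (optionally restricted to the crop rectangle),
//fill the result table, cache the values of the currently selected band, and write everything to product.txt.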
void NIIRS::calculateObjectivePara()
{
ui.lineEdit_INFO->clear();
ui.tableWidget->clear();
ui.tableWidget->setColumnCount(0);
ui.tableWidget->setRowCount(0);
GDALAllRegister();
CPLSetConfigOption("GDAL_FILENAME_IS_UTF8", "NO"); //allow non-ASCII (e.g. Chinese) paths
//open the tiff to be evaluated
QString evaluateImg = ui.lineEdit_img->text();
if (evaluateImg.isEmpty())
{
//QMessageBox::warning(this, "warning", "The image to be evaluated is not opened");
ui.lineEdit_INFO->setText(QString::fromLocal8Bit("未打开待评估的图像"));
return;
}
string tiffPath = evaluateImg.toStdString();
GDALDataset* poDataset;
poDataset = (GDALDataset*)GDALOpen(tiffPath.c_str(), GA_ReadOnly);
if (NULL == poDataset)
return;
GDALRasterBand* pRasterBand;
GDALRasterBand* pClipBand;
vector<unsigned short*> pBuf;
//per-band results
vector<double>vecGrayMax;
vector<double>vecGrayMin;
vector<double>vecLight;
vector<double>vecStdDev;
vector<double>vecContrastRadio;
vector<double>vecSNR;
vector<double>vecGradient;
vector<double>vecDefinition;
vector<double>vecPIQE;
vector<double>vecEntropy;
vector<vector<double>>vecASM;
vector<double>temp;
//poDataset->GetMetadata();
GDALRasterBand* pTestBand = poDataset->GetRasterBand(1);
GDALDataType dataType = pTestBand->GetRasterDataType();
if (dataType != GDT_UInt16)// && dataType != GDT_Byte
{
ui.lineEdit_INFO->setText(QString::fromLocal8Bit("输入影像数据类型无法用于计算客观参数"));
return;
}
//qDebug()<<"Dataset data type:GDT_UInt16";
int bandCount = poDataset->GetRasterCount();
int iwidth = pTestBand->GetXSize();
int iheight = pTestBand->GetYSize();
// derive the clipped width/height from the crop rectangle
// int topLeftX = mXmlTopLeftX.toInt();
// int topLeftY = mXmlTopLeftY.toInt();
// int bottomRightX = mXmlBottomRightX.toInt();
// int bottomRightY = mXmlBottomRightY.toInt();
int topLeftX = ui.lineEditLTx->text().toInt();
int topLeftY = ui.lineEditLTy->text().toInt();
int bottomRightX = ui.lineEditRBx->text().toInt();
int bottomRightY = ui.lineEditRBy->text().toInt();
int clipWidth = bottomRightX - topLeftX;
int clipHeight = bottomRightY - topLeftY;
bool isCliped = false;
//compute the parameters band by band
for (int i = 0; i < bandCount; i++)
{
//allocate only the buffer that is actually used; a non-positive clip size would make new[] fail
unsigned short* bandBuf = (clipWidth > 0 && clipHeight > 0) ? nullptr : new unsigned short[iwidth * iheight];
unsigned short* bandBufClip = (clipWidth > 0 && clipHeight > 0) ? new unsigned short[clipWidth * clipHeight] : nullptr;
vecASM.push_back(temp);
pRasterBand = poDataset->GetRasterBand(i + 1);
//a temporary single-band clip image is written next to the executable
QString tempTiffPath = QCoreApplication::applicationDirPath();
tempTiffPath = tempTiffPath + "/test.tiff";
//1. brightness (mean), 2. standard deviation, 5. SNR = Mean/StdDev
double bandMin = 0;
double bandMax = 0;
double bandMean = 0;
double bandStdDev = 0;
//clipWidth = 0;
//decide whether to clip
if (clipWidth > 0 && clipHeight > 0)
{
isCliped = true;
//read the clipped band
if (pRasterBand->RasterIO(GF_Read, topLeftX, topLeftY, clipWidth, clipHeight, bandBufClip, clipWidth, clipHeight, dataType, 0, 0) != 0)
{
ui.lineEdit_INFO->setText(QString::fromLocal8Bit("解析裁剪数据出现错误, 波段:") + QString::number(i + 1));
continue;
}
pBuf.push_back(bandBufClip);
//1. brightness, 2. std dev, 5. SNR
GDALDriver* poDriver = GetGDALDriverManager()->GetDriverByName("GTIFF");
GDALDataset* clipDataset = poDriver->Create(tempTiffPath.toStdString().c_str(), clipWidth, clipHeight, 1, dataType, 0);
pClipBand = clipDataset->GetRasterBand(1);
pClipBand->RasterIO(GF_Write, 0, 0, clipWidth, clipHeight, bandBufClip, clipWidth, clipHeight, dataType, 0, 0);
pClipBand->ComputeStatistics(false, &bandMin, &bandMax, &bandMean, &bandStdDev, NULL, NULL);
vecLight.push_back(bandMean);
vecStdDev.push_back(bandStdDev);
vecSNR.push_back(bandMean / bandStdDev);
vecGrayMax.push_back(bandMax);
vecGrayMin.push_back(bandMin);
//4. image entropy
//std::cout<<"calculateTiffEntropy bandMax: "<<bandMax;
double tiffEntropy = calculateTiffEntropy(pBuf[i], clipWidth * clipHeight, bandMax);
vecEntropy.push_back(tiffEntropy);
GDALClose(clipDataset);
remove(tempTiffPath.toStdString().c_str());//delete the temporary test.tiff
}
else
{
isCliped = false;
//read the full band
if (pRasterBand->RasterIO(GF_Read, 0, 0, iwidth, iheight, bandBuf, iwidth, iheight, dataType, 0, 0) != 0)
{
ui.lineEdit_INFO->setText(QString::fromLocal8Bit("输入数据解析错误, 波段:") + QString::number(i + 1));
continue;
}
pBuf.push_back(bandBuf);
//1. brightness, 2. std dev, 5. SNR
pRasterBand->ComputeStatistics(false, &bandMin, &bandMax, &bandMean, &bandStdDev, NULL, NULL);
vecLight.push_back(bandMean);
vecStdDev.push_back(bandStdDev);
vecSNR.push_back(bandMean / bandStdDev);
vecGrayMax.push_back(bandMax);
vecGrayMin.push_back(bandMin);
//4. image entropy
std::cout << "calculateTiffEntropy bandMax: " << bandMax;
double tiffEntropy = calculateTiffEntropy(pBuf[i], iwidth * iheight, bandMax);
vecEntropy.push_back(tiffEntropy);
}
//wrap the buffer in a cv::Mat, using the clip size when a crop was applied
cv::Mat imageBand;
int imgW, imgH;
if (isCliped)
{
imgW = clipWidth;
imgH = clipHeight;
}
else
{
imgW = iwidth;
imgH = iheight;
}
if (dataType == GDT_Byte)
imageBand = cv::Mat(imgH, imgW, CV_8UC1, pBuf[i]);
else if (dataType == GDT_UInt16)
imageBand = cv::Mat(imgH, imgW, CV_16UC1, pBuf[i]);
else
{
ui.lineEdit_INFO->setText(QString::fromLocal8Bit("输入影像数据类型无法用于计算客观参数"));
continue;
}
//else if(dataType == GDT_UInt32)
// imageBand = cv::Mat(iheigth, iwidth, CV_32SC1, pBuf[i]);
//3. contrast
double contrastRadio = calculateContrastRatio(imageBand);
vecContrastRadio.push_back(contrastRadio);
//6. gradient (Sobel)
double sobel = gradient_Sobel(imageBand);
vecGradient.push_back(sobel);
//7. definition (Laplacian)
double laplation = definition_Laplacian(imageBand);
vecDefinition.push_back(laplation);
//GLCM angular second moment in four directions
cv::Mat dst_0degree, dst_90degree, dst_45degree, dst_135degree;
get_GLCM_0deg(imageBand, dst_0degree);
get_GLCM_90deg(imageBand, dst_90degree);
get_GLCM_45deg(imageBand, dst_45degree);
get_GLCM_135deg(imageBand, dst_135degree);
//qDebug() << "--calculate asm band" << i + 1;
//vecASM.push_back(temp);
//qDebug() << "-----0 degree-----";
vecASM[i].push_back(calculateASM(dst_0degree));
//qDebug() << "-----90 degree-----";
vecASM[i].push_back(calculateASM(dst_90degree));
//qDebug() << "-----45 degree-----";
vecASM[i].push_back(calculateASM(dst_45degree));
//qDebug() << "-----135 degree-----";
vecASM[i].push_back(calculateASM(dst_135degree));
//9. PIQE
double piqe = calculatePIQEScore(imageBand);
vecPIQE.push_back(piqe);
delete[] bandBufClip;
delete[] bandBuf;
}
initTableWidget(14, bandCount);
/*
* table rows (top to bottom):
* brightness, standard deviation, contrast, entropy,
* SNR, gradient, definition, gray max, gray min,
* PIQE,
* ASM at 0°, 90°, 45°, 135°,
* --RER--
*/
//cache the parameters of the band currently selected in the combo box (fall back to band 1 if none is selected)
int currentBand = ui.comboBoxBand->currentIndex();
if (currentBand < 0 || currentBand >= bandCount)
currentBand = 0;
ui.lineEdit_SNR->setText(QString::number(vecSNR[currentBand], 'f', 4));
ui.lineEdit_PIQE->setText(QString::number(vecPIQE[currentBand], 'f', 4));
imgLight = vecLight[currentBand];
imgStdDev = vecStdDev[currentBand];
imgContrastRadio = vecContrastRadio[currentBand];
imgEntropy = vecEntropy[currentBand];
imgSNR = vecSNR[currentBand];
imgGradient = vecGradient[currentBand];
imgDefinition = vecDefinition[currentBand];
imgASM = (vecASM[currentBand][0] + vecASM[currentBand][1] + vecASM[currentBand][2] + vecASM[currentBand][3]) / 4;
imgPIQE = vecPIQE[currentBand];
for (int i = 0; i < bandCount; i++)
{
setTableWidgetValue(vecLight[i], 0, i);
setTableWidgetValue(vecStdDev[i], 1, i);
setTableWidgetValue(vecContrastRadio[i], 2, i);
setTableWidgetValue(vecEntropy[i], 3, i);
setTableWidgetValue(vecSNR[i], 4, i);
setTableWidgetValue(vecGradient[i], 5, i);
setTableWidgetValue(vecDefinition[i], 6, i);
setTableWidgetValue(vecGrayMax[i], 7, i);
setTableWidgetValue(vecGrayMin[i], 8, i);
setTableWidgetValue(vecPIQE[i], 9, i);
setTableWidgetValue(vecASM[i][0], 10, i);
setTableWidgetValue(vecASM[i][1], 11, i);
setTableWidgetValue(vecASM[i][2], 12, i);
setTableWidgetValue(vecASM[i][3], 13, i);
}
//qDebug() << "----------------------------------------------------------\n";
//release the dataset
GDALClose(poDataset);
QString outTxt;
//write product.txt into the output folder
if (mXmlProductDir != "")
{
QDir productDir(mXmlProductDir);
if (!productDir.exists())
{
ui.lineEdit_INFO->setText(QString::fromLocal8Bit("product.txt not exists"));
return;
}
outTxt = productDir.absoluteFilePath("product.txt");
//qDebug() << "product.txt: " << outTxt;
QFile txtFile(outTxt);
if (!txtFile.open(QIODevice::WriteOnly | QIODevice::Text | QIODevice::Truncate))//Truncate: overwrite existing content (Append would add to the end)
return;
QDateTime time = QDateTime::currentDateTime();
QString currentTime = time.toString("yyyyMMddhhmmss");
QTextStream stream(&txtFile);
stream << QString::fromLocal8Bit("日期时间: %1\n").arg(currentTime);
stream << QString::fromLocal8Bit("文件路径: %1\n").arg(evaluateImg);
if (isCliped) {
stream << QString::fromLocal8Bit("图像宽度: %1, ").arg(clipWidth);
stream << QString::fromLocal8Bit("高度: %1\n").arg(clipHeight);
}
else {
stream << QString::fromLocal8Bit("图像宽度: %1, ").arg(iwidth);
stream << QString::fromLocal8Bit("高度: %1\n").arg(iheight);
}
for (int i = 0; i < bandCount; i++)
{
stream << QString::fromLocal8Bit("波段: %1\n").arg(i + 1);
stream << QString::fromLocal8Bit(" 图像灰度最大值: %1\n").arg(vecGrayMax[i]);
stream << QString::fromLocal8Bit(" 图像灰度最小值: %1\n").arg(vecGrayMin[i]);
stream << QString::fromLocal8Bit(" 图像亮 度: %1\n").arg(vecLight[i]);
stream << QString::fromLocal8Bit(" 图像标准差: %1\n").arg(vecStdDev[i]);
stream << QString::fromLocal8Bit(" 图像对比度: %1\n").arg(vecContrastRadio[i]);
stream << QString::fromLocal8Bit(" 图像熵: %1\n").arg(vecEntropy[i]);
stream << QString::fromLocal8Bit(" 图像信噪比: %1\n").arg(vecSNR[i]);
stream << QString::fromLocal8Bit(" 图像陡 度: %1\n").arg(vecGradient[i]);
stream << QString::fromLocal8Bit(" 图像清晰度: %1\n").arg(vecDefinition[i]);
stream << QString::fromLocal8Bit(" 图像PIQE: %1\n").arg(vecPIQE[i]);
stream << QString::fromLocal8Bit(" 图像0°二阶矩: %1\n").arg(vecASM[i][0]);
stream << QString::fromLocal8Bit(" 图像90°二阶矩: %1\n").arg(vecASM[i][1]);
stream << QString::fromLocal8Bit(" 图像45°二阶矩: %1\n").arg(vecASM[i][2]);
stream << QString::fromLocal8Bit(" 图像135°二阶矩: %1\n").arg(vecASM[i][3]);
}
stream << "\n";
txtFile.close();
ui.lineEdit_INFO->setText("result files save at:" + outTxt);
}
else
{
ui.lineEdit_INFO->setText(QString::fromLocal8Bit("product.txt输出文件夹为空或不存在"));
}
}
//void NIIRS::calculateEvaluationImage()
//{
// QString evaluateImgPath = ui.lineEdit_img->text();
// // QStringList evaluateFiles = getAllFiles(evaluateImgPath, "BMP");
// // if (evaluateFiles.size() == 0)
// // {
// // ui.lineEdit_INFO->setText("No BMP files in folder:" + evaluateImgPath);
// // return;
// // }
// // QString textFileName = "D:\\A_testdata\\niirs_test\\tid2008\\a_distorted_images.txt";
// // QFile file(textFileName);
// // if (!file.open(QFile::Append | QFile::Text))
// // {
// // QMessageBox::warning(this, "warning", "Please select the correct file");
// // return;
// // }
// // QTextStream textStream(&file);
// // textStream << "imgName,imgLight,imgStdDev,imgContrastRadio,imgEntropy,imgSNR,imgGradient,imgDefinition,imgASM,imgPIQE,\n";
// // for (int i = 0; i < evaluateFiles.size(); i++)
// // {
// // cv::Mat evaluateImg = cv::imread(evaluateFiles[i].toStdString());
// cv::Mat evaluateImg = cv::imread(evaluateImgPath.toStdString());
// cv::Mat imgGray;
// if (evaluateImg.channels() == 3)
// cv::cvtColor(evaluateImg, imgGray, CV_BGR2GRAY);
// else
// imgGray = evaluateImg;
//
// cv::Scalar meanScalar, stdDevScalar;
// cv::meanStdDev(imgGray, meanScalar, stdDevScalar);
// QVector<double>paras;
// double imgLight = meanScalar.val[0];
// double imgStdDev = stdDevScalar.val[0];
// double imgContrastRadio = calculateContrastRatio(imgGray);
// double imgEntropy = calculateEntropy(imgGray);
// double imgSNR = calculateSNR(imgGray);
// double imgSobel = gradient_Sobel(imgGray);
// double imgLaplacian = definition_Laplacian(imgGray);
// paras.append(imgLight);
// paras.append(imgStdDev);
// paras.append(imgContrastRadio);
// paras.append(imgEntropy);
// paras.append(imgSNR);
// paras.append(imgSobel);
// paras.append(imgLaplacian);
//
// cv::Mat dst_0, dst_90, dst_45, dst_135;
// get_GLCM_0deg(imgGray, dst_0);
// get_GLCM_90deg(imgGray, dst_90);
// get_GLCM_45deg(imgGray, dst_45);
// get_GLCM_135deg(imgGray, dst_135);
// double degree_0 = calculateASM(dst_0);
// double degree_90 = calculateASM(dst_90);
// double degree_45 = calculateASM(dst_45);
// double degree_135 = calculateASM(dst_135);
// double imgASM = (degree_0 + degree_90 + degree_45 + degree_135) / 4.0;
// double imgPIQE = calculatePIQEScore(imgGray);
// paras.append(imgASM);
// paras.append(imgPIQE);
//
// // QFileInfo fileInfo(evaluateFiles[i]);
// // QString imgName = fileInfo.fileName();
// // textStream << imgName<<",";
// // for (int j = 0; j < paras.size(); j++)
// // textStream << QString::number(paras[j], 10, 6)<<",";
// // textStream << "\n";
// // }
// // file.close();
// // qDebug() << "calculateEvaluationImage done";
//}
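//Compute the NIIRS score with the GIQE-style formula shown in the UI:
//NIIRS = C - a*log10(GSD_inch) + b*log10(RER) - 0.656*H - 0.334*(G/SNR),
//where C is 10.251 (visible) or 10.751 (infrared) and (a, b) = (3.32, 1.559) for RER >= 0.9, (3.16, 2.817) otherwise.
//Validates the inputs, writes the score to the UI and saves parameters and results to the result XML.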
double NIIRS::calculateNiirsScore()
{
calculateRER();
QString textGSDX = ui.lineEdit_GSDX->text();
QString textGSDY = ui.lineEdit_GSDY->text();
double GSDX, GSDY;
QString textRER = ui.lineEdit_RER->text();
QString text_H = ui.lineEdit_H->text();
double RER, MTFC_H;
QString textMTFC_G = ui.lineEdit_MTFC_G->text();
double MTFC_G;
QString textSNR = ui.lineEdit_SNR->text();
QString textPIQE = ui.lineEdit_PIQE->text();
double SNR, PIQE;
//1. validate the GSD input
if (textGSDX.toDouble() <= 0.0 || textGSDY.toDouble() <= 0.0)
{
ui.lineEdit_INFO->setText(QString::fromLocal8Bit("检查GSD输入"));
return 0.0;
}
else
{
GSDX = textGSDX.toDouble();
GSDY = textGSDY.toDouble();
ui.lineEdit_INFO->clear();
}
//2. validate the RER input
if (textRER.toDouble() == 0.0)
{
ui.lineEdit_INFO->setText(QString::fromLocal8Bit("请输入RER数据"));
return 0.0;
}
else
{
RER = textRER.toDouble();
ui.lineEdit_INFO->clear();
}
//3. validate the overshoot H input
if (text_H.toDouble() < 0.0 || text_H.toDouble() > 1.90)
{
ui.lineEdit_INFO->setText(QString::fromLocal8Bit("检查几何过冲H输入"));
return 0.0;
}
else
{
MTFC_H = text_H.toDouble();
ui.lineEdit_INFO->clear();
}
//4. validate the noise gain G input
if (textMTFC_G.toDouble() < 1.0 || textMTFC_G.toDouble() > 19.0)
{
ui.lineEdit_INFO->setText(QString::fromLocal8Bit("检查噪声增益G输入"));
return 0.0;
}
else
{
MTFC_G = textMTFC_G.toDouble();
ui.lineEdit_INFO->clear();
}
//5. validate the SNR input
if (textSNR.toDouble() < 0.0 || textSNR.toDouble() > 130.0)
{
ui.lineEdit_INFO->setText(QString::fromLocal8Bit("检查信噪比SNR输入"));
return 0.0;
}
else
{
SNR = textSNR.toDouble();
ui.lineEdit_INFO->clear();
}
//6. validate the PIQE input
if (textPIQE.toDouble() == 0.0)
{
ui.lineEdit_INFO->setText(QString::fromLocal8Bit("检查PIQE"));
return 0.0;
}
else
{
PIQE = textPIQE.toDouble();
ui.lineEdit_INFO->clear();
}
double NIIRS = 0.0;
double formulaPara = 0.0;
double GSD_inch;
if (ui.radioVisible->isChecked())
formulaPara = 10.251;
else if (ui.radioMidInfrared->isChecked() || ui.radioLonInfrared->isChecked())
formulaPara = 10.751;
//convert the GSD from metres to inches (1 m = 39.3700787402 in)
GSD_inch = sqrt(GSDX * GSDY) * 39.3700787402;
//RER>=0.9:a=3.32,b=1.559;RER<0.9:a=3.16,b=2.817
double para_a = RER >= 0.9 ? 3.32 : 3.16;
double para_b = RER >= 0.9 ? 1.559 : 2.817;
//qDebug() << "--a:" << a << ", b:" << b << ",para:" << para;
//visible: 10.251 - a*lgGSD + b*lgRER - 0.656*H - 0.334*(G/SNR)
//infrared: 10.751 - a*lgGSD + b*lgRER - 0.656*H - 0.334*(G/SNR)
NIIRS = formulaPara - (para_a * log10(GSD_inch)) + (para_b * log10(RER)) - (0.656 * MTFC_H) - (0.334 * (MTFC_G / SNR));
ui.lineEdit_NIIRS->setText(QString::number(NIIRS, 'f', 4));
QString outXMLPath = mResultXmlDir;
std::cout << outXMLPath.toStdString() << std::endl;
if (outXMLPath == "")
QMessageBox::information(this, tr("information"), tr("The Product Dir is Empty"));
else
{
//outXMLPath = outXMLPath + "/result.xml";
//ui.lineEdit_INFO->setText(outXMLPath);
std::cout << outXMLPath.toStdString() << std::endl;
QFile file(outXMLPath);
if (!file.open(QIODevice::WriteOnly | QIODevice::Truncate))
{//any existing content is discarded
qDebug() << QStringLiteral("打开") << outXMLPath << QStringLiteral("文件失败!");
return 0.0;
}
QDomDocument doc;
QDomProcessingInstruction processInstruction = doc.createProcessingInstruction("xml", "version=\"1.0\" encoding=\"UTF-8\"");
doc.appendChild(processInstruction);
//root node
QDomElement root = doc.createElement("Root");
doc.appendChild(root);
{
//node 1: Metadata
QDomElement paraMetadata = doc.createElement("Metadata");
root.appendChild(paraMetadata);
QDomElement paraMeta1 = doc.createElement("Path");
QDomText paraMeta1Text = doc.createTextNode(ui.lineEdit_img->text());
paraMeta1.appendChild(paraMeta1Text);
paraMetadata.appendChild(paraMeta1);
QDomElement paraMeta2 = doc.createElement("Time");
QDomText paraMeta2Text = doc.createTextNode(QDateTime::currentDateTime().toString("yyyy.MM.dd"));
paraMeta2.appendChild(paraMeta2Text);
paraMetadata.appendChild(paraMeta2);
QDomElement paraMetaLTx = doc.createElement("LeftTopX");
QDomText paraMetaLTxText = doc.createTextNode(mXmlTopLeftX);
paraMetaLTx.appendChild(paraMetaLTxText);
paraMetadata.appendChild(paraMetaLTx);
QDomElement paraMetaLTy = doc.createElement("LeftTopY");
QDomText paraMetaLTyText = doc.createTextNode(mXmlTopLeftY);
paraMetaLTy.appendChild(paraMetaLTyText);
paraMetadata.appendChild(paraMetaLTy);
QDomElement paraMetaRBx = doc.createElement("RightBottomX");
QDomText paraMetaRBxText = doc.createTextNode(mXmlBottomRightX);
paraMetaRBx.appendChild(paraMetaRBxText);
paraMetadata.appendChild(paraMetaRBx);
QDomElement paraMetaRBy = doc.createElement("RightBottomY");
QDomText paraMetaRByText = doc.createTextNode(mXmlBottomRightY);
paraMetaRBy.appendChild(paraMetaRByText);
paraMetadata.appendChild(paraMetaRBy);
}
{
//node 2: NIIRS parameters and result
QDomElement paraResultNiirs = doc.createElement("NIIRS");
root.appendChild(paraResultNiirs);
QDomElement paraNiirsPara = doc.createElement("Parameters");
paraResultNiirs.appendChild(paraNiirsPara);
QDomElement paraGSDX = doc.createElement("GSDX");
QDomText paraGSDXText = doc.createTextNode(ui.lineEdit_GSDX->text());
paraGSDX.appendChild(paraGSDXText);
paraNiirsPara.appendChild(paraGSDX);
QDomElement paraGSDY = doc.createElement("GSDY");
QDomText paraGSDYText = doc.createTextNode(ui.lineEdit_GSDY->text());
paraGSDY.appendChild(paraGSDYText);
paraNiirsPara.appendChild(paraGSDY);
QDomElement paraRER = doc.createElement("RER");
QDomText paraRERTest = doc.createTextNode(ui.lineEdit_RER->text());
paraRER.appendChild(paraRERTest);
paraNiirsPara.appendChild(paraRER);
QDomElement paraH = doc.createElement("H");
QDomText paraHText = doc.createTextNode(ui.lineEdit_H->text());
paraH.appendChild(paraHText);
paraNiirsPara.appendChild(paraH);
QDomElement paraG = doc.createElement("NoiseGain");
QDomText paraGText = doc.createTextNode(ui.lineEdit_MTFC_G->text());
paraG.appendChild(paraGText);
paraNiirsPara.appendChild(paraG);
QDomElement paraSNR = doc.createElement("SNR");
QDomText paraSNRText = doc.createTextNode(ui.lineEdit_SNR->text());
paraSNR.appendChild(paraSNRText);
paraNiirsPara.appendChild(paraSNR);
QDomElement paraNiirsPIQE = doc.createElement("PIQE");
QDomText paraNiirsPIQEText = doc.createTextNode(ui.lineEdit_PIQE->text());
paraNiirsPIQE.appendChild(paraNiirsPIQEText);
paraNiirsPara.appendChild(paraNiirsPIQE);
QDomElement paraNiirsScore = doc.createElement("Result");
paraResultNiirs.appendChild(paraNiirsScore);
QDomElement paraTiffType = doc.createElement("Type");
QDomText paraTiffTypeText;
if (ui.radioVisible->isChecked())
paraTiffTypeText = doc.createTextNode("Visible");
else if (ui.radioMidInfrared->isChecked())
paraTiffTypeText = doc.createTextNode("MiddleInfrared");
else if (ui.radioLonInfrared->isChecked())
paraTiffTypeText = doc.createTextNode("LongWaveInfrared");
paraTiffType.appendChild(paraTiffTypeText);
paraNiirsScore.appendChild(paraTiffType);
QDomElement paraScore = doc.createElement("NiirsScore");
QDomText paraScoreText = doc.createTextNode(ui.lineEdit_NIIRS->text());
paraScore.appendChild(paraScoreText);
paraNiirsScore.appendChild(paraScore);
}
{
//node 3: the ten objective parameters
QDomElement paraObjectiveParameter = doc.createElement("ObjectiveParameter");
root.appendChild(paraObjectiveParameter);
QDomElement paraLight = doc.createElement("ImageLight");
QDomText paraLightText = doc.createTextNode(QString::number(imgLight, 10, 6));
paraLight.appendChild(paraLightText);
paraObjectiveParameter.appendChild(paraLight);
QDomElement paraStdDev = doc.createElement("ImageStdDev");
QDomText paraStdDevText = doc.createTextNode(QString::number(imgStdDev, 10, 6));
paraStdDev.appendChild(paraStdDevText);
paraObjectiveParameter.appendChild(paraStdDev);
QDomElement paraContrastRadio = doc.createElement("ImageContrastRadio");
QDomText paraContrastRadioText = doc.createTextNode(QString::number(imgContrastRadio, 10, 6));
paraContrastRadio.appendChild(paraContrastRadioText);
paraObjectiveParameter.appendChild(paraContrastRadio);
QDomElement paraEntropy = doc.createElement("ImageEntropy");
QDomText paraEntropyText = doc.createTextNode(QString::number(imgEntropy, 10, 6));
paraEntropy.appendChild(paraEntropyText);
paraObjectiveParameter.appendChild(paraEntropy);
QDomElement paraSNR = doc.createElement("ImageSNR");
QDomText paraSNRText = doc.createTextNode(QString::number(imgSNR, 10, 6));
paraSNR.appendChild(paraSNRText);
paraObjectiveParameter.appendChild(paraSNR);
QDomElement paraRER = doc.createElement("ImageRER");
QDomText paraRERText = doc.createTextNode(QString::number(imgRER, 10, 6));
paraRER.appendChild(paraRERText);
paraObjectiveParameter.appendChild(paraRER);
QDomElement paraGradient = doc.createElement("ImageGradient");
QDomText paraGradientText = doc.createTextNode(QString::number(imgGradient, 10, 6));
paraGradient.appendChild(paraGradientText);
paraObjectiveParameter.appendChild(paraGradient);
QDomElement paraDefinition = doc.createElement("ImageDefinition");
QDomText paraDefinitionText = doc.createTextNode(QString::number(imgDefinition, 10, 6));
paraDefinition.appendChild(paraDefinitionText);
paraObjectiveParameter.appendChild(paraDefinition);
QDomElement paraASM = doc.createElement("ImageASM");
QDomText paraASMText = doc.createTextNode(QString::number(imgASM, 10, 6));
paraASM.appendChild(paraASMText);
paraObjectiveParameter.appendChild(paraASM);
QDomElement paraPIQE = doc.createElement("ImagePIQE");
QDomText paraPIQEText = doc.createTextNode(QString::number(imgPIQE, 10, 6));
paraPIQE.appendChild(paraPIQEText);
paraObjectiveParameter.appendChild(paraPIQE);
}
QTextStream outFile(&file);
doc.save(outFile, 4);//indent with 4 spaces
file.close();
ui.lineEdit_INFO->setText("result files save at:" + outXMLPath);
}
return NIIRS;
}
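//Estimate RER and overshoot H from a slanted-edge (knife-edge) target:
//upsample the selected band 4x, threshold, run Canny + Hough to find candidate edges,
//evaluate each edge with calculate_rer_h and keep the one with the maximum RER.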
double NIIRS::calculateRER()
{
ui.lineEdit_INFO->clear();
GDALAllRegister();
CPLSetConfigOption("GDAL_FILENAME_IS_UTF8", "NO");
//QString inEdgeImg = mXmlImgPath;
QString inEdgeImg = ui.lineEdit_img->text();
//ui.lineEdit_INFO->setText(inEdgeImg);
//double imgRERValue = 0.0, imgHValue = 0.0;
cv::Mat imgTarget;
if (inEdgeImg == "")
{
ui.lineEdit_INFO->setText(QString::fromLocal8Bit("无刃边数据"));
return 0.0;
}
string tiffEdgePath = inEdgeImg.toStdString();
GDALDataset* poDataset;
poDataset = (GDALDataset*)GDALOpen(tiffEdgePath.c_str(), GA_ReadOnly);
if (poDataset == nullptr)
{
ui.lineEdit_INFO->setText(QString::fromLocal8Bit("刃边图像无法解析"));
return 0.0;
}
//GDALRasterBand* rasterBand = poDataset->GetRasterBand(1);
int currentBand = ui.comboBoxBand->currentIndex();
if (currentBand < 0 || currentBand >= poDataset->GetRasterCount())
currentBand = 0;//fall back to the first band when the combo box has no valid selection
GDALRasterBand* rasterBand = poDataset->GetRasterBand(currentBand + 1);
GDALDataType dataType = rasterBand->GetRasterDataType();
int inImgWidth = rasterBand->GetXSize();
int inImgHeight = rasterBand->GetYSize();
//int topLeftX = mXmlTopLeftX.toInt();
//int topLeftY = mXmlTopLeftY.toInt();
//int bottomRightX = mXmlBottomRightX.toInt();
//int bottomRightY = mXmlBottomRightY.toInt();
int topLeftX = ui.lineEditLTx->text().toInt();
int topLeftY = ui.lineEditLTy->text().toInt();
int bottomRightX = ui.lineEditRBx->text().toInt();
int bottomRightY = ui.lineEditRBy->text().toInt();
int clipWidth = bottomRightX - topLeftX;
int clipHeight = bottomRightY - topLeftY;
if (clipWidth <= 0 || clipHeight <= 0)//no valid crop rectangle: use the whole image
{
clipWidth = inImgWidth;
clipHeight = inImgHeight;
}
//read the edge image band
if (dataType == GDT_Byte)
{
unsigned char* bandBuf = new unsigned char[clipWidth * clipHeight];
if (rasterBand->RasterIO(GF_Read, topLeftX, topLeftY, clipWidth, clipHeight, bandBuf, clipWidth, clipHeight, dataType, 0, 0) != 0)
{
ui.lineEdit_INFO->setText(QString::fromLocal8Bit("输入数据解析错误"));
delete[] bandBuf;
GDALClose(poDataset);
return -1.0;
}
imgTarget = cv::Mat(clipHeight, clipWidth, CV_8UC1, bandBuf).clone();//copy so the raw buffer can be freed
delete[] bandBuf;
}
else if (dataType == GDT_UInt16)
{
unsigned short* bandBuf = new unsigned short[clipWidth * clipHeight];
if (rasterBand->RasterIO(GF_Read, topLeftX, topLeftY, clipWidth, clipHeight, bandBuf, clipWidth, clipHeight, dataType, 0, 0) != 0)
{
ui.lineEdit_INFO->setText(QString::fromLocal8Bit("输入数据解析错误"));
delete[] bandBuf;
GDALClose(poDataset);
return -1.0;
}
cv::Mat img = cv::Mat(clipHeight, clipWidth, CV_16UC1, bandBuf);
double min, max;
cv::minMaxLoc(img, &min, &max);
img.convertTo(imgTarget, CV_8UC1, 255.0 / max, 0);//stretch 16-bit data into 8-bit for the edge analysis
delete[] bandBuf;
}
else
{
ui.lineEdit_INFO->setText(QString::fromLocal8Bit("输入影像数据类型无法用于RER计算"));
GDALClose(poDataset);
return -1.0;
}
GDALClose(poDataset);
//根据两点计算斜率k判断为x方向或y方向
//double slopeK = 0.0;
//QPair<double, double>edgeVals;
//if (slopeK >= -1 && slopeK <= 1)
// edgeVals = calculateERx(imgTarget);
//else
// edgeVals = calculateERy(imgTarget);
//imgRERValue = edgeVals.first;
//imgHValue = edgeVals.second;
//ui.lineEdit_RER->setText(QString::number(imgRERValue, 10, 4));
//ui.lineEdit_H->setText(QString::number(imgHValue, 10, 4));
cv::Mat imgGray, imgGrayX4;
if (imgTarget.channels() == 3)
cv::cvtColor(imgTarget, imgGray, CV_BGR2GRAY);
else
imgGray = imgTarget;
cv::pyrUp(imgGray, imgGrayX4);
cv::pyrUp(imgGrayX4, imgGrayX4);//upsample to 4x the original size for 1/4-pixel super-sampling
cv::Mat imgThres;
cv::threshold(imgGrayX4, imgThres, 130, 255, cv::THRESH_BINARY);
QString thresholdPath = mXmlProductDir + "/threshold.bmp";
cv::imwrite(thresholdPath.toStdString().c_str(), imgThres);
cv::Mat imgCanny;
cv::Canny(imgThres, imgCanny, 100, 200);//Canny edge extraction
QString cannyPath = mXmlProductDir + "/canny.bmp";
cv::imwrite(cannyPath.toStdString().c_str(), imgCanny);
vector<cv::Vec4i>R_lines;
//cv::HoughLinesP(imgCanny, R_lines, 1, CV_PI / 180, 100, 25, 100);//提取直线
cv::HoughLinesP(imgCanny, R_lines, 1, CV_PI / 180, 50, 50, 10);//extract straight line segments
QMap<double, double>map_rer_h;//key RER, value H
if (R_lines.size() == 0)
{
//qDebug() << "find no line in edge x";
QMessageBox::information(this, "information", "find no edge in image\n" + inEdgeImg);
//ui.lineEdit_INFO->setText(QString::fromLocal8Bit("无法从输入数据中提取刃边"));
return 0.0;
}
double maxRER = 0.0;
omp_set_num_threads(8);
#pragma omp parallel for
for (int z = 0; z < R_lines.size(); z++)
{
cv::Vec4i p = R_lines[z];
cv::Point pointA = cv::Point(p[0], p[1]);
cv::Point pointB = cv::Point(p[2], p[3]);
QPair<double, double>rer_h = calculate_rer_h(imgGrayX4, pointA, pointB);
#pragma omp critical
{
//shared state: track the edge with the maximum RER and collect all (RER, H) pairs
if (rer_h.first > maxRER)
{
maxRER = rer_h.first;
mRerEdgePointA = pointA;
mRerEdgePointB = pointB;
}
map_rer_h.insert(rer_h.first, rer_h.second);
}
}
QString edgeOutPath = mXmlProductDir + "/edges.bmp";
QString outTxt;
//write edges.txt into the output folder
QDir productDir(mXmlProductDir);
outTxt = productDir.absoluteFilePath("edges.txt");
QFile txtFile(outTxt);
txtFile.open(QIODevice::WriteOnly | QIODevice::Text | QIODevice::Truncate);//Truncate: overwrite existing content (Append would add to the end)
QTextStream stream(&txtFile);
stream << QString("max rer edge: LTx:%1, LTy:%2; RBx:%3, RBy:%4\n")
.arg(mRerEdgePointA.x).arg(mRerEdgePointA.y).arg(mRerEdgePointB.x).arg(mRerEdgePointB.y);
cv::cvtColor(imgGrayX4, imgGrayX4, CV_GRAY2BGR);
for (int z = 0; z < R_lines.size(); z++)
{
cv::Vec4i p = R_lines[z];
cv::Point pointA = cv::Point(p[0], p[1]);
cv::Point pointB = cv::Point(p[2], p[3]);
cv::line(imgGrayX4, pointA, pointB, cv::Scalar(255, 255, 0));
stream << QString("edge:%1, LTx:%2, LTy:%3; RBx:%4, RBy:%5\n").arg(z).arg(p[0]).arg(p[1]).arg(p[2]).arg(p[3]);
}
stream << "\n";
txtFile.close();
cv::imwrite(edgeOutPath.toStdString().c_str(), imgGrayX4);
maxRER = map_rer_h.lastKey();
double maxH = map_rer_h.value(maxRER);
ui.lineEdit_RER->setText(QString::number(maxRER, 'f', 4));
ui.lineEdit_H->setText(QString::number(maxH, 'f', 4));
imgRER = maxRER;
return maxRER;
/////////////////////////////////////////////////////////////////////////////////////////////
//QString pathTargetX = mEdgeXPath;
//QString pathTargetY = mEdgeYPath;
////cv::Mat imgTargetX, imgTargetY;
//if (pathTargetX == "" || pathTargetY == "")
//{
// ui.lineEdit_INFO->setText(QString::fromLocal8Bit("未打开刃边数据"));
// return 0.0;
//}
////GDALAllRegister();
////CPLSetConfigOption("GDAL_FILENAME_IS_UTF8", "NO");
//string tiffXPath = pathTargetX.toStdString();
//string tiffYPath = pathTargetY.toStdString();
//GDALDataset* poDatasetX;
//GDALDataset* poDatasetY;
//poDatasetX = (GDALDataset*)GDALOpen(tiffXPath.c_str(), GA_ReadOnly);
//poDatasetY = (GDALDataset*)GDALOpen(tiffYPath.c_str(), GA_ReadOnly);
//if (NULL == poDatasetX)
//{
// GDALClose(poDatasetX);
// GDALClose(poDatasetY);
// ui.lineEdit_INFO->setText(QString::fromLocal8Bit("X方向刃边图像无法解析"));
// return -1.0;
//}
//if (NULL == poDatasetY)
//{
// GDALClose(poDatasetX);
// GDALClose(poDatasetY);
// ui.lineEdit_INFO->setText(QString::fromLocal8Bit("Y方向刃边图像无法解析"));
// return -1.0;
//}
//GDALRasterBand* rasterXBand = poDatasetX->GetRasterBand(1);
//GDALRasterBand* rasterYBand = poDatasetY->GetRasterBand(1);
//GDALDataType dataTypeX = rasterXBand->GetRasterDataType();
//GDALDataType dataTypeY = rasterYBand->GetRasterDataType();
//int xwidth = rasterXBand->GetXSize();
//int xheight = rasterXBand->GetYSize();
//int ywidth = rasterYBand->GetXSize();
//int yheight = rasterYBand->GetYSize();
//cv::Mat imgTargetX, imgTargetY;
////读取x方向刃边图
//if (dataTypeX == GDT_Byte)
//{
// unsigned char* bandBuf = new unsigned char[xwidth * xheight];
// if (rasterXBand->RasterIO(GF_Read, 0, 0, xwidth, xheight, bandBuf, xwidth, xheight, dataTypeX, 0, 0) != 0)
// {
// ui.lineEdit_INFO->setText(QString::fromLocal8Bit("输入数据解析错误"));
// return -1.0;
// }
// imgTargetX = cv::Mat(xheight, xwidth, CV_8UC1, bandBuf);
//}
//else if (dataTypeX == GDT_UInt16)
//{
// unsigned short* bandBuf = new unsigned short[xwidth * xheight];
// if (rasterXBand->RasterIO(GF_Read, 0, 0, xwidth, xheight, bandBuf, xwidth, xheight, dataTypeX, 0, 0) != 0)
// {
// ui.lineEdit_INFO->setText(QString::fromLocal8Bit("输入数据解析错误"));
// return -1.0;
// }
// cv::Mat img = cv::Mat(xheight, xwidth, CV_16UC1, bandBuf);
// double min, max;
// cv::minMaxLoc(img, &min, &max);
// img.convertTo(imgTargetX, CV_8UC1, 255.0 / max, 0);
// //cv::imwrite("D:\\A_testdata\\niirs_test\\imgTargetX.png", imgTargetX);
//}
//else
//{
// ui.lineEdit_INFO->setText(QString::fromLocal8Bit("输入影像数据类型无法用于RER计算"));
// return -1.0;
//}
////读取y方向刃边图
//if (dataTypeY == GDT_Byte)
//{
// unsigned char* bandBuf = new unsigned char[ywidth * yheight];
// if (rasterYBand->RasterIO(GF_Read, 0, 0, ywidth, yheight, bandBuf, ywidth, yheight, dataTypeY, 0, 0) != 0)
// {
// ui.lineEdit_INFO->setText(QString::fromLocal8Bit("输入数据解析错误"));
// return -1.0;
// }
// imgTargetY = cv::Mat(yheight, ywidth, CV_8UC1, bandBuf);
//}
//else if (dataTypeY == GDT_UInt16)
//{
// unsigned short* bandBuf = new unsigned short[ywidth * yheight];
// if (rasterYBand->RasterIO(GF_Read, 0, 0, ywidth, yheight, bandBuf, ywidth, yheight, dataTypeY, 0, 0) != 0)
// {
// ui.lineEdit_INFO->setText(QString::fromLocal8Bit("输入数据解析错误"));
// return -1.0;
// }
// cv::Mat img = cv::Mat(yheight, ywidth, CV_16UC1, bandBuf);
// double min, max;
// cv::minMaxLoc(img, &min, &max);
// img.convertTo(imgTargetY, CV_8UC1, 255.0 / max, 0);
// //cv::imwrite("D:\\A_testdata\\niirs_test\\imgTargetY.png", imgTargetY);
//}
//else
//{
// ui.lineEdit_INFO->setText(QString::fromLocal8Bit("输入影像数据类型无法用于RER计算"));
// return -1.0;
//}
//double imgRER, imgH;
//QPair<double, double>edgeXVals = calculateERx(imgTargetX);
//imgRER = edgeXVals.first;
//imgH = edgeXVals.second;
//ui.lineEdit_RER->setText(QString::number(imgRER, 10, 4));
//ui.lineEdit_H->setText(QString::number(imgH, 10, 4));
//qDebug() << "--6.img RER: " << imgRER;
//qDebug() << "----img H:" << imgH;
//GDALClose(poDatasetX);
//GDALClose(poDatasetY);
//return imgRER;
}
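//Debug helper (the cv::Mat argument is not used): prints the mean (brightness) and standard deviation
//of every band of the image in lineEdit_img via GDAL.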
double NIIRS::calculateMeanStdDev(cv::Mat img)
{
GDALAllRegister();
CPLSetConfigOption("GDAL_FILENAME_IS_UTF8", "NO"); //allow non-ASCII (e.g. Chinese) paths
//open the evaluated image with GDAL and compute per-band mean (brightness) and standard deviation
QString evaluateImg = ui.lineEdit_img->text();
if (evaluateImg.isEmpty())
{
QMessageBox::warning(this, "warning", "The image to be evaluated is not opened");
return -1;
}
string tiffPath = evaluateImg.toStdString();
GDALDataset* hSrcDS;
hSrcDS = (GDALDataset*)GDALOpen(tiffPath.c_str(), GA_ReadOnly);
if (NULL == hSrcDS)
return -1;
GDALRasterBand* rasterBand;
qDebug() << "--Img Light&StdDev";
for (int i = 0; i < hSrcDS->GetRasterCount(); i++)
{
rasterBand = hSrcDS->GetRasterBand(i + 1);
double bandMin = 0;
double bandMax = 0;
double bandMean = 0;
double bandStdDev = 0;
rasterBand->ComputeStatistics(false, &bandMin, &bandMax, &bandMean, &bandStdDev, NULL, NULL);
qDebug() << "---band:" << i + 1 << endl << "----Light:" << bandMean << endl << "----StdDev:" << bandStdDev;
}
GDALClose(hSrcDS);
return 0.0;
}
//float NIIRS::calculateStdDev(cv::Mat img)
//{
// //转为灰度图
// cv::Mat imgGray;
// if (img.channels() == 3)
// cv::cvtColor(img, imgGray, CV_BGR2GRAY);
// else
// imgGray = img;
//
// cv::Scalar meanScalar, stdDevScalar;
// cv::meanStdDev(imgGray, meanScalar, stdDevScalar);
//
// float imgStdDev = stdDevScalar.val[0];
// return imgStdDev;
//}
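//Shannon entropy of one band: histogram the pixel values up to maxVal, convert to probabilities p_i,
//and return H = -sum(p_i * log2(p_i)).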
double NIIRS::calculateTiffEntropy(unsigned short* bandBuf, int bufSize, int maxVal)
{
//1. histogram: one bin per gray level up to the band maximum
QVector<double>temp(maxVal + 1);
std::cout << "max:" << maxVal + 1 << std::endl;
for (int i = 0; i < bufSize; i++)
temp[bandBuf[i]] += 1;
//2. convert the counts to probabilities
for (int i = 0; i < temp.size(); i++)
temp[i] = temp[i] / double(bufSize);
//3. Shannon entropy: H = -sum(p * log2(p)), skipping empty bins
double result = 0;
for (int i = 0; i < temp.size(); i++)
{
if (temp[i] > 0.0)
result = result - temp[i] * (std::log(temp[i]) / std::log(2.0));
}
return result;
}
//double NIIRS::calculateEntropy(cv::Mat img)
//{
// double temp[256] = { 0.0 };
//
// //计算每个像素的累积值
// for (int m = 0; m < img.rows; m++)
// {
// const uchar* t = img.ptr<uchar>(m);
// for (int n = 0; n < img.cols; n++)
// {
// int i = t[n];
// temp[i] = temp[i] + 1;
// }
// }
// //计算每个像素的概率
// for (int i = 0; i < 256; i++)
// {
// temp[i] = temp[i] / (img.rows * img.cols);
// }
// float result = 0;
// //计算图像信息熵
// for (int i = 0; i < 256; i++)
// {
// if (temp[i] == 0.0)
// result = result;
// else
// result = result - temp[i] * (log(temp[i]) / log(2.0));
// }
//
// return result;
//}
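//Contrast: sum of squared differences between each pixel and its four neighbours,
//normalised by the number of neighbour pairs (the border is replicated by one pixel).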
double NIIRS::calculateContrastRatio(cv::Mat img)
{
cv::Mat imgGray;
if (img.channels() == 3)
cv::cvtColor(img, imgGray, CV_BGR2GRAY);
else
imgGray = img;
//the neighbourhood loop below reads uchar pixels, so rescale 16-bit input to 8-bit first
if (imgGray.depth() == CV_16U)
{
double minVal = 0.0, maxVal = 0.0;
cv::minMaxLoc(imgGray, &minVal, &maxVal);
imgGray.convertTo(imgGray, CV_8U, maxVal > 0 ? 255.0 / maxVal : 1.0);
}
cv::Mat imgExtend;
cv::copyMakeBorder(imgGray, imgExtend, 1, 1, 1, 1, cv::BORDER_REPLICATE);//replicate the border so every original pixel has four neighbours
int row = imgExtend.rows;
int col = imgExtend.cols;
double bb = 0.0;
for (int i = 1; i < row - 1; i++)
{
for (int j = 1; j < col - 1; j++)
{
double center = imgExtend.at<uchar>(i, j);
bb += pow(center - imgExtend.at<uchar>(i, j - 1), 2) + pow(center - imgExtend.at<uchar>(i, j + 1), 2)
+ pow(center - imgExtend.at<uchar>(i - 1, j), 2) + pow(center - imgExtend.at<uchar>(i + 1, j), 2);
}
}
double cr = bb / (4 * (row - 2) * (col - 2) + 3 * (2 * (row - 2) + 2 * (col - 2)) + 2 * 4);
return cr;
}
//double NIIRS::calculateSNR(cv::Mat img)
//{
// cv::Mat imgGray;
// if (img.channels() == 3)
// cv::cvtColor(img, imgGray, CV_BGR2GRAY);
// else
// imgGray = img;
//
// cv::Scalar meanScalar, stdDevScalar;
// cv::meanStdDev(imgGray, meanScalar, stdDevScalar);
// double snr = meanScalar.val[0] / stdDevScalar.val[0];
// return snr;
//}
//float NIIRS::SFRCalculation(cv::Mat& ROI, double gamma)
//{
// if (ROI.empty())
// {
// std::cerr << "Open the ROI image error" << std::endl;
// return 0;
// }
// int height = ROI.rows, width = ROI.cols;
// //进行伽马解码
// de_Gamma(ROI, gamma);
// int i, j;
// double slope = 0, intercept = 0;
// //中心质心偏移Center centroid offset
// double CCoffset = 0;
// std::vector<double> y_shifts(height);
// //计算图像中心的质心和质心之间的偏移
// std::vector<double> Cen_Shifts = CentroidFind(ROI, y_shifts, &CCoffset);
// if (Cen_Shifts.empty())
// return 0;
// //斜边拟合,线性回归
// SLR(Cen_Shifts, y_shifts, &intercept, &slope);
// //将数据行数截断为最大斜率周期,该周期将具有整数个完整相位旋转
// ReduceRows(slope, &height);
// //将CCoffset更新为图像的原始中点与计算的参考中点之间的偏移
// CCoffset = CCoffset + 0.5 + intercept - width / 2;
// //进行过采样
// int SamplingLen = width * 4;
// std::vector<double> OverSamplingData = OverSampling(ROI, slope, CCoffset, height, width, &SamplingLen);
// //利用汉明窗对数据两侧的纹波信号进行滤波
// OverSamplingData = HammingWindows(OverSamplingData, SamplingLen);
// //离散傅里叶变换
// DFT(OverSamplingData, SamplingLen);
// width = int(SamplingLen / 4);
// double maxData = 0;
// for (i = 0; i < SamplingLen; ++i)
// {
// if (OverSamplingData[i] != 0)
// {
// maxData = OverSamplingData[i];
// break;
// }
// }
// for (int i = 0; i < SamplingLen; ++i)
// OverSamplingData[i] /= maxData;
// vector <pair<double, double>>vec_mtf;
// for (i = 0; i <= width; ++i)
// {
// double frequency = (double)i / width;
// pair<double, double>mtf_single;
// mtf_single.first = frequency;
// mtf_single.second = OverSamplingData[i];
// vec_mtf.push_back(mtf_single);
// //mtf_file << frequency << "," << OverSamplingData[i] << "\n";
// }
//
//
// return 0.0;
//}
//void NIIRS::DFT(std::vector<double>& data, int size)
//{
// int i, j;
// std::complex<double>* arr = new std::complex<double>[size];
// for (i = 0; i < size; ++i)
// {
// arr[i] = std::complex<double>(data[i], 0);
// }
//
// for (i = 0; i < size / 2.0; ++i)
// {
// std::complex<double> temp = 0;
// for (j = 0; j < size; ++j)
// {
// double w = 2 * 3.1415 * i * j / size;
// std::complex<double> deg(cos(w), -sin(w));
// temp += arr[j] * deg;
// }
// data[i] = sqrt(temp.real() * temp.real() + temp.imag() * temp.imag());
// }
//}
//std::vector<double> NIIRS::HammingWindows(std::vector<double>& deSampling, int SamplingLen)
//{
// int i, j;
// std::vector<double> tempData(SamplingLen);
//
// //We want to shift the peak data to the center of line spread function data
// //Because we will do the hamming window later, this will keep the important data away from filtering
// //In case there are two peaks, we use two variable to record the peak data position
// int L_location = -1, R_location = -1;
//
// double SamplingMax = 0;
// for (i = 0; i < SamplingLen; ++i)
// {
// if (fabs(deSampling[i]) > fabs(SamplingMax))
// SamplingMax = deSampling[i];
// }
//
// for (i = 0; i < SamplingLen; ++i)
// {
// if (deSampling[i] == SamplingMax)
// {
// if (L_location < 0) { L_location = i; }
// R_location = i;
// }
// }
//
// //the shift amount
// int PeakOffset = (R_location + L_location) / 2 - SamplingLen / 2;
//
// if (PeakOffset)
// {
// for (i = 0; i < SamplingLen; ++i)
// {
// int newIndex = i - PeakOffset;
// if (newIndex >= 0 && newIndex < SamplingLen) { tempData[newIndex] = deSampling[i]; }
// }
// }
// else
// {
// for (i = 0; i < SamplingLen; ++i) { tempData[i] = deSampling[i]; }
// }
//
// //do the hamming window filtering
// for (int i = 0; i < SamplingLen; ++i)
// {
// tempData[i] = tempData[i] * (0.54 - 0.46 * cos(2 * CV_PI * i / (SamplingLen - 1)));
// }
//
// return tempData;
//}
//std::vector<double> NIIRS::OverSampling(cv::Mat& Src, double slope, double CCoffset, int height, int width, int* SamplingLen)
//{
// std::vector<double> RowShifts(height, 0);
//
// int i, j, k;
// int halfY = height >> 1;
//
// //calculate the pixel shift of each row to align the centroid of each row as close as possible
// for (i = 0; i < height; ++i) { RowShifts[i] = (double)(i - halfY) / slope + CCoffset; }
//
// //DataMap -> to record the index of pixel after shift
// //Datas -> to record the original pixel value
// std::vector<double> DataMap(height * width, 0);
// std::vector<double> Datas(height * width, 0);
//
// for (i = 0, k = 0; i < height; ++i)
// {
// int baseindex = width * i;
// for (j = 0; j < width; ++j)
// {
// DataMap[baseindex + j] = j - RowShifts[i];
// Datas[baseindex + j] = int(Src.at<uchar>(i, j));
// }
// }
//
// std::vector<double> SamplingBar(*SamplingLen, 0);
// std::vector<int> MappingCount(*SamplingLen, 0);
//
// //Start to mapping the original data to 4x sampling data and record the count of each pixel in 4x sampling data
// for (i = 0; i < height * width; ++i)
// {
// int MappingIndex = static_cast<int>(4 * DataMap[i]);
// if (MappingIndex >= 0 && MappingIndex < *SamplingLen)
// {
// SamplingBar[MappingIndex] = SamplingBar[MappingIndex] + Datas[i];
// MappingCount[MappingIndex]++;
// }
// }
//
// //average the pixel value in 4x sampling data, if the pixel value in pixel is zero, copy the value of close pixel
// for (i = 0; i < *SamplingLen; ++i) {
// j = 0;
// k = 1;
// if (MappingCount[i] == 0) {
//
// if (i == 0) {
// while (!j)
// {
// if (MappingCount[i + k] != 0)
// {
// SamplingBar[i] = SamplingBar[i + k] / ((double)MappingCount[i + k]);
// j = 1;
// }
// else ++k;
// }
// }
// else {
// while (!j && ((i - k) >= 0))
// {
// if (MappingCount[i - k] != 0)
// {
// SamplingBar[i] = SamplingBar[i - k]; /* Don't divide by counts since it already happened in previous iteration */
// j = 1;
// }
// else ++k;
// }
// if ((i - k) < 0)
// {
// k = 1;
// while (!j)
// {
// if (MappingCount[i + k] != 0)
// {
// SamplingBar[i] = SamplingBar[i + k] / ((double)MappingCount[i + k]);
// j = 1;
// }
// else ++k;
// }
// }
// }
// }
// else
// SamplingBar[i] = (SamplingBar[i]) / ((double)MappingCount[i]);
// }
//
// // reduce the length of sampling data
// // because the datas at the edge are only matters, we truncate the data close to the two side of sampling data
// // which has very small contribution to the result
//
// //truncating the data smaller than 10% and bigger than 90% of original length
// int originalSamplingLen = *SamplingLen;
// *SamplingLen = *SamplingLen * 0.8;
//
// std::vector<double> deSampling(*SamplingLen, 0);
// //derivative sampling data(which is ESF) to get the line spread function
// for (i = originalSamplingLen * 0.1, j = 1; i < originalSamplingLen, j < *SamplingLen; ++i, ++j)
// {
// deSampling[j] = SamplingBar[i + 1] - SamplingBar[i];
// }
//
// return deSampling;
//}
//void NIIRS::ReduceRows(double slope, int* ImgHeight)
//{
// double tempSlope = fabs(slope);
// int cycs = (*ImgHeight) / tempSlope;
// if (tempSlope * cycs < *ImgHeight) { *ImgHeight = tempSlope * cycs; }
//}
//void NIIRS::SLR(std::vector<double>& Cen_Shifts, std::vector<double>& y_shifts, double* a, double* b)
//{
// //a -> intercept, b->slope of slanted edge
//
// int y_size = y_shifts.size();
// //using simple linear regression to solve equation and get slope and intercept
// double xsquare = 0, xavg = 0, yavg = 0;
// int i;
// for (i = 0; i < y_size; ++i)
// {
// yavg += y_shifts[i];
// xavg += Cen_Shifts[i];
// }
// xavg /= (double)y_size;
// yavg /= (double)y_size;
//
// //simple linear regession
// for (i = 0; i < y_size; ++i)
// {
// double temp = (Cen_Shifts[i] - xavg);
// *b += temp * (y_shifts[i] - yavg);
// xsquare += temp * temp;
// }
//
// *b /= xsquare;
// *a = yavg - (*b) * xavg;
//}
//std::vector<double> NIIRS::CentroidFind(cv::Mat& Src, std::vector<double>& y_shifts, double* CCoffset)
//{
// std::vector<double> Cen_Shifts(Src.rows);
// int i, j, height = Src.rows, width = Src.cols;
//
// cv::Mat tempSrc(Src.size(), CV_8UC1);
//
// //Do the bilaterFilter on template ROI image to make sure we can find the slanted edge more acurrate
// cv::bilateralFilter(Src, tempSrc, 3, 200, 200);
//
// // calculate the centroid of every row in ROI
// // centroid formuld: Total moments/Total amount
// for (i = 0; i < height; ++i)
// {
// double molecule = 0, denominator = 0, temp = 0;
// uchar* tempSrcPtr = tempSrc.ptr<uchar>(i);
// for (j = 0; j < width - 1; ++j)
// {
// temp = (double)tempSrcPtr[j + 1] - (double)tempSrcPtr[j];
// molecule += temp * j;
// denominator += temp;
// }
// Cen_Shifts[i] = molecule / denominator;
// }
//
// //Eliminate the noise far from slant edge(+/- 10 pixels)
// tempSrc = Src.clone();
// cv::GaussianBlur(Src, Src, cv::Size(3, 3), 0);
// for (i = 0; i < height; ++i)
// {
// uchar* SrcPtr = Src.ptr<uchar>(i);
// uchar* tempPtr = tempSrc.ptr<uchar>(i);
// for (j = int(Cen_Shifts[i]) - 10; j < int(Cen_Shifts[i]) + 10; ++j)
// {
// SrcPtr[j] = tempPtr[j];
// }
// }
//
// // check whether the edge in image is too close to the image corners
// if (Cen_Shifts[0] < 2.0 || width - Cen_Shifts[0] < 2.0)
// {
// std::cerr << "The edge in ROI is too close to the image corners" << std::endl;
// return {};
// }
//
// if (Cen_Shifts[height - 1] < 2 || width - Cen_Shifts[height - 1] < 2)
// {
// std::cerr << "The edge in ROI is too close to the image corners" << std::endl;
// return {};
// }
//
// int half_ySize = height / 2;
// int CC = Cen_Shifts[half_ySize]; //Centroid of image center
// *CCoffset = CC;
// for (i = 0; i < height; ++i)
// {
// Cen_Shifts[i] -= CC; //Calculate the Shifts between Centroids and Centroid of image center
// y_shifts[i] = i - half_ySize; //Calculate the shifts between height of image center and each row
// }
//
// return Cen_Shifts;
//}
//void NIIRS::de_Gamma(cv::Mat& Src, double gamma)
//{
// if (Src.channels() != 1) { return; }
//
// for (int i = 0; i < Src.rows; ++i)
// {
// uchar* SrcP = Src.ptr(i);
// for (int j = 0; j < Src.cols; ++j)
// {
// SrcP[j] = 255 * (pow((double)SrcP[j] / 255, 1 / gamma));
// }
// }
//}
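//For one candidate edge (pointA-pointB) in the 4x-upsampled image, build the edge spread function
//by bucketing pixel values by their signed distance to the edge, then derive RER and overshoot H from it.
//Returns the pair (RER, H).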
QPair<double, double> NIIRS::calculate_rer_h(cv::Mat img, cv::Point pointA, cv::Point pointB)
{
QPair<double, double>RER_H = QPair<double, double>();
cv::Point pointLeft, pointRight;
pointLeft = pointA;
pointRight = pointB;
if (pointLeft.x > pointRight.x)
{
cv::Point temp = pointLeft;
pointLeft = pointRight;
pointRight = temp;
}
//map value: vector<int> of all pixel values at the same distance from the edge (averaged later)
//map key: signed pixel distance from the edge
QMap<int, vector<int>>qmapESF;
vector<int>vecTemp;
vecTemp.push_back(127);
for (int i = -20; i < 22; i++)
qmapESF.insert(i, vecTemp);
cv::Mat mask;
cv::Point pt1, pt2;
double k = (double(pointRight.y) - pointLeft.y) / (double(pointRight.x) - pointLeft.x);
if (k < -1 || k > 1)
{
cv::Point pointBottom, pointTop;
pointBottom = pointA;
pointTop = pointB;
if (pointBottom.y > pointTop.y)
{
cv::Point temp = pointBottom;
pointBottom = pointTop;
pointTop = temp;
}
int rangeL = 0, rangeT = 0, rangeR = 0, rangeB = 0;
int cutImgTopx = 0, cutImgTopy = 0, cutImgBotx = 0, cutImgBoty = 0;
if (pointBottom.x == pointTop.x)
{
rangeL = pointBottom.x - 20;
rangeR = pointBottom.x + 20 + 4;
rangeT = pointTop.y;
rangeB = pointTop.y;
cutImgTopx = 20;
cutImgTopy = 0;
cutImgBotx = pointTop.x - pointBottom.x + 20;
cutImgBoty = pointTop.y - pointBottom.y;
}
else if (k > 0)
{
double k2 = 1 / k;
int deltay = 20, deltax;
while (25 > pow((k * deltay), 2) + pow(double(deltay), 2))
deltay = deltay + 20;
deltax = ceil(deltay / k2);
rangeL = pointBottom.x - deltax;
rangeR = pointTop.x + deltax;
rangeT = pointTop.y + deltay + 4;
rangeB = pointBottom.y - deltay;
cutImgTopx = pointBottom.x - rangeL;
cutImgTopy = pointBottom.y - rangeB;
cutImgBotx = pointTop.x - rangeL;
cutImgBoty = pointTop.y - rangeB;
}
else//k<0
{
double k2 = 1 / k;
int deltay = 20, deltax;
while (25 > pow((k * deltay), 2) + pow(double(deltay), 2))
deltay = deltay + 20;
deltax = ceil(deltay / -k2);
//qDebug() << deltay << deltay / k2 << "->" << deltax;
rangeL = pointTop.x - deltax;
rangeR = pointBottom.x + deltax;
rangeB = pointBottom.y - deltay;
rangeT = pointTop.y + deltay + 4;
cutImgBotx = pointTop.x - rangeL;
cutImgBoty = pointTop.y - rangeB;
cutImgTopx = pointBottom.x - rangeL;
cutImgTopy = pointBottom.y - rangeB;
}
if (rangeL - rangeR == 0)
return RER_H;
if (rangeL < 0)
rangeL = 0;
if (rangeR > img.cols)
rangeR = img.cols;
if (rangeT > img.rows)
rangeT = img.rows;
if (rangeB < 0)
rangeB = 0;
//qDebug() << QString("Y rangeB:%1,rangeT:%2; rangeL:%3,rangeR:%4").arg(rangeB).arg(rangeT).arg(rangeL).arg(rangeR);
//QString outBmpPath = outPath + "/" + filename;
cv::Mat miniEdgeX = img(cv::Range(rangeB, rangeT), cv::Range(rangeL, rangeR));
cv::Mat evalueImg;
miniEdgeX.copyTo(evalueImg);
pt1 = cv::Point(cutImgTopx, cutImgTopy);
pt2 = cv::Point(cutImgBotx, cutImgBoty);
cv::Mat temp(evalueImg.rows, evalueImg.cols, evalueImg.type(), cv::Scalar(0));
mask = temp;
int rows = evalueImg.rows;
int cols = evalueImg.cols;
for (int row = 0; row < rows; row++)
{
for (int col = 0; col < cols; col++)
{
				//Current sampling point
cv::Point samplingPoint(col, row);
				//Used to compute the distance from this point to the knife edge
array<cv::Point, 3>sampling_to_edge = { samplingPoint, pt1, pt2 };
				//Foot of the perpendicular from the sampling point to the edge; its x coordinate decides the sign of dis
cv::Point2d samplingPointFoot = calculateFootPoint(sampling_to_edge);
if (samplingPointFoot.y < pt1.y || samplingPointFoot.y > pt2.y)
continue;
				//Distance from the sampling point to the edge
int dis = pointDistance(samplingPointFoot, samplingPoint);
if (dis > 21)
continue;
mask.at<uchar>(row, col) = evalueImg.at<uchar>(row, col);
if (samplingPointFoot.x > col)
dis = 0 - dis;
// int value = evalueImg.at<uchar>(row, col);
				//Append the pixel value to the qmapESF bucket for this distance
// if (qmapESF.contains(dis))
// {
vector<int>pixValues = qmapESF.value(dis);
pixValues.push_back(evalueImg.at<uchar>(row, col));
qmapESF.insert(dis, pixValues);
// }
// else
// {
// vector<int>pixValues;
// pixValues.push_back(evalueImg.at<uchar>(row, col));
// qmapESF.insert(dis, pixValues);
// }
}
}
}
else
{
int rangeL = 0, rangeT = 0, rangeR = 0, rangeB = 0;
int cutImgLTx = 0, cutImgLTy = 0, cutImgRBx = 0, cutImgRBy = 0;
if (k == 0)
{
rangeL = pointLeft.x;
rangeR = pointRight.x;
rangeT = pointRight.y + 20 + 4;
rangeB = pointLeft.y - 20;
cutImgLTx = pointLeft.x - pointLeft.x;
cutImgLTy = pointLeft.y + 20 - pointLeft.y;
cutImgRBx = pointRight.x - pointLeft.x;
cutImgRBy = pointRight.y + 20 - pointLeft.y;
}
else if (k > 0)
{
double k2 = 1 / k;
int deltaY = 20, deltaX;
while (25 > pow((k * deltaY), 2) + pow(double(deltaY), 2))
deltaY = deltaY + 20;
deltaX = ceil(deltaY / k2);
rangeL = pointLeft.x - deltaX;
rangeR = pointRight.x + deltaX;
rangeT = pointRight.y + deltaY + 4;
rangeB = pointLeft.y - deltaY;
cutImgLTx = pointLeft.x - rangeL;
cutImgLTy = pointLeft.y - rangeB;
cutImgRBx = pointRight.x - rangeL;
cutImgRBy = pointRight.y - rangeB;
}
else//k<0
{
double k2 = 1 / k;
int deltay = 20, deltax;
while (25 > pow((k * deltay), 2) + pow(double(deltay), 2))
deltay = deltay + 20;
deltax = ceil(deltay / -k2);
rangeL = pointLeft.x - deltax;
rangeR = pointRight.x + deltax;
rangeB = pointRight.y - deltay;
rangeT = pointLeft.y + deltay + 4;
cutImgLTx = pointLeft.x - rangeL;
cutImgLTy = pointLeft.y - rangeB;
cutImgRBx = pointRight.x - rangeL;
cutImgRBy = pointRight.y - rangeB;
}
if (rangeL - rangeR == 0)
return RER_H;
if (rangeL < 0)
rangeL = 0;
if (rangeB < 0)
rangeB = 0;
if (rangeT > img.rows)
rangeT = img.rows;
if (rangeR > img.cols)
rangeR = img.cols;
//qDebug() << QString("rangeB:%1,rangeT:%2; rangeL:%3,rangeR:%4").arg(rangeB).arg(rangeT).arg(rangeL).arg(rangeR);
cv::Mat miniEdgeX = img(cv::Range(rangeB, rangeT), cv::Range(rangeL, rangeR));
cv::Mat evalueImg;
miniEdgeX.copyTo(evalueImg);
pt1 = cv::Point(cutImgLTx, cutImgLTy);
pt2 = cv::Point(cutImgRBx, cutImgRBy);
cv::Mat temp(evalueImg.rows, evalueImg.cols, evalueImg.type(), cv::Scalar(0));
mask = temp;
int rows = evalueImg.rows;
int cols = evalueImg.cols;
for (int row = 0; row < rows; row++)
{
for (int col = 0; col < cols; col++)
{
				//Current sampling point
cv::Point samplingPoint(col, row);
				//Used to compute the distance from this point to the knife edge
array<cv::Point, 3>sampling_to_edge = { samplingPoint, pt1, pt2 };
				//Foot of the perpendicular from the sampling point to the edge; its y coordinate decides the sign of dis
cv::Point2d samplingPointFoot = calculateFootPoint(sampling_to_edge);
if (samplingPointFoot.x < pt1.x || samplingPointFoot.x > pt2.x)
continue;
				//Distance from the sampling point to the edge
int dis = pointDistance(samplingPointFoot, samplingPoint);
if (dis > 21)
continue;
mask.at<uchar>(row, col) = evalueImg.at<uchar>(row, col);
if (samplingPointFoot.y > row)
dis = 0 - dis;
// int value = evalueImg.at<uchar>(row, col);
				//Append the pixel value to the qmapESF bucket for this distance
// if (qmapESF.contains(dis))
// {
vector<int>pixValues = qmapESF.value(dis);
pixValues.push_back(evalueImg.at<uchar>(row, col));
qmapESF.insert(dis, pixValues);
// }
// else
// {
// vector<int>pixValues;
// pixValues.push_back(evalueImg.at<uchar>(row, col));
// qmapESF.insert(dis, pixValues);
// }
}
}
}
//cv::line(mask, pt1, pt2, cv::Scalar(0, 0, 0));
	//Average the values collected per distance in qmapESF to obtain the ESF
QMap<int, int>ESF;
QMap<int, vector<int>>::const_iterator it = qmapESF.constBegin();
while (it != qmapESF.constEnd())
{
if (it.value().size() == 1)
ESF.insert(it.key(), it.value().at(0));
else
{
vector<int>samdisValue = it.value();
int sum = 0;
int mean = 0;
for (int valuesize = 0; valuesize < samdisValue.size(); valuesize++)
sum = sum + samdisValue[valuesize];
int sameSize = samdisValue.size();
mean = sum / samdisValue.size();
ESF.insert(it.key(), mean);
}
++it;
}
QMap<int, int>::const_iterator it2 = ESF.constBegin();
int min = it2.value();
int max = it2.value();
while (it2 != ESF.constEnd())
{
if (min > it2.value())
min = it2.value();
if (max < it2.value())
max = it2.value();
++it2;
}
int edgeValuePdot5 = ESF.value(2);
int edgeValueNdot5 = ESF.value(-2);
double ER_p05 = (double(edgeValuePdot5) - min) / (double(max) - min);
double ER_n05 = (double(edgeValueNdot5) - min) / (double(max) - min);
double resultRER = ER_p05 - ER_n05;
int edgeValue125 = ESF.value(5);
if (resultRER < 0)
{
edgeValue125 = ESF.value(-5);
resultRER = abs(resultRER);
}
double resultH = (double(edgeValue125) - min) / (double(max) - min);
RER_H.first = resultRER;
RER_H.second = resultH;
if (resultRER > 0.55)
{
cv::line(mask, pt1, pt2, cv::Scalar(0, 0, 0));
qDebug() << "resultRER:" << resultRER << "resultH:" << resultH;
}
return RER_H;
}
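//Illustrative sketch (not called anywhere): how the RER/H readout at the end of
//calculate_rer_h could be factored into a helper. The ESF layout is the same
//QMap<int, int> built above (distance from the edge -> mean DN); keys +/-2 and
//+/-5 are the samples the function above reads for RER and for the overshoot H.
static QPair<double, double> rerAndOvershootFromESF(const QMap<int, int>& esf)
{
	QPair<double, double> rer_h(0.0, 0.0);
	if (esf.isEmpty())
		return rer_h;
	int minDN = esf.first();
	int maxDN = esf.first();
	for (QMap<int, int>::const_iterator it = esf.constBegin(); it != esf.constEnd(); ++it)
	{
		if (it.value() < minDN) minDN = it.value();
		if (it.value() > maxDN) maxDN = it.value();
	}
	if (maxDN == minDN)
		return rer_h;//flat patch, no usable edge response
	double range = double(maxDN) - double(minDN);
	double erPos = (double(esf.value(2)) - minDN) / range;
	double erNeg = (double(esf.value(-2)) - minDN) / range;
	double rer = erPos - erNeg;
	int overshootKey = (rer >= 0) ? 5 : -5;//read the overshoot on the brighter side
	if (rer < 0)
		rer = -rer;
	double h = (double(esf.value(overshootKey)) - minDN) / range;
	rer_h.first = rer;
	rer_h.second = h;
	return rer_h;
}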
cv::Point2d NIIRS::calculateFootPoint(array<cv::Point, 3>& ps)
{
	cv::Point2d p(0.0, 0.0); // foot of the perpendicular
	if (ps.at(1).x == ps.at(2).x) // the edge is vertical (perpendicular to the x axis)
{
p.x = ps.at(1).x;
p.y = ps.at(0).y;
}
	else if (ps.at(1).y == ps.at(2).y) // the edge is horizontal (perpendicular to the y axis)
{
p.x = ps.at(0).x;
p.y = ps.at(1).y;
}
	else // general case: the edge is neither vertical nor horizontal
	{
		double a1 = double(ps.at(1).y) - double(ps.at(2).y);
		double b1 = double(ps.at(2).x) - double(ps.at(1).x);
		double c1 = double(ps.at(1).x) * double(ps.at(2).y) - double(ps.at(2).x) * double(ps.at(1).y);
//pFoot.x = (B * B * pA.x - A * B * pA.y - A * C) / (A * A + B * B);
//pFoot.y = (A * A * pA.y - A * B * pA.x - B * C) / (A * A + B * B);
double footx = (b1 * b1 * double(ps.at(0).x) - a1 * b1 * double(ps.at(0).y) - a1 * c1) / (a1 * a1 + b1 * b1);
double footy = (a1 * a1 * double(ps.at(0).y) - a1 * b1 * double(ps.at(0).x) - b1 * c1) / (a1 * a1 + b1 * b1);
//p.x = (b1 * b1 * ps.at(0).x - a1 * b1 * ps.at(0).y - a1 * c1) / (a1 * a1 + b1 * b1);
//p.y = (a1 * a1 * ps.at(0).y - a1 * b1 * ps.at(0).x - b1 * c1) / (a1 * a1 + b1 * b1);
p.x = footx;
p.y = footy;
}
return p;
}
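//Minimal sanity sketch for the projection above (illustrative only, not called).
//Writing the edge through pt1/pt2 as a*x + b*y + c = 0, the returned foot must
//lie on that line and the vector foot->P must be perpendicular to the edge
//direction. The 1e-6 tolerance is an assumption.
static bool footPointIsConsistent(const cv::Point2d& p, const cv::Point2d& pt1,
	const cv::Point2d& pt2, const cv::Point2d& foot)
{
	double a = pt1.y - pt2.y;
	double b = pt2.x - pt1.x;
	double c = pt1.x * pt2.y - pt2.x * pt1.y;
	double onLine = a * foot.x + b * foot.y + c;//~0 when the foot lies on the edge
	double dot = (p.x - foot.x) * (pt2.x - pt1.x)
		+ (p.y - foot.y) * (pt2.y - pt1.y);//~0 when foot->P is perpendicular to the edge
	return fabs(onLine) < 1e-6 && fabs(dot) < 1e-6;
}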
QPair<double, double> NIIRS::calculateERx(cv::Mat img)
{
QPair<double, double>ER_H;
cv::Mat imgGray, imgGrayX4;
if (img.channels() == 3)
cv::cvtColor(img, imgGray, CV_BGR2GRAY);
else
imgGray = img;
	//Upsample to 4x the original size for 1/4-pixel super-sampling
cv::pyrUp(imgGray, imgGrayX4);
cv::pyrUp(imgGrayX4, imgGrayX4);
////高斯滤波
//cv::Mat imgGaussBlur;
//cv::GaussianBlur(imgGray, imgGaussBlur, cv::Size(3, 3), 3, 3);
//cv::imshow("imgGaussBlur", imgGaussBlur);
cv::Mat imgThres;
cv::threshold(imgGray, imgThres, 127, 255, cv::THRESH_BINARY);
//cv::imwrite("D:\\A_testdata\\niirs_test\\imgThresx.png", imgThres);
//cv::imshow("imgThres", imgThres);
//cv::waitKey(1);
	//Canny edge extraction
cv::Mat imgCanny;
cv::Canny(imgThres, imgCanny, 100, 200);
//cv::imshow("imgCanny", imgCanny);
//cv::waitKey(1);
vector<cv::Vec4i>R_lines;
cv::HoughLinesP(imgCanny, R_lines, 1, CV_PI / 180, 10, 25, 100);
	//Collect the endpoints of all detected segments; the pair with the largest separation becomes the edge endpoints pt1/pt2
//for (size_t i = 0; i < R_lines.size(); i++)
//{
// cv::Vec4i l = R_lines[i];
// cv::line(imgGrayX4, cv::Point(l[0] * 4, l[1] * 4), cv::Point(l[2] * 4, l[3] * 4), cv::Scalar(255 / (i+1)), 3);
//}
//cv::imshow("imgGrayX4", imgGrayX4);
//return 0.0;
vector<cv::Point>top_points;
vector<cv::Point>bot_points;
if (R_lines.size() == 0)
{
qDebug() << "find no line in edge x";
ui.lineEdit_INFO->setText(QString::fromLocal8Bit("无法从输入数据中提取刃边"));
return ER_H;
}
for (size_t z = 0; z < R_lines.size(); z++)
{
cv::Vec4i p = R_lines[z];
//cout << "line x0: " << cv::Point(p[0], p[1]) <<", x1: "<< cv::Point(p[2], p[3]) << std::endl;
top_points.push_back(cv::Point(p[0], p[1]));
bot_points.push_back(cv::Point(p[2], p[3]));
}
	//Matrix of pairwise distances between segment endpoints
cv::Mat img_dis(top_points.size(), bot_points.size(), CV_32FC1, cv::Scalar(0));
for (int row = 0; row < top_points.size(); row++)
{
cv::Vec<float, 1>* data_Ptr = img_dis.ptr<cv::Vec<float, 1>>(row);
for (int col = 0; col < bot_points.size(); col++)
{
if (row == col)
data_Ptr[col] = 0.0;
else
data_Ptr[col] = pointDistance(top_points[row], bot_points[col]);
}
}
double minv, maxv;
cv::Point pt_min, pt_max;
cv::minMaxLoc(img_dis, &minv, &maxv, &pt_min, &pt_max);
//std::cout << "img_dis = " << std::endl << img_dis << std::endl;
//std::cout << "maxv = " << maxv << std::endl;
//std::cout << "idx_max = " << pt_max << std::endl;
	//Knife-edge endpoints (scaled to the 4x super-sampled image)
cv::Point pt1 = cv::Point(top_points[pt_max.y].x * 4, top_points[pt_max.y].y * 4);
cv::Point pt2 = cv::Point(bot_points[pt_max.x].x * 4, bot_points[pt_max.x].y * 4);
//std::cout << "Edge End Point: " << pt1 << ", " << pt2 << std::endl;
cv::Mat masksrc(imgCanny.rows * 4, imgCanny.cols * 4, imgCanny.type(), cv::Scalar(0));
int rows = masksrc.rows;
int cols = masksrc.cols;
//cout << "row:" << rows << ", col:" << cols << std::endl;
//cv::line(masksrc, pt1, pt2, cv::Scalar(255), 1, 8);
//cv::imshow("xxxedge", masksrc);
//cv::waitKey(1);
//cv::imwrite("D:\\A_testdata\\niirs_test\\imgLinex.png", masksrc);
	//pair.second: vector<int> holds every pixel value sampled at the same distance, averaged later
	//pair.first: int distance from the knife edge
vector<pair<int, vector<int>>>vecESF;
for (int row = 0; row < rows; row++)
{
for (int col = 0; col < cols; col++)
{
			//Current sampling point
cv::Point samplingPoint(col, row);
			//Used to compute the distance from this point to the knife edge
array<cv::Point, 3>sampling_to_edge = { samplingPoint, pt1, pt2 };
			//Distance from the sampling point to the edge
int dis = 0;
//cout << "mask at row:" << row << ",col:" << col;
			//Foot of the perpendicular from the sampling point to the edge; its x coordinate decides the sign of dis
cv::Point samplingPointFoot = calculate_foot_point(sampling_to_edge);
//cout << ",edge foot at row:" << samplingPointFoot.y << ",col:" << samplingPointFoot.x;
if (samplingPointFoot.x < col)
{
//dis = calculate_distance(sampling_to_edge);
dis = pointDistance(samplingPointFoot, samplingPoint);
}
else
{
//dis = 0 - calculate_distance(sampling_to_edge);
dis = 0 - pointDistance(samplingPointFoot, samplingPoint);
}
int value = imgGrayX4.at<uchar>(row, col);
//cout << ",distance to edge:" << dis << ",value:"
// << value << std::endl;
pair<int, vector<int>>pix_dis_value;
			//Store into vecESF
			if (vecESF.size() == 0)//vecESF is empty: insert directly
{
pix_dis_value.first = dis;
pix_dis_value.second.push_back(imgGrayX4.at<uchar>(row, col));
vecESF.push_back(pix_dis_value);
}
			else//vecESF is not empty: look for an existing entry with the same distance
{
				//Scan vecESF for an entry with the same distance
for (int vecsize = 0; vecsize < vecESF.size(); vecsize++)
{
					if (vecESF[vecsize].first == dis)//found: append the value
{
vecESF[vecsize].second.push_back(imgGrayX4.at<uchar>(row, col));
break;
}
					if (vecsize == vecESF.size() - 1)//no match found: create a new entry
{
pix_dis_value.first = dis;
pix_dis_value.second.push_back(imgGrayX4.at<uchar>(row, col));
vecESF.push_back(pix_dis_value);
						break;//the push_back above grew vecESF; break to avoid iterating over it again
}
}
}
}
}
//cout << "vecESF.size():" << vecESF.size() << std::endl;
	//Average the values stored per distance in vecESF to obtain the ESF
vector<pair<int, int>>ESF;
	//Compute the ESF
for (int vecsize = 0; vecsize < vecESF.size(); vecsize++)
{
if (vecESF[vecsize].second.size() == 1)
{
pair<int, int>samedisESF(vecESF[vecsize].first, vecESF[vecsize].second[0]);
ESF.push_back(samedisESF);
}
else
{
vector<int>samdisValue = vecESF[vecsize].second;
int sum = 0;
int mean = 0;
for (int valuesize = 0; valuesize < samdisValue.size(); valuesize++)
{
sum = sum + samdisValue[valuesize];
}
int sameSize = samdisValue.size();
mean = sum / samdisValue.size();
pair<int, int>samedisESF(vecESF[vecsize].first, mean);
ESF.push_back(samedisESF);
}
}
	//Walk the ESF: average the values on the positive side and on the negative side of the edge
pair<double, double>DN_max_min;
vector<int>positive_side, negative_side;
	// ESF samples at +0.5, -0.5 and +1.25 pixels from the edge (keys 2, -2 and 5 in the 4x super-sampled grid)
	double edgeValue_p05 = 0.0, edgeValue_n05 = 0.0, edgeValue_p1dot25 = 0.0;
for (int i = 0; i < ESF.size(); i++)
{
if (ESF[i].first > 0)
positive_side.push_back(ESF[i].second);
else if (ESF[i].first < 0)
negative_side.push_back(ESF[i].second);
if (ESF[i].first == 2)
edgeValue_p05 = ESF[i].second;
if (ESF[i].first == -2)
edgeValue_n05 = ESF[i].second;
if (ESF[i].first == 5 && ESF[i].second > 0)
edgeValue_p1dot25 = abs(ESF[i].second);
}
double sumpos = accumulate(begin(positive_side), end(positive_side), 0.0);
double sumneg = accumulate(begin(negative_side), end(negative_side), 0.0);
double mean_positive = sumpos / positive_side.size();
double mean_negative = sumneg / negative_side.size();
DN_max_min.first = mean_positive > mean_negative ? mean_positive : mean_negative;
DN_max_min.second = mean_positive < mean_negative ? mean_positive : mean_negative;
//cout << "lager side:" << DN_max_min.first << ",small side:" << DN_max_min.second << std::endl;
//edgeValue_n05
//edgeValue_p05
double ER_p05 = (edgeValue_p05 - DN_max_min.second) / (DN_max_min.first - DN_max_min.second);
double ER_n05 = (edgeValue_n05 - DN_max_min.second) / (DN_max_min.first - DN_max_min.second);
double Hx = double(edgeValue_p1dot25 - DN_max_min.second) / double(DN_max_min.first - DN_max_min.second);
double ER = ER_p05 - ER_n05;
ER_H.first = ER;
ER_H.second = Hx;
return ER_H;
/*-----------------------------------------------------------------------------------------------------------*/
////ESF求斜率求LSF
////在ESF最后扩展一个用于求斜率
//pair<int, int>tempend(ESF[ESF.size()].first + 1, ESF[ESF.size()].second);
//ESF.push_back(tempend);
//for (int i = 0; i < ESF.size() - 1; i++)//遍历至倒数第二位
//{
// pair<int, int>temp;
// temp.first = ESF[i].first;
// temp.second = (ESF[i].second - ESF[i + 1].second) / (ESF[i].first - ESF[i + 1].first);
// LSF.push_back(temp);
//}
//vector<int>vec_LSF[2];
//for (int vecsize = 0; vecsize < LSF.size(); vecsize++)
//{
// vec_LSF[0].push_back(LSF[vecsize].first);
// vec_LSF[1].push_back(LSF[vecsize].second);
//}
////一行
////cv::Mat mat_LSF = cv::Mat::zeros(1, vec_LSF[1].size(), CV_32F);// = cv::Mat(vec_LSF)
////for (int i = 0; i < vec_LSF[1].size(); i++)
//// mat_LSF.at<float>(0, i) = float(vec_LSF[1][i]);
////cv::imwrite("mat_LSF.png", mat_LSF);
////std::cout <<"mat_LSF channels: " << mat_LSF.channels() << ", size(width x height): " << mat_LSF.size() << std::endl;
//cv::Mat fft_real, fft_imag, fft_complex;//复数实部,复数虚部,完整复数
//fft_real = cv::Mat::zeros(1, vec_LSF[1].size(), CV_32F);
//fft_imag = cv::Mat::zeros(1, vec_LSF[1].size(), CV_32F);
//for (int i = 0; i < vec_LSF[1].size(); i++)
//{
// fft_real.at<float>(0, i) = float(vec_LSF[1][i]);
// fft_imag.at<float>(0, i) = 0;
//}
//std::cout << "1.fft_real channels: " << fft_real.channels() << ", size(width x height): " << fft_real.size() << std::endl;
//std::cout << "1.fft_imag channels: " << fft_imag.channels() << ", size(width x height): " << fft_imag.size() << std::endl;
//vector<cv::Mat>vec_complex;
//vec_complex.push_back(fft_real);
//vec_complex.push_back(fft_imag);
//cv::merge(vec_complex, fft_complex);
//cv::dft(fft_complex, fft_complex);
//cv::Mat twoChannels[2];
////重新拆分出实部虚部
//cv::split(fft_complex, twoChannels);
////std::cout << "----2.fft_real----" << std::endl << twoChannels[0] << std::endl
//// << "channels: " << twoChannels[0].channels() << ", size(width x height): " << twoChannels[0].size() << std::endl;
////std::cout << "----2.fft_imag----" << std::endl << twoChannels[1] << std::endl
//// << "channels: " << twoChannels[1].channels() << ", size(width x height): " << twoChannels[1].size() << std::endl;
//cv::Mat matMTF = cv::Mat::zeros(1, vec_LSF[1].size(), CV_32F);
//for (int i = 0; i < vec_LSF[1].size(); i++)
//{
// matMTF.at<float>(0, i)
// = sqrt(float(twoChannels[0].at<float>(0, i)) * float(twoChannels[0].at<float>(0, i))
// + float(twoChannels[1].at<float>(0, i)) * float(twoChannels[1].at<float>(0, i)));
//}
//std::cout << "matMTF" << std::endl << matMTF << std::endl
// << "channels: " << matMTF.channels() << ", size(width x height): " << matMTF.size() << std::endl;
////暂时取前1/2作为mtfxy轴归一化做积分算ER
//cout << "--------------------------------------------" << std::endl;
//return 0.0;
}
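//Illustrative sketch (not called anywhere) of the endpoint selection used in
//calculateERx/calculateERy above: among the endpoints of all Hough segments,
//the pair with the largest separation is taken as the knife-edge endpoints,
//scaled by 4 to match the pyrUp'ed image. Brute-force counterpart of the
//cv::minMaxLoc search over the distance matrix (here a segment may be paired
//with its own endpoints, which the matrix version zeroes out).
static void farthestEndpointPair(const vector<cv::Vec4i>& lines,
	cv::Point& ptA, cv::Point& ptB)
{
	double best = -1.0;
	for (size_t i = 0; i < lines.size(); ++i)
	{
		cv::Point a(lines[i][0], lines[i][1]);//start point of segment i
		for (size_t j = 0; j < lines.size(); ++j)
		{
			cv::Point b(lines[j][2], lines[j][3]);//end point of segment j
			double dx = double(a.x) - double(b.x);
			double dy = double(a.y) - double(b.y);
			double d = sqrt(dx * dx + dy * dy);
			if (d > best)
			{
				best = d;
				ptA = a * 4;//scale to the 4x super-sampled image
				ptB = b * 4;
			}
		}
	}
}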
QPair<double, double> NIIRS::calculateERy(cv::Mat img)
{
QPair<double, double>ER_H;
cv::Mat imgGray, imgGrayX4;
if (img.channels() == 3)
cv::cvtColor(img, imgGray, CV_BGR2GRAY);
else
imgGray = img;
	//Upsample to 4x the original size for 1/4-pixel super-sampling
cv::pyrUp(imgGray, imgGrayX4);
cv::pyrUp(imgGrayX4, imgGrayX4);
//cv::imshow("yyyimgGrayX4", imgGrayX4);
////高斯滤波
//cv::Mat imgGaussBlur;
//cv::GaussianBlur(imgGray, imgGaussBlur, cv::Size(3, 3), 3, 3);
cv::Mat imgThres;
cv::threshold(imgGray, imgThres, 127, 255, cv::THRESH_BINARY);
//cv::imwrite("D:\\A_testdata\\niirs_test\\imgGrayy.png", imgThres);
//cv::imshow("imgThresy", imgThres);
//cv::waitKey(1);
	//Canny edge extraction
cv::Mat imgCanny;
cv::Canny(imgThres, imgCanny, 100, 200);
//cv::imshow("imgCannyy", imgCanny);
//cv::waitKey(1);
vector<cv::Vec4i>R_lines;
cv::HoughLinesP(imgCanny, R_lines, 1, CV_PI / 180, 10, 25, 100);
	//Collect the endpoints of all detected segments; the pair with the largest separation becomes the edge endpoints pt1/pt2
vector<cv::Point>top_points;
vector<cv::Point>bot_points;
if (R_lines.size() == 0)
{
qDebug() << "find no line in edge y";
ui.lineEdit_INFO->setText(QString::fromLocal8Bit("无法从输入数据中提取刃边"));
return ER_H;
}
for (size_t z = 0; z < R_lines.size(); z++)
{
cv::Vec4i p = R_lines[z];
//cout << "line x0: " << cv::Point(p[0], p[1]) << ", x1: " << cv::Point(p[2], p[3]) << std::endl;
top_points.push_back(cv::Point(p[0], p[1]));
bot_points.push_back(cv::Point(p[2], p[3]));
}
	//Matrix of pairwise distances between segment endpoints
cv::Mat img_dis(top_points.size(), bot_points.size(), CV_32FC1, cv::Scalar(0));
for (int row = 0; row < top_points.size(); row++)
{
cv::Vec<float, 1>* data_Ptr = img_dis.ptr<cv::Vec<float, 1>>(row);
for (int col = 0; col < bot_points.size(); col++)
{
if (row == col)
data_Ptr[col] = 0.0;
else
data_Ptr[col] = pointDistance(top_points[row], bot_points[col]);
}
}
double minv, maxv;
cv::Point pt_min, pt_max;
cv::minMaxLoc(img_dis, &minv, &maxv, &pt_min, &pt_max);
//std::cout << "img_dis = " << std::endl << img_dis << std::endl;
//std::cout << "maxv = " << maxv << std::endl;
//std::cout << "idx_max = " << pt_max << std::endl;
	//Knife-edge endpoints (scaled to the 4x super-sampled image)
cv::Point pt1 = cv::Point(top_points[pt_max.y].x * 4, top_points[pt_max.y].y * 4);
cv::Point pt2 = cv::Point(bot_points[pt_max.x].x * 4, bot_points[pt_max.x].y * 4);
//std::cout << "Edge End Point: " << pt1 << ", " << pt2 << std::endl;
cv::Mat masksrc(imgCanny.rows * 4, imgCanny.cols * 4, imgCanny.type(), cv::Scalar(0));
int rows = masksrc.rows;
int cols = masksrc.cols;
//cout << "row:" << rows << ", col:" << cols << std::endl;
cv::line(masksrc, pt1, pt2, cv::Scalar(255), 1, 8);
//cv::imshow("yyyedge", masksrc);
//cv::imwrite("D:\\A_testdata\\niirs_test\\imgLiney.png", masksrc);
	//pair.second: vector<int> holds every pixel value sampled at the same distance, averaged later
	//pair.first: int distance from the knife edge
vector<pair<int, vector<int>>>vecESF;
for (int row = 0; row < rows; row++)
{
for (int col = 0; col < cols; col++)
{
			//Current sampling point
cv::Point samplingPoint(col, row);
			//Used to compute the distance from this point to the knife edge
array<cv::Point, 3>sampling_to_edge = { samplingPoint, pt1, pt2 };
			//Distance from the sampling point to the edge
int dis = 0;
//cout << "mask at row:" << row << ",col:" << col;
			//Foot of the perpendicular from the sampling point to the edge; its x coordinate decides the sign of dis
cv::Point samplingPointFoot = calculate_foot_point(sampling_to_edge);
//cout << ",edge foot at row:" << samplingPointFoot.y << ",col:" << samplingPointFoot.x;
if (samplingPointFoot.x < col)
{
//dis = calculate_distance(sampling_to_edge);
dis = pointDistance(samplingPointFoot, samplingPoint);
}
else
{
//float a1 = float(sampling_to_edge.at(1).y) - float(sampling_to_edge.at(2).y);
//float b1 = float(sampling_to_edge.at(2).x) - float(sampling_to_edge.at(1).x);
//float c1 = float(sampling_to_edge.at(1).x) * float(sampling_to_edge.at(2).y)
// - float(sampling_to_edge.at(2).x) * float(sampling_to_edge.at(1).y);
//dis = abs(a1 * sampling_to_edge.at(0).x + b1 * sampling_to_edge.at(0).y + c1)
// / sqrt(a1 * a1 + b1 * b1);
dis = 0 - pointDistance(samplingPointFoot, samplingPoint);
}
int value = imgGrayX4.at<uchar>(row, col);
//cout << ",distance to edge:" << dis << ",value:"
// << value << std::endl;
pair<int, vector<int>>pix_dis_value;
			//Store into vecESF
			if (vecESF.size() == 0)//vecESF is empty: insert directly
{
pix_dis_value.first = dis;
pix_dis_value.second.push_back(imgGrayX4.at<uchar>(row, col));
vecESF.push_back(pix_dis_value);
}
			else//vecESF is not empty: look for an existing entry with the same distance
{
				//Scan vecESF for an entry with the same distance
for (int vecsize = 0; vecsize < vecESF.size(); vecsize++)
{
					if (vecESF[vecsize].first == dis)//found: append the value
{
vecESF[vecsize].second.push_back(imgGrayX4.at<uchar>(row, col));
break;
}
					if (vecsize == vecESF.size() - 1)//no match found: create a new entry
{
pix_dis_value.first = dis;
pix_dis_value.second.push_back(imgGrayX4.at<uchar>(row, col));
vecESF.push_back(pix_dis_value);
						break;//the push_back above grew vecESF; break to avoid iterating over it again
}
}
}
}
}
//cout << "vecESF.size():" << vecESF.size() << std::endl;
	//Average the values stored per distance in vecESF to obtain the ESF
vector<pair<int, int>>ESF;
	//LSF (the derivative of the ESF); declared here but not computed in this function
	vector<pair<int, int>>LSF;
	//Compute the ESF
for (int vecsize = 0; vecsize < vecESF.size(); vecsize++)
{
if (vecESF[vecsize].second.size() == 1)
{
pair<int, int>samedisESF(vecESF[vecsize].first, vecESF[vecsize].second[0]);
ESF.push_back(samedisESF);
}
else
{
vector<int>samdisValue = vecESF[vecsize].second;
int sum = 0;
int mean = 0;
for (int valuesize = 0; valuesize < samdisValue.size(); valuesize++)
{
sum = sum + samdisValue[valuesize];
}
int sameSize = samdisValue.size();
mean = sum / samdisValue.size();
pair<int, int>samedisESF(vecESF[vecsize].first, mean);
ESF.push_back(samedisESF);
}
}
	//Walk the ESF: average the values on the positive side and on the negative side of the edge
pair<double, double>DN_max_min;
vector<int>positive_side, negative_side;
	// ESF samples at +0.5, -0.5 and +1.25 pixels from the edge (keys 2, -2 and 5 in the 4x super-sampled grid)
	double edgeValue_p05 = 0.0, edgeValue_n05 = 0.0, edgeValue_p1dot25 = 0.0;
for (int i = 0; i < ESF.size(); i++)
{
//cout << ESF[vecSize].first << ",," << ESF[vecSize].second << std::endl;
if (ESF[i].first > 0)
positive_side.push_back(ESF[i].second);
else if (ESF[i].first < 0)
negative_side.push_back(ESF[i].second);
if (ESF[i].first == 2)
edgeValue_p05 = ESF[i].second;
if (ESF[i].first == -2)
edgeValue_n05 = ESF[i].second;
if (ESF[i].first == 5 && ESF[i].second > 0)
edgeValue_p1dot25 = abs(ESF[i].second);
}
double sumpos = accumulate(begin(positive_side), end(positive_side), 0.0);
double sumneg = accumulate(begin(negative_side), end(negative_side), 0.0);
double mean_positive = sumpos / positive_side.size();
double mean_negative = sumneg / negative_side.size();
DN_max_min.first = mean_positive > mean_negative ? mean_positive : mean_negative;
DN_max_min.second = mean_positive < mean_negative ? mean_positive : mean_negative;
//cout << "lager side:" << DN_max_min.first << ",small side:" << DN_max_min.second << std::endl;
//edgeValue_n05
//edgeValue_p05
double ER_p05 = (edgeValue_p05 - DN_max_min.second) / (DN_max_min.first - DN_max_min.second);
double ER_n05 = (edgeValue_n05 - DN_max_min.second) / (DN_max_min.first - DN_max_min.second);
double Hx = double(edgeValue_p1dot25 - DN_max_min.second) / double(DN_max_min.first - DN_max_min.second);
double ER = ER_p05 - ER_n05;
ER_H.first = ER;
ER_H.second = Hx;
return ER_H;
}
float NIIRS::pointDistance(cv::Point pt1, cv::Point pt2)
{
//float dis = sqrtf(((float)pt1.x - (float)pt2.x) * ((float)pt1.x - (float)pt2.x)
// + ((float)pt1.y - (float)pt2.y) * ((float)pt1.y - (float)pt2.y));
float dis = sqrt((float(pt1.x) - float(pt2.x)) * (float(pt1.x) - float(pt2.x))
+ (float(pt1.y) - float(pt2.y)) * (float(pt1.y) - float(pt2.y)));
//std::cout << "pt1:" << pt1 << "pt2" << pt2 << "pointDistance:" << dis << std::endl;
return dis;
}
//int NIIRS::calculate_distance(array<cv::Point, 3>& ps)
//{
// int d = 0;// 距离
// int a1 = -(ps.at(2).y - ps.at(1).y);
// int b1 = ps.at(2).x - ps.at(1).x;
// int c1 = (ps.at(2).y - ps.at(1).y) * ps.at(1).x - (ps.at(2).x - ps.at(1).x) * ps.at(1).y;
// d = abs(a1 * ps.at(0).x + b1 * ps.at(0).y + c1) / sqrt(a1 * a1 + b1 * b1);
// //allnum++;
// //if (allnum % 100 == 0)
// // cout << "calculate_distance:" << d << ",at:" << allnum << std::endl;
// return d;
//}
cv::Point NIIRS::calculate_foot_point(array<cv::Point, 3>& ps)
{
	cv::Point p(0, 0); // foot of the perpendicular
	if (ps.at(1).x == ps.at(2).x) // the edge is vertical (perpendicular to the x axis)
{
p.x = ps.at(1).x;
p.y = ps.at(0).y;
}
	else if (ps.at(1).y == ps.at(2).y) // the edge is horizontal (perpendicular to the y axis)
{
p.x = ps.at(0).x;
p.y = ps.at(1).y;
}
	else // general case: the edge is neither vertical nor horizontal
{
float a1 = float(ps.at(1).y) - float(ps.at(2).y);
float b1 = float(ps.at(2).x) - float(ps.at(1).x);
float c1 = float(ps.at(1).x) * float(ps.at(2).y) - float(ps.at(2).x) * float(ps.at(1).y);
//pFoot.x = (B * B * pA.x - A * B * pA.y - A * C) / (A * A + B * B);
//pFoot.y = (A * A * pA.y - A * B * pA.x - B * C) / (A * A + B * B);
float footx = (b1 * b1 * float(ps.at(0).x) - a1 * b1 * float(ps.at(0).y) - a1 * c1) / (a1 * a1 + b1 * b1);
float footy = (a1 * a1 * float(ps.at(0).y) - a1 * b1 * float(ps.at(0).x) - b1 * c1) / (a1 * a1 + b1 * b1);
//p.x = (b1 * b1 * ps.at(0).x - a1 * b1 * ps.at(0).y - a1 * c1) / (a1 * a1 + b1 * b1);
//p.y = (a1 * a1 * ps.at(0).y - a1 * b1 * ps.at(0).x - b1 * c1) / (a1 * a1 + b1 * b1);
p.x = footx;
p.y = footy;
}
return p;
}
//cv::Point NIIRS::line_intersection(array<cv::Point, 2>& line1, array<cv::Point, 2>& line2)
//{
// cv::Point inter;
//
// double ka, kb;
// // line1 y2 line1 y1 line1 x2 line1 x1
// //ka = (double)(LineA[3] - LineA[1]) / (double)(LineA[2] - LineA[0]); //求出LineA斜率
// // line2 y2 line2 y1 line2 x2 line2 x1
// //kb = (double)(LineB[3] - LineB[1]) / (double)(LineB[2] - LineB[0]); //求出LineB斜率
//
// // line1 x1 line1 y1 line2 x1 line2 y1
// //inter.x = (ka * LineA[0] - LineA[1] - kb * LineB[0] + LineB[1]) / (ka - kb);
// // line1 x1 line2 x1 line2 y1 line1 y1
// //inter.y = (ka * kb * (LineA[0] - LineB[0]) + ka * LineB[1] - kb * LineA[1]) / (ka - kb);
// ka = (double(line1[1].y) - double(line1[0].y)) / (double(line1[1].x) - double(line1[0].x)); //求出LineA斜率
// kb = (double(line2[1].y) - double(line2[0].y)) / (double(line2[1].x) - double(line2[0].x)); //求出LineB斜率
//
// inter.x = (ka * line1[0].x - line1[0].y - kb * line2[0].x + line2[0].y) / (ka - kb);
// inter.y = (ka * kb * (double(line1[0].x) - double(line2[0].x)) + ka * line2[0].y - kb * line1[0].y) / (ka - kb);
//
// return inter;
//}
//cv::Point NIIRS::point_k_edge_intersection(cv::Point pt, double k, array<cv::Point, 2>& line)
//{
// cv::Point inter;
// // 根据点斜式求直线
// // 根据直线是水平或垂直,求两线交点
// //cout << "point_k_edge_intersection point: " << pt << std::endl;
// if (line[0].x == line[1].x)//x相等相当于x已知求y = k * (x - x1) + y1
// {
// //cout << "line[0].x == line[1].x: " << line[0] << "," << line[1] << std::endl;
// float interpoint = k * (double(line[0].x) - double(pt.x)) + double(pt.y);
// inter.x = line[0].x;
// inter.y = round(interpoint);
// }
// else if (line[0].y == line[1].y)//y相等相当于y已知求x = (y - y1) / k + x1
// {
// //cout << "line[0].y == line[1].y: " << line[0] << "," << line[1] << std::endl;
// float interpoint = (double(line[0].y) - double(pt.y)) / k + double(pt.x);
// inter.x = round(interpoint);
// inter.y = line[0].y;
// }
// //cout << "intersection: " << inter << std::endl;
//
// return inter;
//}
//Gradient / steepness estimate based on the Sobel operator
double NIIRS::gradient_Sobel(cv::Mat img)
{
//assert(img.empty());
cv::Mat gray_img, sobel_x, abs_x, sobel_y, abs_y, sobel_mean;
if (img.channels() == 3)
cv::cvtColor(img, gray_img, CV_BGR2GRAY);
else
gray_img = img;
	//Compute the gradients in the x and y directions separately
cv::Sobel(gray_img, sobel_x, CV_32FC1, 1, 0);
cv::Sobel(gray_img, sobel_y, CV_32FC1, 0, 1);
//cv::multiply(sobel_x, sobel_x, sobel_x);
//cv::multiply(sobel_y, sobel_y, sobel_y);
//cv::Mat sqrt_mat = sobel_x + sobel_y;
//cv::sqrt(sqrt_mat, sobel_mean);
cv::convertScaleAbs(sobel_x, abs_x);
cv::convertScaleAbs(sobel_y, abs_y);
cv::addWeighted(abs_x, 0.5, abs_y, 0.5, 0, sobel_mean);
double mean = cv::mean(sobel_mean)[0];
//cv::resize(sobel_mean, sobel_mean, cv::Size(600, 400));
return mean;
}
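//Illustrative sketch of the alternative hinted at by the commented lines in
//gradient_Sobel above: the L2 gradient magnitude sqrt(gx^2 + gy^2) instead of
//the 0.5*|gx| + 0.5*|gy| blend. Expects a single-channel image; not called.
static double gradientMagnitudeL2(const cv::Mat& gray)
{
	cv::Mat gx, gy, mag;
	cv::Sobel(gray, gx, CV_32FC1, 1, 0);
	cv::Sobel(gray, gy, CV_32FC1, 0, 1);
	cv::magnitude(gx, gy, mag);//per-pixel sqrt(gx^2 + gy^2)
	return cv::mean(mag)[0];
}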
//Sharpness estimate from the second derivative (Laplacian operator)
double NIIRS::definition_Laplacian(cv::Mat img)
{
cv::Mat gray_img, lap_image;
if (img.channels() == 3)
cv::cvtColor(img, gray_img, CV_BGR2GRAY);
else
gray_img = img;
cv::Laplacian(gray_img, lap_image, CV_32FC1);
lap_image = cv::abs(lap_image);
return cv::mean(lap_image)[0];
}
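//A common variant of the measure above (sketch only, not called): the variance
//of the Laplacian response rather than the mean of its absolute value; larger
//values again indicate a sharper image.
static double laplacianVariance(const cv::Mat& gray)
{
	cv::Mat lap;
	cv::Laplacian(gray, lap, CV_32FC1);
	cv::Scalar mu, sigma;
	cv::meanStdDev(lap, mu, sigma);
	return sigma[0] * sigma[0];//variance = stddev^2
}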
void NIIRS::get_GLCM_0deg(cv::Mat& input, cv::Mat& dst)
{
cv::Mat src;
input.copyTo(src);
src.convertTo(src, CV_32S);
int height = src.rows;
int width = src.cols;
int max_gray_level = 0;
	for (int j = 0; j < height; j++)//find the maximum gray level
{
int* srcdata = src.ptr<int>(j);
for (int i = 0; i < width; i++)
{
if (srcdata[i] > max_gray_level)
{
max_gray_level = srcdata[i];
}
}
}
	max_gray_level++;//max gray level + 1 is the number of gray levels in the matrix
dst.create(max_gray_level, max_gray_level, CV_32SC1);
dst = cv::Scalar::all(0);
for (int i = 0; i < height; i++)
{
int* srcdata = src.ptr<int>(i);
for (int j = 0; j < width - 1; j++)
{
int rows = srcdata[j];
int cols = srcdata[j + 1];
dst.ptr<int>(rows)[cols]++;
}
}
}
void NIIRS::get_GLCM_90deg(cv::Mat& input, cv::Mat& dst)
{
cv::Mat src;
input.copyTo(src);
src.convertTo(src, CV_32S);
int height = src.rows;
int width = src.cols;
int max_gray_level = 0;
for (int j = 0; j < height; j++)
{
int* srcdata = src.ptr<int>(j);
for (int i = 0; i < width; i++)
{
if (srcdata[i] > max_gray_level)
{
max_gray_level = srcdata[i];
}
}
}
max_gray_level++;
dst.create(max_gray_level, max_gray_level, CV_32SC1);
dst = cv::Scalar::all(0);
for (int i = 0; i < height - 1; i++)
{
int* srcdata = src.ptr<int>(i);
int* srcdata1 = src.ptr<int>(i + 1);
for (int j = 0; j < width; j++)
{
int rows = srcdata[j];
int cols = srcdata1[j];
dst.ptr<int>(rows)[cols]++;
}
}
}
void NIIRS::get_GLCM_45deg(cv::Mat& input, cv::Mat& dst)
{
cv::Mat src;
input.copyTo(src);
src.convertTo(src, CV_32S);
int height = src.rows;
int width = src.cols;
int max_gray_level = 0;
for (int j = 0; j < height; j++)
{
int* srcdata = src.ptr<int>(j);
for (int i = 0; i < width; i++)
{
if (srcdata[i] > max_gray_level)
{
max_gray_level = srcdata[i];
}
}
}
max_gray_level++;
dst.create(max_gray_level, max_gray_level, CV_32SC1);
dst = cv::Scalar::all(0);
for (int i = 0; i < height - 1; i++)
{
int* srcdata = src.ptr<int>(i);
int* srcdata1 = src.ptr<int>(i + 1);
for (int j = 0; j < width - 1; j++)
{
int rows = srcdata[j];
int cols = srcdata1[j + 1];
dst.ptr<int>(rows)[cols]++;
}
}
}
void NIIRS::get_GLCM_135deg(cv::Mat& input, cv::Mat& dst)
{
cv::Mat src;
input.copyTo(src);
src.convertTo(src, CV_32S);
int height = src.rows;
int width = src.cols;
int max_gray_level = 0;
for (int j = 0; j < height; j++)
{
int* srcdata = src.ptr<int>(j);
for (int i = 0; i < width; i++)
{
if (srcdata[i] > max_gray_level)
{
max_gray_level = srcdata[i];
}
}
}
max_gray_level++;
dst.create(max_gray_level, max_gray_level, CV_32SC1);
dst = cv::Scalar::all(0);
for (int i = 0; i < height - 1; i++)
{
int* srcdata = src.ptr<int>(i);
int* srcdata1 = src.ptr<int>(i + 1);
for (int j = 1; j < width; j++)
{
int rows = srcdata[j];
int cols = srcdata1[j - 1];
dst.ptr<int>(rows)[cols]++;
}
}
}
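//Tiny worked example (not called) of the 0-degree co-occurrence counting in
//get_GLCM_0deg above: each (pixel, right-neighbour) pair increments one cell of
//the GLCM. The 3x3 patch with gray levels 0..2 is a made-up illustration.
static cv::Mat glcm0degExample()
{
	int patch[3][3] = { {0, 0, 1},
						{1, 2, 2},
						{2, 2, 0} };
	cv::Mat glcm = cv::Mat::zeros(3, 3, CV_32SC1);//gray levels 0..2
	for (int r = 0; r < 3; ++r)
		for (int c = 0; c < 2; ++c)//horizontal (0-degree) neighbour
			glcm.at<int>(patch[r][c], patch[r][c + 1])++;
	//Resulting counts: (0,0)=1, (0,1)=1, (1,2)=1, (2,2)=2, (2,0)=1
	return glcm;
}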
double NIIRS::calculateASM(cv::Mat& src)
{
//cv::Mat src;
//cv::cvtColor(input, src, CV_BGR2GRAY);//转为灰度图
int height = src.rows;
int width = src.cols;
int total = 0;
for (int i = 0; i < height; i++)
{
int* srcdata = src.ptr<int>(i);
for (int j = 0; j < width; j++)
{
			total += srcdata[j];//sum of all values in the matrix (used for normalisation)
}
}
cv::Mat copy;
copy.create(height, width, CV_64FC1);
for (int i = 0; i < height; i++)
{
int* srcdata = src.ptr<int>(i);
double* copydata = copy.ptr<double>(i);
for (int j = 0; j < width; j++)
{
			copydata[j] = (double)srcdata[j] / (double)total;//normalise each entry by the total
}
}
	//Texture feature terms
double Asm = 0.0, Eng = 0.0, Con = 0.0;
for (int i = 0; i < height; i++)
{
double* srcdata = copy.ptr<double>(i);
for (int j = 0; j < width; j++)
{
			Asm += srcdata[j] * srcdata[j];//energy (angular second moment)
			if (srcdata[j] > 0)
				Eng -= srcdata[j] * log(srcdata[j]);//entropy//+0.0000001
			Con += (double)(i - j) * (double)(i - j) * srcdata[j];//contrast
			//Idm += srcdata[j] / (1 + (double)(i - j) * (double)(i - j));//inverse difference moment
}
}
//qDebug() << "--1. Angular Sec Moment" << Asm;
//qDebug() << "--2. Entropy-----------" << Eng;
//qDebug() << "--3. contranst radio---" << Con;
return Asm;
}
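//Sketch (not called) of returning all three statistics computed in calculateASM
//above instead of only the energy term. Expects an already normalised CV_64FC1
//co-occurrence matrix whose entries sum to 1.
struct GlcmStats { double energy; double entropy; double contrast; };
static GlcmStats glcmStatsFromNormalized(const cv::Mat& p)
{
	GlcmStats s = { 0.0, 0.0, 0.0 };
	for (int i = 0; i < p.rows; ++i)
	{
		const double* row = p.ptr<double>(i);
		for (int j = 0; j < p.cols; ++j)
		{
			s.energy += row[j] * row[j];
			if (row[j] > 0)
				s.entropy -= row[j] * log(row[j]);
			s.contrast += double(i - j) * double(i - j) * row[j];
		}
	}
	return s;
}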
double NIIRS::calculatePIQEScore(cv::Mat& img)
{
int blockSize = 16;
double activityThreshold = 0.1;
double blockImpairedThreshold = 0.1;
int windowSize = 6;
int nSegments = blockSize - windowSize + 1;
double distBlockScores = 0.0;
double NHSA = 0.0;
	double c = 1.0;//constant
cv::Mat gray_img;
if (img.channels() == 3)
cv::cvtColor(img, gray_img, CV_BGR2GRAY);
else
gray_img = img;
//cv::Mat copy = gray_img;
//cv::resize(copy, copy, cv::Size(960, 540));
//cv::imshow("gray_img", copy);
gray_img.convertTo(gray_img, CV_64FC1);
int rows = gray_img.rows;
int columns = gray_img.cols;
int rowsPad = rows % blockSize;
int columnsPad = columns % blockSize;
//qDebug() << "rowsPad:" << rowsPad << "columnsPad:" << columnsPad;
bool isPadded = false;
if (rowsPad > 0 || columnsPad > 0)
{
if (rowsPad > 0)
rowsPad = blockSize - rowsPad;
if (columnsPad > 0)
columnsPad = blockSize - columnsPad;
isPadded = true;
		//Pad the image to the right and bottom so that rows/cols are divisible by blockSize [rowsPad down, columnsPad right]
cv::copyMakeBorder(gray_img, gray_img, 0, rowsPad, 0, columnsPad, cv::BORDER_REPLICATE);
//rows = gray_img.rows;
//columns = gray_img.cols;
//rowsPad = rows % blockSize;
//columnsPad = columns % blockSize;
//qDebug() << "rowsPad:" << rowsPad << "columnsPad:" << columnsPad;
}
cv::Mat muGaussfilt, gray_Dot_gray, sigmaGaussfilt;
	cv::GaussianBlur(gray_img, muGaussfilt, cv::Size(7, 7), 7.0 / 6.0, 7.0 / 6.0);//sigma = 7/6 as in the MATLAB reference (integer 7/6 would truncate to 1)
	//sigma = sqrt(abs(imgaussfilt(ipImage.*ipImage, 7 / 6, 'FilterSize', 7, 'Padding', 'replicate') - mu.*mu));
	//1. element-wise square of the image: gray_Dot_gray = gray_img .* gray_img
	//2. Gaussian-filter the squared image: GaussianBlur(gray_Dot_gray, gray_Dot_gray, cv::Size(7, 7), 7/6, 7/6)
	//3. sqrt(abs(gray_Dot_gray - muGaussfilt.*muGaussfilt))
	//1. element-wise square
	gray_Dot_gray = gray_img.mul(gray_img);
	//2. Gaussian filter of the squared image
	cv::GaussianBlur(gray_Dot_gray, gray_Dot_gray, cv::Size(7, 7), 7.0 / 6.0, 7.0 / 6.0);
cv::Mat absDiff, imnorm;
//3.1
cv::absdiff(gray_Dot_gray, muGaussfilt.mul(muGaussfilt), absDiff);
//3.2
cv::sqrt(absDiff, sigmaGaussfilt);
//imnorm = (ipImage - mu). / (sigma + 1);
imnorm = (gray_img - muGaussfilt) / (sigmaGaussfilt + 1);
//NoticeableArtifactsMask = false(size(imnorm, 1), size(imnorm, 2));
//NoiseMask = false(size(imnorm, 1), size(imnorm, 2));
//ActivityMask = false(size(imnorm, 1), size(imnorm, 2));
//qDebug() << imnorm.rows << "," << imnorm.cols << ","<< imnorm.rows - blockSize * 2<<","
// << "rows%blockSize" << imnorm.rows % blockSize << "cols%blockSize" << imnorm.cols % blockSize;
for (int i = 0; i < imnorm.rows; i = i + blockSize)
{
for (int j = 0; j < imnorm.cols; j = j + blockSize)
{
int WNDC = 0, WNC = 0;
cv::Mat Block = imnorm(cv::Rect(j, i, blockSize, blockSize));
cv::Scalar meanScalar, stdDevScalar;
cv::meanStdDev(Block, meanScalar, stdDevScalar);
double blockStdDev = stdDevScalar.val[0];
double blockVar = stdDevScalar.val[0] * stdDevScalar.val[0];
if (blockVar > activityThreshold)
{
double WHSA = 1.0;
NHSA = NHSA + 1;
//qDebug() << i << j << "var > activityThreshold"<< blockVar;
bool blockImpaired = noticeDistCriterion(Block, nSegments, blockSize - 1, windowSize,
blockImpairedThreshold, blockSize);
if (blockImpaired)
WNDC = 1;
double blockBeta = noiseCriterion(Block, blockSize - 1, blockVar, blockStdDev);
if (blockStdDev > 2 * blockBeta)
WNC = 1;
distBlockScores = distBlockScores + WHSA * WNDC * (1 - blockVar) + WHSA * WNC * (blockVar);
}
}
}
//cv::resize(imnorm, imnorm, cv::Size(960, 540));
//cv::imshow("imnorm", imnorm);
double Score = ((distBlockScores + c) / (c + NHSA));//* 100
return Score;
}
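//The local-contrast normalisation used in calculatePIQEScore above, factored
//out as a sketch (not called): imnorm = (I - mu) / (sigma + 1) with a 7x7
//Gaussian local mean and local standard deviation, sigma = 7/6 as in the
//MATLAB reference. Expects a CV_64FC1 image.
static cv::Mat localContrastNormalise(const cv::Mat& gray64f)
{
	cv::Mat mu, muSq, sigma, diff;
	cv::GaussianBlur(gray64f, mu, cv::Size(7, 7), 7.0 / 6.0, 7.0 / 6.0);
	cv::Mat sq = gray64f.mul(gray64f);
	cv::GaussianBlur(sq, muSq, cv::Size(7, 7), 7.0 / 6.0, 7.0 / 6.0);
	cv::absdiff(muSq, mu.mul(mu), diff);//|E[I^2] - E[I]^2|
	cv::sqrt(diff, sigma);//local standard deviation
	cv::Mat num = gray64f - mu;
	cv::Mat den = sigma + 1.0;
	return num / den;//per-element division
}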
bool NIIRS::noticeDistCriterion(cv::Mat Block, int nSegments, int blockSize_1,
int windowSize, float blockImpairedThreshold, int blockSize)
{
cv::Mat topEdge, segTopEdge;
topEdge = Block.row(0);
segTopEdge = segmentEdge(topEdge, nSegments, blockSize_1, windowSize);
cv::Mat bottomEdge, segBottomEdge;
bottomEdge = Block.row(15);
segBottomEdge = segmentEdge(bottomEdge, nSegments, blockSize_1, windowSize);
cv::Mat leftEdge, segLeftEdge;
	leftEdge = Block.col(0).t();//and transpose to a row vector
segLeftEdge = segmentEdge(leftEdge, nSegments, blockSize_1, windowSize);
cv::Mat rightEdge, segRightEdge;
	rightEdge = Block.col(15).t();//and transpose to a row vector
segRightEdge = segmentEdge(rightEdge, nSegments, blockSize_1, windowSize);
bool blockImpaired = false;
	//Row-wise standard deviation [windowSize columns x nSegments rows]
//std::cout << segTopEdge.rows<< std::endl;
cv::Scalar meanScalar, stdDevScalar;
cv::Mat segRowTop, segRowBottom, segRowLeft, segRowRight;
vector<double>stdTop, stdBottom, stdLeft, stdRight;
for (int row = 0; row < segTopEdge.rows; row++)
{
		//Take one segment (row)
segRowTop = segTopEdge.row(row);
segRowBottom = segBottomEdge.row(row);
segRowLeft = segLeftEdge.row(row);
segRowRight = segRightEdge.row(row);
		//Compute the standard deviation and store it in the vector
cv::meanStdDev(segRowTop, meanScalar, stdDevScalar);
stdTop.push_back(stdDevScalar.val[0]);
cv::meanStdDev(segRowBottom, meanScalar, stdDevScalar);
stdBottom.push_back(stdDevScalar.val[0]);
cv::meanStdDev(segRowLeft, meanScalar, stdDevScalar);
stdLeft.push_back(stdDevScalar.val[0]);
cv::meanStdDev(segRowRight, meanScalar, stdDevScalar);
stdRight.push_back(stdDevScalar.val[0]);
}
for (int vecsize = 0; vecsize < stdTop.size(); vecsize++)
{
if (stdTop[vecsize] < blockImpairedThreshold ||
stdBottom[vecsize] < blockImpairedThreshold ||
stdLeft[vecsize] < blockImpairedThreshold ||
stdRight[vecsize] < blockImpairedThreshold)
{
blockImpaired = true;
break;
}
}
return blockImpaired;
}
cv::Mat NIIRS::segmentEdge(cv::Mat blockEdge, int nSegments, int blockSize_1, int windowSize)
{
	cv::Mat segments(nSegments, windowSize, CV_64FC1);//uninitialised
//matlab:
//for i = 1:nSegments
// segments(i, :) = blockEdge(i : windowSize);
// if (windowSize <= (blockSize + 1))
// windowSize = windowSize + 1;
// end
//end
for (int row = 0; row < segments.rows; row++)
{
for (int col = 0; col < segments.cols; col++)
{
segments.at<qreal>(row, col) = blockEdge.at<qreal>(0, col + row);
}
}
//std::cout << "blockEdge" << std::endl << blockEdge << std::endl;
//std::cout << "segments" << std::endl << segments << std::endl;
return segments;
}
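//Plain-vector sketch (not called) of the windowing done in segmentEdge above:
//a 1 x blockSize edge is cut into nSegments = blockSize - windowSize + 1
//overlapping windows of length windowSize, one per output row.
static vector<vector<double>> slidingWindows(const vector<double>& edge, int windowSize)
{
	vector<vector<double>> out;
	if (windowSize <= 0 || (int)edge.size() < windowSize)
		return out;
	for (size_t start = 0; start + windowSize <= edge.size(); ++start)
		out.push_back(vector<double>(edge.begin() + start, edge.begin() + start + windowSize));
	return out;
}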
float NIIRS::noiseCriterion(cv::Mat Block, int blockSize_1, float blockVar, float blockStdDev)
{
float blockBeta = 0.0, cenSurDev = 0.0;
cenSurDev = centerSurDev(Block, blockSize_1);
blockBeta = (abs(blockStdDev - cenSurDev)) / (max(blockStdDev, cenSurDev));
return blockBeta;
}
float NIIRS::centerSurDev(cv::Mat Block, int blockSize_1)
{
int center1 = (blockSize_1 + 1) / 2;
int center2 = center1 + 1;
cv::Mat center(16, 2, CV_64FC1), surround(16, 14, CV_64FC1);
int surround_col = 0;
for (int col = 0; col < Block.cols; col++)
{
if (col == center1 - 1)//center col1
{
for (int row = 0; row < Block.rows; row++)
center.at<qreal>(row, 0) = Block.at<qreal>(row, col);
}
else if (col == center2 - 1)//center col2
{
for (int row = 0; row < Block.rows; row++)
center.at<qreal>(row, 1) = Block.at<qreal>(row, col);
}
else//surround
{
for (int row = 0; row < Block.rows; row++)
surround.at<qreal>(row, surround_col) = Block.at<qreal>(row, col);
surround_col = surround_col + 1;
}
}
//std::cout << "Block" << std::endl << Block << std::endl
// << "center" << std::endl << center << std::endl
// << "surround" << std::endl << surround << std::endl;
cv::Scalar meanScalar, stdDevScalar;
float stdDevCenter, stdDevSurround;
cv::meanStdDev(center, meanScalar, stdDevScalar);
stdDevCenter = stdDevScalar.val[0];
cv::meanStdDev(surround, meanScalar, stdDevScalar);
stdDevSurround = stdDevScalar.val[0];
float cenSurDev = stdDevCenter / stdDevSurround;
return cenSurDev;
}
double NIIRS::calculateMTFC_G(cv::Mat kernel)
{
double sqr_sum = 0.0;
cv::Mat floatkernel = kernel;
floatkernel.convertTo(floatkernel, CV_64FC1);
for (int row = 0; row < floatkernel.rows; row++)
for (int col = 0; col < floatkernel.cols; col++)
sqr_sum = sqr_sum + floatkernel.at<qreal>(row, col) * floatkernel.at<qreal>(row, col);
double MTFC_G = sqrt(sqr_sum);
return MTFC_G;
}
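//Usage sketch (not called) for the noise-gain term above: the root of the sum
//of squared kernel weights for a common 3x3 sharpening kernel. The kernel
//values are an illustrative assumption, not a kernel used elsewhere in this file.
static double mtfcGainOfSharpenKernelExample()
{
	cv::Mat k = (cv::Mat_<double>(3, 3) << 0, -1, 0,
		-1, 5, -1,
		0, -1, 0);
	double sqrSum = 0.0;
	for (int row = 0; row < k.rows; row++)
		for (int col = 0; col < k.cols; col++)
			sqrSum += k.at<double>(row, col) * k.at<double>(row, col);
	return sqrt(sqrSum);//sqrt(29) ~= 5.39, the same value calculateMTFC_G computes for this kernel
}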