@@ -405,7 +405,7 @@ namespace OTSIMGPROC
- }
+ }
COTSImageProcess::COTSImageProcess()
{
@@ -1946,328 +1946,9 @@ namespace OTSIMGPROC
return true;
}
- BOOL COTSImageProcess::SplitRawParticleIntoMatricsParticle(COTSParticlePtr a_pOTSPart,int imageWidth,int imageHeight, double a_PixelSize, double a_XrayStep)
- {
- //--------- convert this particle's data to image data and construct an image containing only this particle ------
- const int nExpand_Size = 3;
- const int nWhiteColor = 0;
- const int nThickness = 1;
- // lineType: type of the line
- const int nLineType = 8;
- // get the bounding rectangle of the particle
- CRect rect = a_pOTSPart->GetParticleRect();
- if (a_pOTSPart->GetActualArea() < 30 * a_PixelSize)// the particle is too small for OpenCV to calculate a width for it, so we take the particle's upright rect as its min-area rect.
- {
- double w = 0, h = 0;
- w = (double)rect.Width() * a_PixelSize;
- h = (double)rect.Height() * a_PixelSize;
- a_pOTSPart->SetDMax(MAX(w, h));
- a_pOTSPart->SetDMin(MIN(w, h));
- a_pOTSPart->SetDMean((w + h) / 2);
- a_pOTSPart->SetFeretDiameter((w + h) / 2);
- a_pOTSPart->SetDElong(MAX(w, h));
- a_pOTSPart->SetPerimeter((w + h) * 2);
- a_pOTSPart->SetDPerp(MIN(w, h));
- a_pOTSPart->SetDInscr(MIN(w, h));
- return true;
- }
-
- if (a_XrayStep > 0)
- {
- COTSParticleList matricsParts;
- int xrayStep = a_XrayStep;// *a_PixelSize;
- GetMatricsParticlesFromRawParticle(a_pOTSPart, imageWidth,imageHeight,a_PixelSize, xrayStep, matricsParts);
- a_pOTSPart->SetSubParticles(matricsParts);
- }
- //-----------
- }
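
For reference, the small-particle fallback used in all three removed functions reduces to deriving every shape metric from the upright bounding box whenever the blob is too small for cv::minAreaRect to give a usable width. A minimal standalone sketch of that approximation, using a plain struct instead of the COTSParticle setters (the struct and function names here are illustrative, not project API):

```cpp
#include <algorithm>

// Illustrative value holder standing in for the COTSParticle setters.
struct ParticleMetrics {
    double dmax, dmin, dmean, feret, delong, dperp, dinscr, perimeter;
};

// Approximate all diameters from the upright bounding box, as the fallback
// branch above does; widthPx/heightPx are the box sides in pixels.
ParticleMetrics MetricsFromUprightRect(int widthPx, int heightPx, double pixelSize)
{
    const double w = widthPx * pixelSize;   // physical width
    const double h = heightPx * pixelSize;  // physical height
    ParticleMetrics m{};
    m.dmax      = std::max(w, h);           // longest diameter ~ longer side
    m.dmin      = std::min(w, h);           // shortest diameter ~ shorter side
    m.dmean     = (w + h) / 2.0;            // mean diameter ~ average of sides
    m.feret     = m.dmean;                  // Feret diameter approximated the same way
    m.delong    = m.dmax;                   // elongation diameter
    m.dperp     = m.dmin;                   // perpendicular diameter
    m.dinscr    = m.dmin;                   // inscribed-circle diameter
    m.perimeter = 2.0 * (w + h);            // rectangle perimeter
    return m;
}
```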
- BOOL COTSImageProcess::SplitRawParticleIntoGreyScaleParticle(COTSParticlePtr a_pOTSPart,CDoubleRangePtr ecdRange, double a_PixelSize ,CBSEImgPtr fieldImg)
- {
- //--------- convert this particle's data to image data and construct an image containing only this particle ------
- const int nExpand_Size = 3;
- const int nWhiteColor = 0;
- const int nThickness = 1;
- // lineType: type of the line
- const int nLineType = 8;
- // get the bounding rectangle of the particle
- CRect rect = a_pOTSPart->GetParticleRect();
- if (a_pOTSPart->GetActualArea() < 5 * a_PixelSize)// the particle is too small for OpenCV to calculate a width for it, so we take the particle's upright rect as its min-area rect.
- {
- double w = 0, h = 0;
- w = (double)rect.Width() * a_PixelSize;
- h = (double)rect.Height() * a_PixelSize;
- a_pOTSPart->SetDMax(MAX(w, h));
- a_pOTSPart->SetDMin(MIN(w, h));
- a_pOTSPart->SetDMean((w + h) / 2);
- a_pOTSPart->SetFeretDiameter((w + h) / 2);
- a_pOTSPart->SetDElong(MAX(w, h));
- a_pOTSPart->SetPerimeter((w + h) * 2);
- a_pOTSPart->SetDPerp(MIN(w, h));
- a_pOTSPart->SetDInscr(MIN(w, h));
- return true;
- }
-
- // calculate the particle image data size, expanding 3 pixels at the edge
- CBSEImgPtr onePartImg = CBSEImgPtr(new CBSEImg(CRect(0,0, fieldImg->GetWidth(), fieldImg->GetHeight())));
- // get the segment list
- COTSSegmentsList listSegment = a_pOTSPart->GetFeature()->GetSegmentsList();
- for (auto pSegment : listSegment)
- {
- for (int i = 0; i < pSegment->GetLength(); i++)
- {
- int x = pSegment->GetStart() + i;
- int y = pSegment->GetHeight();
- int bseValue = fieldImg->GetBSEValue(x,y);
- onePartImg->SetBSEValue(x, y,bseValue);
-
- }
- }
-
- BlurImage(onePartImg);
- std::vector<CIntRangePtr> rngs = CalcuGrayLevelRange(onePartImg);
-
-
-
- COTSFieldDataPtr partData = COTSFieldDataPtr(new COTSFieldData());
- std::map<int, std::vector<COTSParticlePtr>> partAreaMap;
- for (int i = 0; i < rngs.size(); i++)
- {
- partAreaMap.clear();
-
- GetParticlesBySpecialGrayRange(onePartImg, rngs[i], ecdRange, a_PixelSize, partData);
-
-
-
- for (auto p : partData->GetParticleList())//sorting and filtering
- {
- /*if (p->GetActualArea() > 50)
- {*/
- partAreaMap[p->GetPixelArea()].push_back(p);
- //}
-
- }
- if(partAreaMap.size()>0)
- {
- auto theBiggestPart = partAreaMap.rbegin()->second[0];
- theBiggestPart->CalXRayPos();
- std::map<int, std::vector<COTSParticlePtr>>::reverse_iterator it;
- auto partsegs = theBiggestPart->GetFeature()->GetSegmentsList();
- it = partAreaMap.rbegin()++;
- for (; it != partAreaMap.rend(); it++)
- {
- for (auto sameAreaP : it->second)
- {
- auto segs = sameAreaP->GetFeature()->GetSegmentsList();
- for (auto s : segs)
- {
- partsegs.push_back(s);
- }
- }
-
- }
- theBiggestPart->GetFeature()->SetSegmentsList(partsegs, true);
- theBiggestPart->CalCoverRect();
- theBiggestPart->SetFieldId(a_pOTSPart->GetFieldId());
- theBiggestPart->SetAnalysisId(a_pOTSPart->GetAnalysisId());
- a_pOTSPart->AddSubParticle(theBiggestPart);
-
-
- }
-
-
- }
-
-
-
-
- return 0;
- }
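
The grey-scale split above amounts to: for each grey-level range returned by CalcuGrayLevelRange, collect the connected regions in that range, keep the largest one as the sub-particle, and fold every other region into it. A minimal sketch of that step with plain OpenCV calls, where cv::inRange and cv::connectedComponentsWithStats stand in for the project's GetParticlesBySpecialGrayRange (function and parameter names here are illustrative). Note that advancing past the largest entry of an area-sorted map needs std::next(map.rbegin()); the `partAreaMap.rbegin()++` expression above yields rbegin() itself, so that loop revisits the largest region.

```cpp
#include <iterator>
#include <map>
#include <vector>
#include <opencv2/core.hpp>
#include <opencv2/imgproc.hpp>

// For one grey-level range [low, high], mimic the merge step above: find the
// connected regions whose grey value lies in the range, keep the largest as
// "the" sub-particle and merge the pixels of every other region into it.
// Returns the merged 8-bit mask (255 inside the sub-particle).
cv::Mat MergedSubParticleMask(const cv::Mat& grey8u, int low, int high)
{
    cv::Mat inRange;
    cv::inRange(grey8u, cv::Scalar(low), cv::Scalar(high), inRange);

    cv::Mat labels, stats, centroids;
    const int n = cv::connectedComponentsWithStats(inRange, labels, stats, centroids, 8);
    if (n <= 1)
        return cv::Mat::zeros(grey8u.size(), CV_8UC1);   // background only

    // Regions sorted by pixel area, like partAreaMap above.
    std::map<int, int> areaToLabel;
    for (int label = 1; label < n; ++label)
        areaToLabel[stats.at<int>(label, cv::CC_STAT_AREA)] = label;

    const int biggest = areaToLabel.rbegin()->second;
    cv::Mat merged = (labels == biggest);                // start from the largest region
    for (auto it = std::next(areaToLabel.rbegin()); it != areaToLabel.rend(); ++it)
        merged |= (labels == it->second);                // fold the smaller regions into it
    return merged;
}
```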
- BOOL COTSImageProcess::SplitRawParticleIntoWaterShedParticle(COTSParticlePtr a_pOTSPart, double a_PixelSize, CBSEImgPtr fieldImg)
- {
- //--------- convert this particle's data to image data and construct an image containing only this particle ------
- const int nExpand_Size = 3;
- const int nWhiteColor = 0;
- const int nThickness = 1;
- // lineType: type of the line
- const int nLineType = 8;
- // get the bounding rectangle of the particle
- CRect rect = a_pOTSPart->GetParticleRect();
- if (a_pOTSPart->GetActualArea() < 5 * a_PixelSize)// the particle is too small for OpenCV to calculate a width for it, so we take the particle's upright rect as its min-area rect.
- {
- double w = 0, h = 0;
- w = (double)rect.Width() * a_PixelSize;
- h = (double)rect.Height() * a_PixelSize;
- a_pOTSPart->SetDMax(MAX(w, h));
- a_pOTSPart->SetDMin(MIN(w, h));
- a_pOTSPart->SetDMean((w + h) / 2);
- a_pOTSPart->SetFeretDiameter((w + h) / 2);
- a_pOTSPart->SetDElong(MAX(w, h));
- a_pOTSPart->SetPerimeter((w + h) * 2);
- a_pOTSPart->SetDPerp(MIN(w, h));
- a_pOTSPart->SetDInscr(MIN(w, h));
- return true;
- }
-
- // calculate the particle image data size, expanding 3 pixels at the edge
-
- CBSEImgPtr onePartImg = CBSEImgPtr(new CBSEImg(CRect(0, 0, fieldImg->GetWidth(), fieldImg->GetHeight())));
- CBSEImgPtr rawOnePartImg = CBSEImgPtr(new CBSEImg(CRect(0, 0, fieldImg->GetWidth(), fieldImg->GetHeight())));
- // get the segment list
- /*for (int i = 0; i < fieldImg->GetWidth(); i++)
- {
- for (int j = 0; j < fieldImg->GetHeight(); j++)
- {
- rawOnePartImg->SetBSEValue(i, j, 255);
- }
- }*/
-
- COTSSegmentsList listSegment = a_pOTSPart->GetFeature()->GetSegmentsList();
- for (auto pSegment : listSegment)
- {
- for (int i = 0; i < pSegment->GetLength(); i++)
- {
- int x = pSegment->GetStart() + i;
- int y = pSegment->GetHeight();
- int bseValue = fieldImg->GetBSEValue(x, y);
- onePartImg->SetBSEValue(x, y, bseValue);
- rawOnePartImg->SetBSEValue(x, y, bseValue);
- }
- }
- //ImshowImage(rawOnePartImg);
- //ImshowChartData(onePartImg);
- BlurImage(onePartImg);
- Mat partMat = GetMatDataFromBseImg(onePartImg);
-
- Canny(partMat, partMat, 10, 300,3);
- /* cv::imshow("ddd2", partMat);
- cv::waitKey();*/
- // find the contours
- vector<vector<Point>> contours;
- vector<Vec4i> hierarchy;
- findContours(partMat, contours, hierarchy, RETR_EXTERNAL, CHAIN_APPROX_SIMPLE);
-
- //Mat imageContours = Mat::zeros(partMat.size(), CV_8UC1); // contour image
- Mat marks(partMat.size(), CV_32S);
- marks = Scalar::all(0);
- int index = 0;
- int compCount =10;
- for (; index >= 0; index = hierarchy[index][0], compCount++)
- {
- // label the marks image: give each region's contour its own number; these act as the flooding seeds, one seed per contour
- // marks differs from imageContours only in the values written: marks gives each contour a distinct label, imageContours draws every contour in white
- // drawContours arguments: the image to draw into; all input contours, each stored as a vector of points; the index of the contour to draw (negative draws them all);
- // the color used for the contour; the line thickness (negative fills the contour interior);
- // the line connectivity; the optional hierarchy, only needed when drawing a subset of the contours
-
- drawContours(marks, contours, index, Scalar::all(compCount+1 ), 1, 8, hierarchy);
- //drawContours(imageContours, contours, index, Scalar(255), 1, 8, hierarchy);
- }
- /*cv::imshow("ddd", marks);
- cv::waitKey();*/
- auto rawData = GetMatDataFromBseImg(rawOnePartImg);
- /*cv::imshow("ddd3", rawData);
- cv::waitKey();*/
- Mat imageGray3;
- cvtColor(rawData, imageGray3, CV_GRAY2RGB);// convert the grey image to 3 channels
- watershed(imageGray3, marks); // run the watershed algorithm
- /*cv::imshow("ddd", marks);
- cv::waitKey();*/
-
- Mat PerspectiveImage = Mat::zeros(imageGray3.size(), CV_8UC1);
- for (int i = 0; i < marks.rows; i++) // marks is the region label map
- {
- for (int j = 0; j < marks.cols; j++)
- {
- int index = marks.at<int>(i, j);
- if (marks.at<int>(i, j) == -1)
- {
- PerspectiveImage.at<uchar>(i, j) = 0;
- }
- else
- {
- PerspectiveImage.at<uchar>(i, j) = index;
- }
- }
- }
-
-
- onePartImg->SetImageData(PerspectiveImage.data,marks.cols,marks.rows);
-
-
- std::vector<CIntRangePtr> rngs;
- for (int i = 10; i< compCount; i++)
- {
- rngs.push_back(CIntRangePtr(new CIntRange(i, i)));
-
- }
-
-
-
- CDoubleRangePtr ecdRange = CDoubleRangePtr(new CDoubleRange(0, 1000));
-
- COTSFieldDataPtr partData = COTSFieldDataPtr(new COTSFieldData());
- std::map<int, std::vector<COTSParticlePtr>> partAreaMap;
- for (int i = 0; i < rngs.size(); i++)
- {
- partAreaMap.clear();
-
- GetParticlesBySpecialGrayRange(onePartImg, rngs[i], ecdRange, a_PixelSize, partData);
-
-
-
- for (auto p : partData->GetParticleList())//sorting and filtering
- {
- auto r1=a_pOTSPart->GetParticleRect();
- auto pnt = p->GetParticleRect().CenterPoint();
- if (pnt.x > r1.left && pnt.x<r1.left + r1.Width() && pnt.y>r1.top && pnt.y < r1.top + r1.Height())
- {
- partAreaMap[p->GetPixelArea()].push_back(p);
- }
- /*if (p->GetActualArea() > 50)
- {*/
- //partAreaMap[p->GetPixelArea()].push_back(p);
- //}
-
- }
- if (partAreaMap.size() > 0)
- {
- auto theBiggestPart = partAreaMap.rbegin()->second[0];
- theBiggestPart->CalXRayPos();
- std::map<int, std::vector<COTSParticlePtr>>::reverse_iterator it;
- auto partsegs = theBiggestPart->GetFeature()->GetSegmentsList();
- it = partAreaMap.rbegin()++;
- for (; it != partAreaMap.rend(); it++)
- {
- for (auto sameAreaP : it->second)
- {
- auto segs = sameAreaP->GetFeature()->GetSegmentsList();
- for (auto s : segs)
- {
- partsegs.push_back(s);
- }
- }
-
- }
- theBiggestPart->GetFeature()->SetSegmentsList(partsegs, true);
- theBiggestPart->CalCoverRect();
- theBiggestPart->SetFieldId(a_pOTSPart->GetFieldId());
- theBiggestPart->SetAnalysisId(a_pOTSPart->GetAnalysisId());
- a_pOTSPart->AddSubParticle(theBiggestPart);
-
-
- }
-
-
-
- }
-
-
-
-
- return 0;
- }
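
The watershed variant above follows OpenCV's standard marker-based recipe: blur, take Canny edges, draw one uniquely numbered seed per external contour into a CV_32S marker image, then grow the seeds with cv::watershed over a 3-channel copy of the raw particle image; boundary pixels come back as -1, which is why the loop above maps -1 to 0 before reusing the labels as grey values. A minimal sketch of that pipeline on plain cv::Mat inputs (the CBSEImg wrappers and the project's label-to-particle conversion are omitted; cv::blur stands in for the project's BlurImage):

```cpp
#include <vector>
#include <opencv2/imgproc.hpp>

// Marker-based watershed split of a single-particle grey image, following the
// removed SplitRawParticleIntoWaterShedParticle: Canny edges -> one numbered
// seed per external contour -> watershed over the raw grey image. Returns the
// CV_32S label image; cv::watershed sets ridge pixels to -1.
cv::Mat WatershedLabels(const cv::Mat& rawGrey8u, int firstLabel = 10)
{
    cv::Mat blurred, edges;
    cv::blur(rawGrey8u, blurred, cv::Size(3, 3));           // light smoothing
    cv::Canny(blurred, edges, 10, 300, 3);

    std::vector<std::vector<cv::Point>> contours;
    std::vector<cv::Vec4i> hierarchy;
    cv::findContours(edges, contours, hierarchy,
                     cv::RETR_EXTERNAL, cv::CHAIN_APPROX_SIMPLE);

    // Each contour becomes a flooding seed with its own label value.
    cv::Mat markers = cv::Mat::zeros(rawGrey8u.size(), CV_32S);
    int label = firstLabel;
    for (int idx = 0; !contours.empty() && idx >= 0; idx = hierarchy[idx][0], ++label)
        cv::drawContours(markers, contours, idx, cv::Scalar::all(label), 1, 8, hierarchy);

    // cv::watershed needs an 8-bit 3-channel image.
    cv::Mat colour;
    cv::cvtColor(rawGrey8u, colour, cv::COLOR_GRAY2BGR);
    cv::watershed(colour, markers);
    return markers;                                          // one region per seed label
}
```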
+
void COTSImageProcess::ImshowImage(CBSEImgPtr img)
{
BYTE* data = img->GetImageDataPointer();