#include "stdafx.h"
#include "BaseFunction.h"
#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/opencv.hpp>
#include "OTSParticle.h"
#include "OTSImageProcessParam.h"
#include <OTSFieldData.h>
#include "OTSMorphology.h"

using namespace cv;
using namespace std;
using namespace OTSDATA;

/***** Distance between two points *****/
float getDistance(Point pointO, Point pointA)
{
	float distance;
	distance = powf((pointO.x - pointA.x), 2) + powf((pointO.y - pointA.y), 2);
	distance = sqrtf(distance);
	return distance;
}

/***** Distance from a point to a line: distance from P to AB *****/
// P is a point off the line; A and B are the two endpoints of the segment.
float getDist_P2L(Point pointP, Point pointA, Point pointB)
{
	// line equation A*x + B*y + C = 0
	int A = 0, B = 0, C = 0;
	A = pointA.y - pointB.y;
	B = pointB.x - pointA.x;
	C = pointA.x * pointB.y - pointA.y * pointB.x;
	// point-to-line distance formula
	float distance = 0;
	distance = ((float)abs(A * pointP.x + B * pointP.y + C)) / ((float)sqrtf(A * A + B * B));
	return distance;
}

// Sign of the cross product: tells which side of the directed line P1->P2 the point lies on.
int Side(Point P1, Point P2, Point point)
{
	/*Point P1 = line.P1;
	Point P2 = line.P2;*/
	return ((P2.y - P1.y) * point.x + (P1.x - P2.x) * point.y + (P2.x * P1.y - P1.x * P2.y));
}
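// Usage sketch for the geometry helpers above. The function name DemoGeometryHelpers
// and the sample coordinates are illustrative assumptions, not part of the OTS API.
static void DemoGeometryHelpers()
{
	cv::Point a(0, 0), b(10, 0), p(5, 3);

	float d  = getDistance(a, b);    // 10: Euclidean distance between a and b
	float dl = getDist_P2L(p, a, b); // 3: perpendicular distance from p to line ab
	int   s  = Side(a, b, p);        // sign tells which side of the directed line a->b p lies on (0 = collinear)
}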
// Largest inscribed circle of a contour: scan the bounding box and keep the interior
// point with the greatest distance to the contour.
void FindInnerCircleInContour(vector<Point> contour, Point& center, int& radius)
{
	Rect r = boundingRect(contour);
	int nL = r.x, nR = r.br().x; // left/right bounds of the contour
	int nT = r.y, nB = r.br().y; // top/bottom bounds of the contour
	double dist = 0;
	double maxdist = 0;
	for (int i = nL; i < nR; i++)  // columns
	{
		for (int j = nT; j < nB; j++)  // rows
		{
			// signed distance from the candidate interior point to the nearest contour point
			dist = pointPolygonTest(contour, Point(i, j), true);
			if (dist > maxdist)
			{
				// keep the maximum; the most central point has the largest distance
				maxdist = dist;
				center = Point(i, j);
			}
		}
	}
	radius = maxdist; // radius of the inscribed circle
}

BOOL GetParticleAverageChord(std::vector<Point> listEdge, double a_PixelSize, double& dPartFTD)
{
	// safety check
	if (listEdge.empty())
	{
		dPartFTD = 0;
		return FALSE;
	}
	double nx = 0, ny = 0;
	Moments mu;
	mu = moments(listEdge, false);
	nx = mu.m10 / mu.m00;
	ny = mu.m01 / mu.m00;
	//circle(cvcopyImg, Point(nx, ny), 1, (255), 1);
	Point ptCenter = Point((int)nx, (int)ny);

	// coordinate transformation
	Point ptPosition;
	int radiusNum = 0;
	// get Feret diameter
	double sumFltDiameter = 0;
	int interval;
	int edgePointNum = listEdge.size();
	if (edgePointNum > 10)
	{
		interval = edgePointNum / 10; // sample roughly ten radii spread around the boundary
	}
	else
	{
		interval = 1;
	}
	for (int i = 0; i < edgePointNum; i++)
	{
		Point pt = listEdge[i];
		ptPosition.x = abs(pt.x - ptCenter.x);
		ptPosition.y = abs(pt.y - ptCenter.y);
		if (i % interval == 0) // measure only every interval-th edge point to speed things up
		{
			double r1 = sqrt(pow(ptPosition.x, 2) + pow(ptPosition.y, 2));
			sumFltDiameter += r1;
			radiusNum += 1;
			//line(cvImageData, ptCenter, pt, Scalar(nBlackColor), nThickness, nLineType);
		}
	}
	if (radiusNum == 0)
	{
		dPartFTD = 0;
	}
	else
	{
		// average radius (pixels) * pixel size * 2 = average diameter in physical units
		dPartFTD = a_PixelSize * sumFltDiameter / radiusNum * 2;
	}
	//imshow("feret center", cvImageData);
	return TRUE;
}

// 5-point moving-average smoothing of a 256-bin curve (boundary bins use weighted formulas).
void linearSmooth5(WORD wordIn[], WORD wordOut[], int N = 255)
{
	double in[256];
	double out[256];
	for (int i = 0; i < 256; i++)
	{
		in[i] = (double)wordIn[i];
	}
	int i;
	if (N < 5)
	{
		for (i = 0; i <= N - 1; i++)
		{
			out[i] = in[i];
		}
	}
	else
	{
		out[0] = (3.0 * in[0] + 2.0 * in[1] + in[2] - in[4]) / 5.0;
		out[1] = (4.0 * in[0] + 3.0 * in[1] + 2 * in[2] + in[3]) / 10.0;
		for (i = 2; i <= N - 3; i++)
		{
			out[i] = (in[i - 2] + in[i - 1] + in[i] + in[i + 1] + in[i + 2]) / 5.0;
		}
		out[N - 2] = (4.0 * in[N - 1] + 3.0 * in[N - 2] + 2 * in[N - 3] + in[N - 4]) / 10.0;
		out[N - 1] = (3.0 * in[N - 1] + 2.0 * in[N - 2] + in[N - 3] - in[N - 5]) / 5.0;
	}
	for (int i = 0; i < N; i++)
	{
		wordOut[i] = (WORD)out[i];
	}
}

// In-place Gaussian blur of a BSE image (operates directly on the CBSEImg pixel buffer).
void BlurImage(CBSEImgPtr inImg)
{
	int rows, cols;
	cols = inImg->GetWidth();
	rows = inImg->GetHeight();
	BYTE* pPixel = inImg->GetImageDataPointer();
	Mat cvcopyImg = Mat(rows, cols, CV_8UC1, pPixel);
	//Mat blurImg;
	//medianBlur(cvcopyImg, cvcopyImg, 11);//get rid of the noise point.
	//cv::bilateralFilter
	cv::GaussianBlur(cvcopyImg, cvcopyImg, Size(5, 5), 2);
	//inImg->SetImageData(cvcopyImg.data, width, height);
	/*outImg = inImg;*/
}

// Wrap the BSE image buffer in a cv::Mat header (no pixel copy is made).
Mat GetMatDataFromBseImg(CBSEImgPtr inImg)
{
	int rows, cols;
	cols = inImg->GetWidth();
	rows = inImg->GetHeight();
	BYTE* pPixel = inImg->GetImageDataPointer();
	Mat cvcopyImg = Mat(rows, cols, CV_8UC1, pPixel);
	return cvcopyImg;
}

// Build a new CBSEImg from a cv::Mat via SetImageData.
CBSEImgPtr GetBSEImgFromMat(Mat inImg)
{
	CBSEImgPtr bse = CBSEImgPtr(new CBSEImg(CRect(0, 0, inImg.cols, inImg.rows)));
	BYTE* pPixel = inImg.data;
	bse->SetImageData(pPixel, inImg.cols, inImg.rows);
	return bse;
}
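// Usage sketch for the BSE/Mat helpers and linearSmooth5. The names DemoBseMatRoundTrip,
// bseImg and grayHistogram are illustrative assumptions; grayHistogram is assumed to be an
// already-filled 256-bin gray-level histogram.
static void DemoBseMatRoundTrip(CBSEImgPtr bseImg, WORD grayHistogram[256])
{
	BlurImage(bseImg);                           // in-place Gaussian blur on the BSE buffer
	cv::Mat gray = GetMatDataFromBseImg(bseImg); // header over the same pixel buffer, no copy
	CBSEImgPtr copy = GetBSEImgFromMat(gray);    // new CBSEImg built from the Mat's pixels

	WORD smoothed[256] = { 0 };
	linearSmooth5(grayHistogram, smoothed, 256); // 5-point moving-average smoothing of the histogram
}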
/***********************************************************
The enhancement first computes the proportion of each gray level in the whole image.
Each pixel is then remapped using, as its gain, the cumulative proportion of all gray
levels at or below its own value. Because every value's gain is the sum of the
proportions of the values below it, the stretched image gets brighter where it was
bright and darker where it was dark.
************************************************************/
void ImageStretchByHistogram(const Mat& src, Mat& dst)
{
	// sanity-check the input arguments
	if (!(src.size().width == dst.size().width))
	{
		cout << "error" << endl;
		return;
	}
	double p[256], p1[256], num[256];
	memset(p, 0, sizeof(p));
	memset(p1, 0, sizeof(p1));
	memset(num, 0, sizeof(num));
	int height = src.size().height;
	int width = src.size().width;
	long wMulh = height * width;

	// count how many pixels take each gray level
	for (int x = 0; x < width; x++)
	{
		for (int y = 0; y < height; y++)
		{
			uchar v = src.at<uchar>(y, x);
			num[v]++;
		}
	}
	// turn the counts into proportions of the total pixel count
	for (int i = 0; i < 256; i++)
	{
		p[i] = num[i] / wMulh;
	}
	// cumulative proportion of all gray levels up to and including i:
	// p1[i] = sum(p[j]), j <= i
	for (int i = 0; i < 256; i++)
	{
		for (int k = 0; k <= i; k++)
			p1[i] += p[k];
	}
	// remap each pixel using the cumulative proportion as its gain
	for (int y = 0; y < height; y++)
	{
		for (int x = 0; x < width; x++)
		{
			uchar v = src.at<uchar>(y, x);
			dst.at<uchar>(y, x) = (uchar)(p1[v] * 255 + 0.5);
		}
	}
	return;
}

// Adjust image contrast.
Mat AdjustContrastY(const Mat& img)
{
	Mat out = Mat::zeros(img.size(), CV_8UC1);
	Mat workImg = img.clone();
	// contrast enhancement via histogram stretching
	ImageStretchByHistogram(workImg, out);
	return Mat(out);
}

void CVRemoveBG(const cv::Mat& img, cv::Mat& dst, int bgstart, int bgend, long& nNumParticle)
{
	int min_gray = bgstart;
	int max_gray = bgend;
	if (img.empty())
	{
		std::cout << "image is empty";
		return;
	}
	Mat image = img.clone();
	if (image.channels() != 1)
	{
		cv::cvtColor(image, image, cv::COLOR_BGR2GRAY);
	}
	// Look-up table: keep only the gray levels in the range of interest, excluding the
	// gray-255 border regions produced when stitching fields as well as the low-gray regions.
	uchar lutvalues[256];
	for (int i = 0; i < 256; i++)
	{
		if (i < min_gray || i > max_gray)
		{
			lutvalues[i] = i; // gray level outside the background range is kept
			nNumParticle++;
		}
		else
		{
			lutvalues[i] = 0; // gray level inside the background range is cleared
		}
	}
	cv::Mat lutpara(1, 256, CV_8UC1, lutvalues);
	cv::LUT(image, lutpara, image);

	cv::Mat out_fill0, out_fill;
	// opening with 5x1 and 1x5 kernels: keep runs at least 5 px long
	cv::morphologyEx(image, out_fill0, cv::MorphTypes::MORPH_OPEN, cv::getStructuringElement(0, cv::Size(5, 1)), cv::Point(-1, -1), 1);
	cv::morphologyEx(image, out_fill, cv::MorphTypes::MORPH_OPEN, cv::getStructuringElement(0, cv::Size(1, 5)), cv::Point(-1, -1), 1);
	out_fill = out_fill + out_fill0;
	// closing
	cv::morphologyEx(out_fill, out_fill, cv::MorphTypes::MORPH_CLOSE, cv::getStructuringElement(0, cv::Size(3, 3)), cv::Point(-1, -1), 1);
	// binarize
	cv::threshold(out_fill, out_fill, 1, 255, cv::ThresholdTypes::THRESH_BINARY);
	dst = out_fill.clone();
}

void RemoveBG_old(const cv::Mat& img, cv::Mat& dst, int nBGStart, int nBGEnd, long& nNumParticle)
{
	int w, h;
	w = img.cols;
	h = img.rows;
	BYTE* pSrcImg = img.data;
	BYTE* pPixel = new BYTE[w * h];
	BYTE* pTempImg = new BYTE[w * h];
	for (int i = 0; i < w * h; i++)
	{
		if (pSrcImg[i] < nBGStart || pSrcImg[i] > nBGEnd)
		{
			pPixel[i] = 255; // foreground: gray value outside the background range
			nNumParticle++;
		}
		else
		{
			pPixel[i] = 0;   // background
		}
	}
	int errodDilateParam = 5;
	if (errodDilateParam > 0)
	{
		BErode3(pPixel, pTempImg, errodDilateParam, h, w);
		BDilate3(pTempImg, pPixel, errodDilateParam, h, w);
	}
	// copy the result into dst so it owns its pixels, then release the work buffers
	dst = cv::Mat(h, w, CV_8UC1, pPixel).clone();
	delete[] pPixel;
	delete[] pTempImg;
}
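// End-to-end usage sketch chaining the helpers above: contrast stretch, background removal,
// then per-particle measurements. The name DemoSegmentParticles, the assumed 8-bit
// single-channel input, and the background range (0, 40) are illustrative assumptions only.
static void DemoSegmentParticles(const cv::Mat& grayImg, double pixelSize)
{
	cv::Mat enhanced = AdjustContrastY(grayImg); // histogram-based contrast stretch (expects CV_8UC1)

	cv::Mat binary;
	long particleCount = 0;
	CVRemoveBG(enhanced, binary, 0, 40, particleCount); // gray levels 0..40 treated as background here

	std::vector<std::vector<cv::Point>> contours;
	cv::findContours(binary, contours, cv::RETR_EXTERNAL, cv::CHAIN_APPROX_NONE);
	for (const auto& contour : contours)
	{
		double avgChord = 0;
		GetParticleAverageChord(contour, pixelSize, avgChord); // average diameter from sampled radii

		cv::Point center;
		int radius = 0;
		FindInnerCircleInContour(contour, center, radius);     // largest inscribed circle
	}
}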