Example #1
Mat getBackground(VideoCapture& cap){
    // initialize the frame count and the frame buffers
    int i = 1;
    Mat frame, frame2, sum;

    cap >> frame;
    frame.copyTo(frame2);   // deep copy, so the next capture does not overwrite frame2

    // the main loop (terminates when a key is pressed)
    while (true) {
        cap >> frame;
        if (i > 1) {
            frame2 = sum;
        }

        double beta = 1.0 / (i + 1);
        double alpha = 1.0 - beta; 
        addWeighted(frame2, alpha, frame, beta, 0.0, sum);

        imshow("background", sum);
        if(waitKey(30) >= 0) break;
        ++i;
    }

    destroyWindow("background");
    return sum;
}
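Note: with alpha = i / (i + 1) and beta = 1 / (i + 1), each addWeighted call above keeps a running mean of all frames seen so far (mean_new = (i * mean_old + frame) / (i + 1)). A minimal standalone sketch of the same update, assuming frames are read from an already-opened VideoCapture (the function and variable names are illustrative, not from the original code):

#include <opencv2/opencv.hpp>

// Illustrative sketch: incremental mean of the first N frames via addWeighted.
cv::Mat runningMean(cv::VideoCapture& cap, int N)
{
    cv::Mat mean, frame;
    for (int n = 0; n < N; ++n) {
        cap >> frame;
        if (frame.empty()) break;
        if (mean.empty()) { frame.copyTo(mean); continue; }
        // mean = n/(n+1) * mean + 1/(n+1) * frame
        cv::addWeighted(mean, double(n) / (n + 1), frame, 1.0 / (n + 1), 0.0, mean);
    }
    return mean;
}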
Example #2
void BlendingImages::demo(char * filename1,char * filename2, double inputAlpha){
	 double alpha = 0.5;
	 double beta;

	 Mat src1, src2, dst;

	 /// We use the alpha provided by the user if it is between 0 and 1
	 if( inputAlpha >= 0.0 && inputAlpha <= 1.0 )
	 {
		 alpha = inputAlpha;
	 }

	 /// Read the two input images (they must have the same size and type)
	 src1 = imread(filename1);
	 src2 = imread(filename2);

	 if( !src1.data ) {
		 printf("Error loading src1 \n");
		 return;
	 }
	 if( !src2.data ) {
		 printf("Error loading src2 \n");
		 return;
	 }

	 /// Create Windows
	 namedWindow("Linear Blend", 1);

	 beta = ( 1.0 - alpha );
	 addWeighted( src1, alpha, src2, beta, 0.0, dst);

	 imshow( "Linear Blend", dst );

	 waitKey(0);
}
Example #3
imageCapture::imageCapture(Mat image, int& counter, int amountOfColors, int AmountOfProjectedPointsPerColor, int totalProjectedColumns, vector<vector<Point>>& twoDPointSpace) {
	colorObject redLower("redLower"), redUpper("redUpper"), green("green"), blue("blue"), white("white");

	// get blue points
	cvtColor(image, HSV, COLOR_BGR2HSV);
	inRange(HSV, blue.getHSVmin(), blue.getHSVmax(), threshold);
	morphOps(threshold);
	trackFilteredObject(blue, threshold, image, counter, twoDPointSpace);

	// get red points (red wraps around the hue axis, so combine the lower and upper hue ranges)
	cvtColor(image, HSV, COLOR_BGR2HSV);
	inRange(HSV, redLower.getHSVmin(), redLower.getHSVmax(), lowerRedImage);
	inRange(HSV, redUpper.getHSVmin(), redUpper.getHSVmax(), upperRedImage);
	addWeighted(lowerRedImage, 1.0, upperRedImage, 1.0, 0.0, threshold);
	morphOps(threshold);
	trackFilteredObject(redUpper, threshold, image, counter, twoDPointSpace);

	//get green points
	cvtColor(image, HSV, COLOR_BGR2HSV);
	inRange(HSV, green.getHSVmin(), green.getHSVmax(), threshold);
	morphOps(threshold);
	trackFilteredObject(green, threshold, image, counter, twoDPointSpace);

	//get white points
	cvtColor(image, HSV, COLOR_BGR2HSV);
	inRange(HSV, white.getHSVmin(), white.getHSVmax(), threshold);
	morphOps(threshold);
	trackFilteredObject(white, threshold, image, counter, twoDPointSpace);

	counter++;
}
Example #4
Mat ImageAnalysis::getColoredAreas(){
	if( !m_areaCol_ok ) genColoredAreas();


	// legacy C API: wrap the Mats in IplImage headers and replicate the depth image into all three channels of m_rgb
	IplImage gray = m_depthf;
	IplImage rgb = m_rgb;
	cvMerge(&gray, &gray, &gray, NULL, &rgb);
	addWeighted(m_rgb, 0.5, m_areaCol, 0.5, 0, m_rgb);
	return m_rgb;
}
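The block above uses the legacy C API (IplImage, cvMerge). A minimal sketch of the same step with the C++ API, assuming m_depthf is single-channel with the same depth and size as m_areaCol and m_rgb:

	// Illustrative sketch: replicate the depth image into three channels, then blend with the colored areas.
	cv::Mat gray3;
	cv::merge(std::vector<cv::Mat>{ m_depthf, m_depthf, m_depthf }, gray3);
	cv::addWeighted(gray3, 0.5, m_areaCol, 0.5, 0.0, m_rgb);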
Example #5
	void HazeRemove::gui(Mat& src, string wname)
	{
		namedWindow(wname);

		int mode = 0;
		createTrackbar("mode", wname, &mode, 1);
		int alpha = 0;
		createTrackbar("alpha", wname, &alpha, 100);
		int hazerate = 10;
		createTrackbar("hazerate", wname, &hazerate, 100);
		int hazesize = 4;
		createTrackbar("hazesize", wname, &hazesize, 100);
		int ksize = 15;
		createTrackbar("ksize", wname, &ksize, 100);
		int e = 6;
		createTrackbar("e", wname, &e, 255);

		int key = 0;
		while (key != 'q')
		{
			if (mode == 0)
			{
				Mat show;
				Mat destC;
				Mat destDark;
				{
					//CalcTime t("dehaze");
					operator()(src, show, hazesize, hazerate / 100.0, ksize, e / 10.0);

				}
				showTransmissionMap(destC, true);
				Mat a;
				getAtmosphericLightImage(a);
				imshow("a light", a);
				addWeighted(src, alpha / 100.0, destC, 1.0 - alpha / 100.0, 0.0, destC);
				imshow(wname, destC);
				imshow("dehaze", show);

			}
			else
			{
				double mn, mx;
				Mat srcg;
				cvtColor(src, srcg, CV_BGR2GRAY); // minMaxLoc below needs a single-channel image
				minMaxLoc(srcg, &mn, &mx);
				Mat dest = src - CV_RGB(mn, mn, mn);

				dest.convertTo(dest, CV_8UC3, 255 / (mx - mn));
				imshow("dehaze", dest);
			}

			key = waitKey(33);
		}
	}
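Example #6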
void MainWindow::addImage()
{
    if(!activeMdiChild()) return;
    CvGlWidget *actCV = activeMdiChild();
    double alpha = 0.75;
    double beta = ( 1.0 - alpha );
    addWeighted( actCV->cvImage, alpha, actCV->inputImage, beta, 0.0, actCV->cvImage);
    actCV->showImage(actCV->cvImage);
    actCV->setWindowTitle("Blended");
    actCV->buildHistogram();
    makeHistogram();
}
Example #7
int CPlateDetect::showResult(const Mat& result) {
  namedWindow("EasyPR", CV_WINDOW_AUTOSIZE);

  const int RESULTWIDTH = 640;   // 640 930
  const int RESULTHEIGHT = 540;  // 540 710

  Mat img_window;
  img_window.create(RESULTHEIGHT, RESULTWIDTH, CV_8UC3);
  img_window.setTo(Scalar::all(0));  // create() does not initialize the pixels, so clear the window to black

  int nRows = result.rows;
  int nCols = result.cols;

  Mat result_resize;
  if (nCols <= img_window.cols && nRows <= img_window.rows) {
    result_resize = result;

  } else if (nCols > img_window.cols && nRows <= img_window.rows) {
    float scale = float(img_window.cols) / float(nCols);
    resize(result, result_resize, Size(), scale, scale, CV_INTER_AREA);

  } else if (nCols <= img_window.cols && nRows > img_window.rows) {
    float scale = float(img_window.rows) / float(nRows);
    resize(result, result_resize, Size(), scale, scale, CV_INTER_AREA);

  } else if (nCols > img_window.cols && nRows > img_window.rows) {
    Mat result_middle;
    float scale = float(img_window.cols) / float(nCols);
    resize(result, result_middle, Size(), scale, scale, CV_INTER_AREA);

    if (result_middle.rows > img_window.rows) {
      float scale = float(img_window.rows) / float(result_middle.rows);
      resize(result_middle, result_resize, Size(), scale, scale, CV_INTER_AREA);

    } else {
      result_resize = result_middle;
    }
  } else {
    result_resize = result;
  }

  Mat imageRoi = img_window(Rect((RESULTWIDTH - result_resize.cols) / 2,
                                 (RESULTHEIGHT - result_resize.rows) / 2,
                                 result_resize.cols, result_resize.rows));
  addWeighted(imageRoi, 0, result_resize, 1, 0, imageRoi);  // weights 0/1: copy the resized result into the centered ROI

  imshow("EasyPR", img_window);
  waitKey();

  destroyWindow("EasyPR");

  return 0;
}
Example #8
// Generates the intensity image (per-pixel mean of the three color channels)
Mat Get_Intensity_Image(Mat inputImage)
{
#ifdef GPU_MODE
	gpu::GpuMat d_src(inputImage);
	Mat retImage;
	gpu::GpuMat gputemp;
	vector<gpu::GpuMat> d_vec;
	gpu::split(d_src, d_vec);
	gpu::addWeighted(d_vec[0], 1.0/3, d_vec[1], 1.0/3, 0.0, gputemp, -1);
	gpu::addWeighted(gputemp, 1, d_vec[2], 1.0/3, 0.0, gputemp, -1);
	gputemp.download(retImage);
#else
	vector<Mat> rgb;
	Mat cputemp;
	Mat retImage;
	split(inputImage, rgb);
	addWeighted(rgb[0], 1.0/3, rgb[1], 1.0/3, 0.0, cputemp, -1);
	addWeighted(cputemp, 1, rgb[2], 1.0/3, 0.0, retImage, -1);
#endif
	return retImage;
}
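Example #9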
void CTargetDetector::filterByColors(std::vector<Scalar> colorsLow, std::vector<Scalar> colorsHigh) {
    Mat hsvImage;
    cvtColor(bufferImg, hsvImage, COLOR_BGR2HSV);
    
    // accumulate the per-color masks into a single binary image
    Mat colorsFilteredImage = Mat::zeros(hsvImage.size(), CV_8UC1);
    Mat buffer;
    for (size_t i = 0; i < colorsLow.size(); ++i) {
        inRange(hsvImage, colorsLow[i], colorsHigh[i], buffer);
        addWeighted(buffer, 1.0, colorsFilteredImage, 1.0, 0.0, colorsFilteredImage);
    }
    
    colorsFilteredImage.copyTo(bufferImg);
}
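Since the per-color masks are 8-bit binary images, cv::bitwise_or is the more idiomatic way to combine them and avoids relying on saturating addition. A minimal sketch under the same assumptions as the loop above:

    // Illustrative alternative: OR the masks together instead of adding them.
    cv::Mat combined = cv::Mat::zeros(hsvImage.size(), CV_8UC1);
    cv::Mat mask;
    for (size_t i = 0; i < colorsLow.size(); ++i) {
        cv::inRange(hsvImage, colorsLow[i], colorsHigh[i], mask);
        cv::bitwise_or(combined, mask, combined);
    }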
Example #10
bool cameraSettings2d::mixImagesToColor(){
    if(isUpdated)
    {
        cam->retriveDepthAndMask(nC,depths[nC],masks[nC]);
        cam->retriveImage(nC,imgs[nC]);
        depths[nC].convertTo(depthsT[nC],CV_8UC1,255.0/6000);
        depths[nC].convertTo(depthsS[nC],CV_32FC1,1.0/6000);
        cvtColor(depthsT[nC],depthsC[nC],CV_GRAY2RGB);
        addWeighted( depthsC[nC], alpha, imgs[nC], 1.0-alpha, 0.0, mixs[nC]);
        cvtColor(mixs[nC],mixs[nC],CV_BGR2RGB);
        return true;
    }
    return false;
}
Example #11
void COrientationFilter::filterImage(const Mat& input_img, Mat& filtered_img)
{
    // Step 1: get for each image pixel the gradient for each color channel
    Mat orientation_grad_resp;
    Mat grad_x, grad_y;

    // Blur
    Mat blurred_img;
    GaussianBlur(input_img, blurred_img, Size(3,3), 0, 0, BORDER_DEFAULT);
    // Gradient X (right)
    Sobel(blurred_img, grad_x, ddepth_, 1, 0, 3, scale_, delta_, BORDER_DEFAULT);
    // Gradient Y (down)
    Sobel(blurred_img, grad_y, ddepth_, 0, 1, 3, scale_, delta_, BORDER_DEFAULT);

    grad_x = abs(grad_x);
    grad_y = abs(grad_y);

    // threshold to zero to neglect gradients in opposite directions
    threshold(grad_x, grad_x, to_zero_threshold_, 0, THRESH_TOZERO);
    threshold(grad_y, grad_y, to_zero_threshold_, 0, THRESH_TOZERO);

    // superposition of both gradients: suppress gradients that are also strong in the perpendicular direction (the ±0.707107 weights are ±1/√2)
    switch(direction_)
    {
    case HORIZ:
        addWeighted(grad_x, -0.707107, grad_y, 0.707107, 0, orientation_grad_resp);
        break;
    case VERT:
        addWeighted(grad_x, 0.707107, grad_y, -0.707107, 0, orientation_grad_resp);
        break;
    }

    //normalize(orientation_grad_resp, orientation_grad_resp, 0, 255, NORM_MINMAX);
    filtered_img = Mat::zeros(orientation_grad_resp.size(), CV_8UC1);
    orientation_grad_resp.convertTo(filtered_img, CV_8UC1);
}
Example #12
int EdgeHandle::singleEdgeHandle( string InImgPosition , string InImageName, string OutImgPosition, string OutImgName ){

	Mat src, src_gray;
	Mat grad;
	int scale = 1;
	int delta = 0;
	int ddepth = CV_16S;

	/// Load an image
	src = imread(InImgPosition+InImageName);
	if( !src.data )
	{ return -1; }

	GaussianBlur( src, src, Size(3,3), 0, 0, BORDER_DEFAULT );

	/// Convert it to gray
	cvtColor( src, src_gray, CV_RGB2GRAY );

	/// Generate grad_x and grad_y
	Mat grad_x, grad_y;
	Mat abs_grad_x, abs_grad_y;

	//sobel
	Sobel( src_gray, grad_x, ddepth, 1, 0, 3, scale, delta, BORDER_DEFAULT );
	convertScaleAbs( grad_x, abs_grad_x );
	Sobel( src_gray, grad_y, ddepth, 0, 1, 3, scale, delta, BORDER_DEFAULT );
	convertScaleAbs( grad_y, abs_grad_y );
	addWeighted( abs_grad_x, 0.5, abs_grad_y, 0.5, 0, grad );

	// binary image.
	//threshold(grad,grad,0,255,THRESH_BINARY);

	// Apply the specified morphology operation three times
	// (shape 1 = MORPH_CROSS, operation 5 = MORPH_TOPHAT)
	int morph_size = 1;
	Mat element = getStructuringElement( 1, Size( 2*morph_size + 1, 2*morph_size+1 ), Point( morph_size, morph_size ) );
	morphologyEx( grad, grad, 5, element );
	morphologyEx( grad, grad, 5, element );
	morphologyEx( grad, grad, 5, element );

	// Write the result image
	string outs = OutImgPosition + OutImgName;
	imwrite(outs,grad);

	return 0;

}
Example #13
void *color_processing(void *pointer) {
	Mat *imgPtr = (Mat *) pointer;
	vector < Mat > color_map;
	
	color_map = Normalize_color(*imgPtr, IntensityImg);
	vector < Mat > RGBYMap(6);

	for (int i = 0; i < 6; i++)
		addWeighted(color_map[i], 0.5, color_map[i + 6], 0.5, 0, RGBYMap[i],
				-1);

	AggColor = aggregateMaps(RGBYMap);
	normalize(AggColor, AggColor, 0, 255, NORM_MINMAX, -1);

	pthread_exit (NULL);
}
Example #14
void my_Sobel(cv::Mat& srcImg, cv::Mat& dstImg)
{
    Mat dst_x, dst_y;
    Sobel(srcImg, dst_x, srcImg.depth(), 1, 0, 3);
    Sobel(srcImg, dst_y, srcImg.depth(), 0, 1, 3);
    convertScaleAbs(dst_x, dst_x);
    convertScaleAbs(dst_y, dst_y);
    addWeighted( dst_x, 0.5, dst_y, 0.5, 0, dstImg);

    if(dstImg.channels() == 3)
    {
        cvtColor(dstImg, dstImg, CV_BGR2GRAY);
        cvtColor(dstImg, dstImg, CV_GRAY2BGR);
    }

}
Example #15
void harrisCornerDetection() {
	cv::Mat grad_x, grad_y;
	cv::Mat abs_grad_x, abs_grad_y;
	cv::Mat grad;
	cv::Mat R(inputImage.rows,inputImage.cols,CV_64F);
	double maxR = 0;
	Sobel(inputImage,grad_x,CV_16S,1,0,3,1,0,cv::BORDER_DEFAULT);
	Sobel(inputImage,grad_y,CV_16S,0,1,3,1,0,cv::BORDER_DEFAULT);
	convertScaleAbs(grad_x,abs_grad_x);
	convertScaleAbs(grad_y,abs_grad_y);
	addWeighted(abs_grad_x,0.5,abs_grad_y,0.5,0,grad);
	for (int x=0;x<inputImage.cols;x++) {
		for (int y=0;y<inputImage.rows;y++) {
			double A = 0;
			double B = 0;
			double C = 0;
			for (int u=-templateSize/2;u<=templateSize/2;u++) {
				for (int v=-templateSize/2;v<=templateSize/2;v++) {
					int columnIndex = x+u;
					int rowIndex = y+v;
					if (columnIndex<0 || columnIndex>=inputImage.cols) {
						continue;
					}
					if (rowIndex<0 || rowIndex>=inputImage.rows) {
						continue;
					}
					A += pow(abs_grad_x.at<uchar>(rowIndex,columnIndex),2);
					B += abs_grad_x.at<uchar>(rowIndex,columnIndex)*abs_grad_y.at<uchar>(rowIndex,columnIndex);
					C += pow(abs_grad_y.at<uchar>(rowIndex,columnIndex),2);
				}
			}
			double cornerness = A*C - B*B - 0.04*(A+C)*(A+C); // Harris response: det(M) - k*trace(M)^2, k = 0.04
			R.at<double>(y,x) = cornerness;
			maxR = std::max(cornerness,maxR);
		}
	}
	cv::Mat annotatedImage;
	cv::cvtColor(inputImage,annotatedImage,CV_GRAY2RGB);
	for (int x=0;x<inputImage.cols;x++) {
		for (int y=0;y<inputImage.rows;y++) {
			if (R.at<double>(y,x) >= maxR*threshold) {
				circle(annotatedImage,cvPoint(x,y),2,CV_RGB(0,0,255));
			}
		}
	}
	imshow("Harris Corner Detection",annotatedImage);
}
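Example #16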
CMat& CMat::soble(){
	Mat src_gray;
	pic.copyTo(src_gray);
	pic.release();
	int scale = 1,delta = 0,ddepth = CV_16S;
	Mat grad_x, grad_y;
	Mat abs_grad_x, abs_grad_y;
	Sobel(src_gray, grad_x, ddepth, 1, 0, 3, scale, delta, BORDER_DEFAULT);
	convertScaleAbs(grad_x, abs_grad_x );
	Sobel(src_gray, grad_y, ddepth, 0, 1, 3, scale, delta, BORDER_DEFAULT);
	convertScaleAbs(grad_y, abs_grad_y);
	addWeighted(abs_grad_x, 0.5, abs_grad_y, 0.5, 0, pic);
	src_gray.release();
	grad_x.release(), grad_y.release();
	abs_grad_x.release(), abs_grad_y.release();
	return (*this);
}
Example #17
void alpha::process(const cv::Mat &img_input, cv::Mat &img_output)
{
  if(img_input.empty())
    return;
  img = Mat(img_input);
  if(firstTime)
  {
    // initialize the background model with the first frame
    img.copyTo(background);
    //background = Scalar::all(0);
  }
  
  addWeighted(background, alphaParam, img, 1-alphaParam, 0.0, background);

  background.copyTo(img_output);
  firstTime = false;
}
Example #18
	void coherenceEnhancingShockFilter(cv::InputArray src, cv::OutputArray dest_, const int sigma, const int str_sigma_, const double blend, const int iter)
	{
		Mat dest = src.getMat().clone(); // work on a copy so the input array is not modified in place
		const int str_sigma = min(31, str_sigma_);

		for (int i = 0; i < iter; i++)
		{
			Mat gray;
			if (src.channels() == 3)cvtColor(dest, gray, CV_BGR2GRAY);
			else gray = dest;

			Mat eigen;
			if (gray.type() == CV_8U || gray.type() == CV_32F || gray.type() == CV_64F)
				cornerEigenValsAndVecs(gray, eigen, str_sigma, 3);
			else
			{
				Mat grayf; gray.convertTo(grayf, CV_32F);
				cornerEigenValsAndVecs(grayf, eigen, str_sigma, 3);
			}

			vector<Mat> esplit(6);
			split(eigen, esplit);
			Mat x = esplit[2];
			Mat y = esplit[3];
			Mat gxx;
			Mat gyy;
			Mat gxy;
			Sobel(gray, gxx, CV_32F, 2, 0, sigma);
			Sobel(gray, gyy, CV_32F, 0, 2, sigma);
			Sobel(gray, gxy, CV_32F, 1, 1, sigma);

			Mat gvv = x.mul(x).mul(gxx) + 2 * x.mul(y).mul(gxy) + y.mul(y).mul(gyy);

			Mat mask;
			compare(gvv, 0, mask, cv::CMP_LT);

			Mat di, ero;
			erode(dest, ero, Mat());
			dilate(dest, di, Mat());
			di.copyTo(ero, mask);
			addWeighted(dest, blend, ero, 1.0 - blend, 0.0, dest);
		}
		dest.copyTo(dest_);
	}
Example #19
void caasCLR4Tx1::FindIsolatorAngle()
{
	// Cut out the rightmost 4/5 of the isolator width
	int height = 3 * (isolatorBottomEdge - isolatorTopEdge) / 2; int middle = (isolatorBottomEdge + isolatorTopEdge) / 2;
	Rect rect = Rect(isolatorRightEdge - 4 * isolatorWidth / 5, middle - height / 2, 4 * isolatorWidth / 5, height);
	Mat imageIsolator = imageGray(rect);
	int scale = 4;
	resize(imageIsolator, imageIsolator, Size(imageIsolator.cols / scale, imageIsolator.rows / scale));
#if _DEBUG
	imwrite("Isolator.jpg", imageIsolator);
#endif

	//sharpen the image
	//Unsharp masking: blur with a Gaussian filter and subtract the smoothed version from the original image (in a weighted way, so the values of a constant area remain constant).
	Mat imageBlurred, imageGraySharpened;	double GAUSSIAN_RADIUS = 4.0;
	GaussianBlur(imageIsolator, imageBlurred, Size(0, 0), GAUSSIAN_RADIUS);
	addWeighted(imageIsolator, 1.5, imageBlurred, -0.5, 0, imageGraySharpened);
#if _DEBUG
	//imageGraySharpened = imageGrayQuarter;
	imwrite("IsolatorSharpened.jpg", imageGraySharpened);
#endif

	imageGraySharpened = imageIsolator; // NOTE: this re-points imageGraySharpened at the original image, so the sharpening above is effectively discarded

	//Histogram Equalization
	equalizeHist(imageGraySharpened, imageGraySharpened);
#if _DEBUG
	imwrite("IsolatorEqualized.jpg", imageGraySharpened);
#endif

	//Otsu binarization
	Mat imageOtsu; threshold(imageGraySharpened, imageOtsu, 0, 255, CV_THRESH_BINARY | CV_THRESH_OTSU);
#if _DEBUG
	imwrite("IsolatorOtsu.jpg", imageOtsu);
#endif

	//Canny Edge Detection
	int median = Median(imageGraySharpened);
	Mat imageCanny;  Canny(imageGraySharpened, imageCanny, 0.66 * median, 1.33 * median);
#if _DEBUG
	imwrite("IsolatorCanny.jpg", imageCanny);
#endif

}
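The 1.5 / -0.5 weights above are one instance of unsharp masking: sharpened = src + amount * (src - blurred). A minimal generic sketch with an adjustable amount (the helper name and parameters are illustrative, not part of the original class):

#include <opencv2/opencv.hpp>

// Illustrative unsharp-mask helper: sharpened = (1 + amount) * src - amount * blurred.
static cv::Mat unsharpMask(const cv::Mat& src, double sigma = 4.0, double amount = 0.5)
{
	cv::Mat blurred, sharpened;
	cv::GaussianBlur(src, blurred, cv::Size(0, 0), sigma);
	cv::addWeighted(src, 1.0 + amount, blurred, -amount, 0.0, sharpened);
	return sharpened;
}

With amount = 0.5 this reproduces the 1.5 / -0.5 weights used in FindIsolatorAngle; RefineIsolator further below uses a stronger amount = 1.5 (weights 2.5 / -1.5).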
Example #20
/**
 * Main processing function.
 * Read input image and create vector of images for each digit.
 */
void ImageProcessor::process() {
    _digits.clear();
    // convert to gray
    cvtColor(_img, _imgGray, CV_BGR2GRAY);
    fastNlMeansDenoising(_imgGray, _imgGray, 10);
    cv::imshow("Denoising", _imgGray);	
    _imgCalque = imread("./images/calque.png");
    cvtColor(_imgCalque, _imgCalqueGray, CV_BGR2GRAY);
    addWeighted( _imgGray, 0.65, _imgCalqueGray, 0.35, 0.0, _imgGray);
    cv::imshow("ImageProcessor1", _imgGray);
    threshold(_imgGray, _imgGray, 135, 255, 3);  // 3 = THRESH_TOZERO
    cv::imshow("ImageProcessor2", _imgGray);
    // find and isolate counter digits
    findCounterDigits();

    if (_debugWindow) {
        showImage();
    }
}
Example #21
Mat EdgeHandle::MatIllumination(Mat img){

	Mat src, src_gray;
	Mat grad;
	int scale = 1;
	int delta = 0;
	int ddepth = CV_16S;

	/// Use the input image
	src = img;

	GaussianBlur( src, src, Size(3,3), 0, 0, BORDER_DEFAULT );

	/// Convert it to gray
	cvtColor( src, src_gray, CV_RGB2GRAY );

	/// Generate grad_x and grad_y
	Mat grad_x, grad_y;
	Mat abs_grad_x, abs_grad_y;

	//sobel
	Sobel( src_gray, grad_x, ddepth, 1, 0, 3, scale, delta, BORDER_DEFAULT );
	convertScaleAbs( grad_x, abs_grad_x );
	Sobel( src_gray, grad_y, ddepth, 0, 1, 3, scale, delta, BORDER_DEFAULT );
	convertScaleAbs( grad_y, abs_grad_y );
	addWeighted( abs_grad_x, 0.5, abs_grad_y, 0.5, 0, grad );

	// binary image.
	//threshold(grad,grad,0,255,THRESH_BINARY);

	// Apply the specified morphology operation three times
	// (shape 1 = MORPH_CROSS, operation 5 = MORPH_TOPHAT)
	int morph_size = 1;
	Mat element = getStructuringElement( 1, Size( 2*morph_size + 1, 2*morph_size+1 ), Point( morph_size, morph_size ) );
	morphologyEx( grad, grad, 5, element );
	morphologyEx( grad, grad, 5, element );
	morphologyEx( grad, grad, 5, element );

	return grad;

}
Example #22
void FWImage::createGrad(int scale,int delta)
{
    /// Generate grad_x and grad_y
    Mat grad_x, grad_y;
    Mat abs_grad_x, abs_grad_y;
    int ddepth = CV_16S;
    
    /// Gradient X
    //Scharr( src_gray, grad_x, ddepth, 1, 0, scale, delta, BORDER_DEFAULT );
    Sobel( this->iGray, grad_x, ddepth, 1, 0, 3, scale, delta, BORDER_DEFAULT );
    convertScaleAbs( grad_x, abs_grad_x );
    
    /// Gradient Y
    //Scharr( src_gray, grad_y, ddepth, 0, 1, scale, delta, BORDER_DEFAULT );
    Sobel( this->iGray, grad_y, ddepth, 0, 1, 3, scale, delta, BORDER_DEFAULT );
    convertScaleAbs( grad_y, abs_grad_y );
    
    /// Total Gradient (approximate)
    addWeighted( abs_grad_x, 0.5, abs_grad_y, 0.5, 0, this->iGrad );
    
}
Example #23
/**
 * Painting effect.
 */
void edgeDetectSobel(Mat& img) {
	Mat gray, temp;
	GaussianBlur(img, img, Size(3, 3), 0, 0);
	cvtColor(img, gray, CV_RGB2GRAY);
	//x
	Sobel(gray, img, CV_16S, 1, 0, 3, 1.5, 0.3, BORDER_DEFAULT);

	convertScaleAbs(img, img);
	//y
	Sobel(gray, temp, CV_16S, 0, 1, 3, 1.5, 0.4, BORDER_DEFAULT);

	convertScaleAbs(temp, temp);
	addWeighted(img, .3, temp, .3, .3, img);

	erode(img, img,
			getStructuringElement(MORPH_ELLIPSE,
					Size(dilation_size + 1, dilation_size + 1),
					Point(dilation_size, dilation_size)));
	bitwise_not(img, img);
	cvtColor(img, img, CV_GRAY2BGR);

}
Example #24
void RoadDetection::drawRoadShape(Mat frame, vector<Point> points, Scalar color, double alpha)
{
	Mat copy;
	Point shapePoints[1][3];
	int npt[] = { 3 };

	// the shape is drawn as a triangle, so exactly three points are expected
	if (points.size() != 3)
	{
		return;
	}

	for (size_t i = 0; i < points.size(); i++)
	{
		shapePoints[0][i] = points[i];
	}
		
	const Point* ppt[1] = { shapePoints[0] };
	frame.copyTo(copy);

	fillPoly(copy, ppt, npt, 1, color, CV_AA);
	addWeighted(copy, alpha, frame, 1.0 - alpha, 0.0, frame);
}
Example #25
Mat gradient(Mat src) {
	int scale = 1;
	int delta = 0;
	int ddepth = CV_16S;
	Mat grad_x, grad_y;
	Mat abs_grad_x, abs_grad_y, grad;

	/// Gradient X
	//Scharr( src_gray, grad_x, ddepth, 1, 0, scale, delta, BORDER_DEFAULT );
	Sobel(src, grad_x, ddepth, 1, 0, 3, scale, delta, BORDER_DEFAULT);
	convertScaleAbs(grad_x, abs_grad_x);

	/// Gradient Y
	//Scharr( src_gray, grad_y, ddepth, 0, 1, scale, delta, BORDER_DEFAULT );
	Sobel(src, grad_y, ddepth, 0, 1, 3, scale, delta, BORDER_DEFAULT);
	convertScaleAbs(grad_y, abs_grad_y);

	/// Total Gradient (approximate)
	addWeighted(abs_grad_x, 0.5, abs_grad_y, 0.5, 0, grad);
	
	return grad;
}
Example #26
void caasCLR4Tx1::RefineIsolator()
{
	//int width = isolatorRightEdge - isolatorLeftEdge;
	//int height = isolatorBottomEdge - isolatorTopEdge;
	//Expand horizontally 1 time to the right; expand vertically 1/2

	int left = isolatorLeftEdge;
	int right = left + 2 * isolatorWidth; if (right > targetLeftEdge - 20) right = targetLeftEdge - 20;
	int top = isolatorTopEdge - isolatorHeight / 4;
	int bottom = top + isolatorHeight + isolatorHeight / 2;
	Rect roi = Rect(left, top, right - left, bottom - top);
	imageIsolatorRoi = imageGray(roi);
#if _DEBUG
	imwrite("5.1.IsolatorRoi.jpg", imageIsolatorRoi);
#endif

	int scale = 4;
	resize(imageIsolatorRoi, imageIsolatorRoi, Size(imageIsolatorRoi.cols / scale, imageIsolatorRoi.rows / scale));

	Mat imageBlurred, imageGraySharpened;	double GAUSSIAN_RADIUS = 4.0;
	GaussianBlur(imageIsolatorRoi, imageBlurred, Size(0, 0), GAUSSIAN_RADIUS);
	addWeighted(imageIsolatorRoi, 2.5, imageBlurred, -1.5, 0, imageGraySharpened);
#if _DEBUG
	imwrite("5.2.IsolatorRoiSharpened.jpg", imageGraySharpened);
#endif

	int median = Median(imageGraySharpened);
	Mat imageCanny; Canny(imageGraySharpened, imageCanny, 0.66 * median, 1.33 * median);
#if _DEBUG
	imwrite("5.3.IsolatorRoiCanny.jpg", imageCanny);
#endif

	Mat Points;	findNonZero(imageCanny, Points);
	Rect Min_Rect = boundingRect(Points);
	isolatorRightEdge = roi.x + (Min_Rect.x + Min_Rect.width) * scale;
	RotatedRect rect = minAreaRect(Points);
	isolatorAngle = rect.angle;
}
Example #27
Mat AntiShake::BorderDetector(Mat src, int type) {
	Mat dst;
	if (type == 0) {
		// ----- Sobel:
		Mat grad_x, grad_y, grad;
		Mat abs_grad_x, abs_grad_y;
		int ddepth = CV_16S;

		int border = BORDER_ISOLATED;
		Sobel(src, grad_x, ddepth, 1, 0, 3, 1, 1, border);
		convertScaleAbs(grad_x, abs_grad_x);

		Sobel(src, grad_y, ddepth, 0, 1, 3, 1, 1, border);
		convertScaleAbs(grad_y, abs_grad_y);

		addWeighted(abs_grad_x, 0.5, abs_grad_y, 0.5, 0, grad);
		grad.copyTo(dst);
		//	threshold(dst, dst, 150, 255, cv::THRESH_TOZERO);
	} else if (type == 1) {
		// ----- Canny edge detector
		Canny(src, dst, 200, 6000, 5, true);
	} else if (type == 2) {
		// ----- Laplacian:
		Laplacian(src, dst, CV_8UC1, 1, 1, 1);
	} else {
		// ----- Magic Laplacian:
		cv::Laplacian(src, dst, CV_32F, 9);
		double scale = -1.0;
		if (scale < 0) {
			double lapmin, lapmax;
			cv::minMaxLoc(dst, &lapmin, &lapmax);
			scale = 255 / std::max(-lapmin, lapmax);
		}
		dst.convertTo(dst, CV_8U, scale, 256);
	}
	return dst;
}
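Example #28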
void guiAlphaBlend(const Mat& src1, const Mat& src2)
{
	Mat s1,s2;
	if(src1.channels()==1)cvtColor(src1,s1,CV_GRAY2BGR);
	else s1 = src1;
	if(src2.channels()==1)cvtColor(src2,s2,CV_GRAY2BGR);
	else s2 = src2;
	namedWindow("alphaBlend");
	int a = 0;
	createTrackbar("a","alphaBlend",&a,100);
	int key = 0;
	Mat show;
	while(key!='q')
	{
		addWeighted(s1,1.0-a/100.0,s2,a/100.0,0.0,show);
		imshow("alphaBlend",show);
		key = waitKey(1);
		if(key=='f')
		{
			a = (a > 0) ? 0 : 100;
			setTrackbarPos("a","alphaBlend",a);
		}
	}
}
Example #29
// ! License plate recognition module
int CPlateRecognize::plateRecognize(Mat src, std::vector<string> &licenseVec) {
    // collection of detected plate candidates
    vector<CPlate> plateVec;

    // perform plate localization, using color information and a secondary Sobel pass
    int resultPD = plateDetect(src, plateVec, getPDDebug(), 0);

    if (resultPD == 0) {
        int num = plateVec.size();
        int index = 0;

        // recognize the characters inside each plate in turn
        for (int j = 0; j < num; j++) {
            CPlate item = plateVec[j];
            Mat plate = item.getPlateMat();

            // get the plate color
            string plateType = getPlateColor(plate);

            // get the plate number
            string plateIdentify = "";
            int resultCR = charsRecognise(plate, plateIdentify);
            if (resultCR == 0) {
                string license = plateType + ":" + plateIdentify;
                licenseVec.push_back(license);
            }
        }
        // the full recognition process ends here

        // in debug mode, also draw the located plates in the top-left corner of the original image
        if (getPDDebug() == true) {
            Mat result;
            src.copyTo(result);

            for (int j = 0; j < num; j++) {
                CPlate item = plateVec[j];
                Mat plate = item.getPlateMat();

                int height = 36;
                int width = 136;
                if (height * index + height < result.rows) {
                    Mat imageRoi = result(Rect(0, 0 + height * index, width, height));
                    addWeighted(imageRoi, 0, plate, 1, 0, imageRoi);  // weights 0/1: copy the plate image into the ROI
                }
                index++;

                RotatedRect minRect = item.getPlatePos();
                Point2f rect_points[4];
                minRect.points(rect_points);

                Scalar lineColor = Scalar(255, 255, 255);

                if (item.getPlateLocateType() == SOBEL) lineColor = Scalar(255, 0, 0);

                if (item.getPlateLocateType() == COLOR) lineColor = Scalar(0, 255, 0);

                for (int j = 0; j < 4; j++)
                    line(result, rect_points[j], rect_points[(j + 1) % 4], lineColor, 2,
                         8);
            }

            // show the image with the located plate rectangles
            showResult(result);
        }
    }

    return resultPD;
}
Example #30
void MapsMerge::ImagesMergerStrategy::mergeImages(ImagesMatches& imgsMatches) {
	addWeighted(imgsMatches.imgFeatures2.img, 0.5, imgsMatches.transformedImage, 1, 0.0, imgsMatches.mergedImages);
}