// buf must be fftwf_malloc'd with size `sizeof(float) * fftLen`.
//
// `im` should be offset from `fix` by approximately `hint`.
Point phaseCorr(const float *fix, const float *im, const Size &imSz, const fftwf_plan &plan, float *buf, const Point2f &hint,
				double &bestSqDist, const Mat &mask, Point &maxLoc, float &conf, const ConfGetter getConf, const char *saveIm) {
	unsigned fftLen = getFFTLen(imSz);
	for (unsigned i = 0; i < fftLen; i += 2) {
		// fix * conj(im), normalized to unit magnitude (phase-only cross-power spectrum)
		float a = fix[i] * im[i] + fix[i + 1] * im[i + 1];
		float b = fix[i + 1] * im[i] - fix[i] * im[i + 1];
		float norm = sqrtf(a * a + b * b);
		buf[i] = (norm != 0) ? (a / norm) : 0;
		buf[i + 1] = (norm != 0) ? (b / norm) : 0;
	}

	fftwf_execute_dft_c2r(plan, (fftwf_complex *)buf, buf);

	Mat bufMat(imSz.height, imSz.width + 2, CV_32FC1, buf);
	//	bufMat = abs(bufMat);
	blur(bufMat, bufMat, Size(21, 21));

	if (saveIm) {
		saveFloatIm(saveIm, bufMat);
	}

	minMaxLoc(bufMat, NULL, NULL, NULL, &maxLoc, mask);

	// there are four potential shifts corresponding to one peak
	// we choose the shift that is closest to the microscope's guess for the offset between the two
	Point bestPt;
	bestSqDist = 1e99;
	for (int dx = -imSz.width; dx <= 0; dx += imSz.width) {
		for (int dy = -imSz.height; dy <= 0; dy += imSz.height) {
			Point curPt(maxLoc.x + dx, maxLoc.y + dy);
			double curSqDist = getSqDist(curPt, hint);
			if (curSqDist < bestSqDist) {
				bestSqDist = curSqDist;
				bestPt = curPt;
			}
		}
	}

	conf = getConf(bufMat, maxLoc, bestSqDist);

	return bestPt;
}
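Below is a minimal, hedged setup sketch for the buffer and plan that phaseCorr() expects; it assumes getFFTLen(sz) equals sz.height * (sz.width + 2) (the padded in-place r2c layout implied by the Mat constructed above) and is not taken from the original codebase.

#include <fftw3.h>
#include <opencv2/core/core.hpp>
using namespace cv;

int main() {
	Size imSz(640, 480);
	unsigned fftLen = imSz.height * (imSz.width + 2);           // assumed getFFTLen(imSz)
	float *buf = (float *)fftwf_malloc(sizeof(float) * fftLen); // as required by phaseCorr
	// In-place complex-to-real plan; phaseCorr() runs it via fftwf_execute_dft_c2r.
	fftwf_plan plan = fftwf_plan_dft_c2r_2d(imSz.height, imSz.width,
	                                        (fftwf_complex *)buf, buf, FFTW_MEASURE);
	// ... run forward r2c FFTs of the two frames into `fix` and `im`,
	//     then call phaseCorr(fix, im, imSz, plan, buf, hint, ...).
	fftwf_destroy_plan(plan);
	fftwf_free(buf);
	return 0;
}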
Example #2
/* Draws the heatmap on top of a frame. The frame must be the same size as
 * the heatmap. 
 */
void AttentionMap::overlay(unsigned char* pDestImage, int imageWidth, int imageHeight)
{
	update();

	// Make sure all values are capped at one
	m_heatmap = min(m_ones, m_heatmap);

	Mat temp_map;
	blur(m_heatmap, temp_map, Size(15, 15));

	for (int r = 0; r < m_heatmap.rows; ++r)
	{
		//Vec3b* f_ptr = (Vec3b *)pDestImage;
		float* h_ptr = temp_map.ptr<float>(r);
		for (int c = 0; c < m_heatmap.cols; ++c)
		{
			const float heat_mix = h_ptr[c];
			if (heat_mix > 0.0)
			{
				// in BGR
				const Vec3b i_color = Vec3b(pDestImage[0], pDestImage[1], pDestImage[2]);

				const Vec3b heat_color = 
					hsv_to_bgr(interpolate_hsv(g_heat_color2, g_heat_color1, heat_mix));

				const float heat_mix2 = std::min(heat_mix, g_max_transparency);

				const Vec3b final_color = interpolate(i_color, heat_color, heat_mix2);
				
				//f_ptr[c] = final_color;
				pDestImage[0] = final_color[0];
				pDestImage[1] = final_color[1];
				pDestImage[2] = final_color[2];
			}

			pDestImage+=3;
		}

		pDestImage += (imageWidth - m_heatmap.cols) *3;
	}

	fade();
}
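The blend at the end of the loop relies on an interpolate() helper that is not shown here; a minimal per-channel linear blend consistent with how it is called above might look like the following (an assumption, not the project's actual helper).

// Assumed shape of interpolate(): per-channel linear blend of two BGR pixels,
// t = 0 returns a, t = 1 returns b.
static cv::Vec3b interpolate(const cv::Vec3b &a, const cv::Vec3b &b, float t)
{
	cv::Vec3b out;
	for (int i = 0; i < 3; ++i)
		out[i] = cv::saturate_cast<uchar>((1.0f - t) * a[i] + t * b[i]);
	return out;
}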
Example #3
void *readImage(FILE *inf, int opt, FILE *outf){ 

  int width, height; 
  int max;
  int r,c;
  int pixel;

  // Process the PGM header:
  // skip the first two header lines (magic number and comment),
  // then read the image dimensions and the maximum gray value
  eatLine(inf);
  eatLine(inf);
  fscanf(inf,"%d",&width);  // image width
  fscanf(inf,"%d",&height); // image height
  fscanf(inf,"%d",&max);    // maximum gray value

  float *array = malloc(sizeof(float)*width*height);
  int p;
  // iterate through rows and columns
  // scan in graymap values, convert to pixels
  // fill array with pixel values
  for (r=0; r < height; r++) {
    for (c=0; c < width; c++) {
      fscanf(inf,"%d",&pixel);
      p = c + r*width;
      array[p] = (float)pixel/(float)max;
    }
  }

  // Select which operation to apply to the pixel array
  switch (opt) {

  case(ASCII):
    echoASCII(array,height,width,outf);
    break;

  case(INV):
    invert(array,height,width,outf);
    break;

  case(BLUR):
    blur(array,height,width,outf);
    break;
  }

  return array;  /* hand the normalized pixel buffer back to the caller */
}
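A small driver sketch for readImage(), assuming its prototype and the ASCII/INV/BLUR constants are declared in a shared header (named image.h here only for illustration) and that the function returns the normalized pixel array as above.

/* Hypothetical driver: read a PGM, run the blur path, and free the returned pixels. */
#include <stdio.h>
#include <stdlib.h>
#include "image.h"   /* assumed to declare readImage() and ASCII/INV/BLUR */

int main(int argc, char **argv) {
  if (argc != 3) {
    fprintf(stderr, "usage: %s in.pgm out.pgm\n", argv[0]);
    return 1;
  }
  FILE *inf = fopen(argv[1], "r");
  FILE *outf = fopen(argv[2], "w");
  float *pixels = (float *)readImage(inf, BLUR, outf);  /* values in [0,1] */
  free(pixels);
  fclose(inf);
  fclose(outf);
  return 0;
}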
Example #4
void captcha(unsigned char im[70*200], unsigned char l[6]) {
	unsigned i;
	unsigned char swr[200];
	uint8_t s1,s2;

	int f=open("/dev/urandom",O_RDONLY);
	read(f,l,5); read(f,swr,200); read(f,dr,sizeof(dr)); read(f,&s1,1); read(f,&s2,1);
	close(f);

	memset(im,0xff,200*70); s1=s1&0x7f; s2=s2&0x3f;
	int p=30;
	for (i = 0; i < 5; i++) {
		l[i] = l[i] % NUM_GLYPHS;
		p=letter(l[i],p,im,swr,s1,s2);
		l[i] = letters[l[i]];
	}
	line(im,swr,s1); dots(im); blur(im);
	l[5] = 0;
}
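A hedged caller sketch for captcha(): render one image and dump it as a binary PGM (the im buffer is 70 rows of 200 one-byte pixels). It assumes the glyph tables and helper routines that captcha() uses are linked in; the output file name is arbitrary.

/* Hypothetical caller: write the rendered CAPTCHA as an 8-bit PGM and print its text. */
#include <stdio.h>

void captcha(unsigned char im[70*200], unsigned char l[6]);  /* defined above */

int main(void) {
	unsigned char im[70*200], text[6];
	captcha(im, text);

	FILE *out = fopen("captcha.pgm", "wb");
	fprintf(out, "P5\n200 70\n255\n");     /* width 200, height 70, max gray 255 */
	fwrite(im, 1, sizeof im, out);
	fclose(out);

	printf("solution: %s\n", text);
	return 0;
}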
Example #5
static void
run (const gchar      *name,
     gint              nparams,
     const GimpParam  *param,
     gint             *nreturn_vals,
     GimpParam       **return_vals)
{
  static GimpParam  values[1];
  GimpPDBStatusType status = GIMP_PDB_SUCCESS;
  GimpRunMode       run_mode;
  GimpDrawable     *drawable;

  /* Setting mandatory output values */
  *nreturn_vals = 1;
  *return_vals  = values;

  values[0].type = GIMP_PDB_STATUS;
  values[0].data.d_status = status;

  /* Getting run_mode - we won't display a dialog if
   * we are in NONINTERACTIVE mode
   */
  run_mode = param[0].data.d_int32;

  /*  Get the specified drawable  */
  drawable = gimp_drawable_get (param[2].data.d_drawable);

  gimp_progress_init ("My Blur...");

  /* Let's time blur
   *
   *   GTimer *timer = g_timer_new ();
   */

  blur (drawable);

  /*   g_print ("blur() took %g seconds.\n", g_timer_elapsed (timer, NULL));
   *   g_timer_destroy (timer);
   */

  gimp_displays_flush ();
  gimp_drawable_detach (drawable);
}
Example #6
static void apply_paint_maskfilter(const SkPaint& paint, Json::Value* target, bool sendBinaries) {
    SkMaskFilter* maskFilter = paint.getMaskFilter();
    if (maskFilter != nullptr) {
        SkMaskFilter::BlurRec blurRec;
        if (maskFilter->asABlur(&blurRec)) {
            Json::Value blur(Json::objectValue);
            blur[SKJSONCANVAS_ATTRIBUTE_SIGMA] = Json::Value(blurRec.fSigma);
            switch (blurRec.fStyle) {
                case SkBlurStyle::kNormal_SkBlurStyle:
                    blur[SKJSONCANVAS_ATTRIBUTE_STYLE] = Json::Value(SKJSONCANVAS_BLURSTYLE_NORMAL);
                    break;
                case SkBlurStyle::kSolid_SkBlurStyle:
                    blur[SKJSONCANVAS_ATTRIBUTE_STYLE] = Json::Value(SKJSONCANVAS_BLURSTYLE_SOLID);
                    break;
                case SkBlurStyle::kOuter_SkBlurStyle:
                    blur[SKJSONCANVAS_ATTRIBUTE_STYLE] = Json::Value(SKJSONCANVAS_BLURSTYLE_OUTER);
                    break;
                case SkBlurStyle::kInner_SkBlurStyle:
                    blur[SKJSONCANVAS_ATTRIBUTE_STYLE] = Json::Value(SKJSONCANVAS_BLURSTYLE_INNER);
                    break;
                default:
                    SkASSERT(false);
            }
            switch (blurRec.fQuality) {
                case SkBlurQuality::kLow_SkBlurQuality:
                    blur[SKJSONCANVAS_ATTRIBUTE_QUALITY] = Json::Value(SKJSONCANVAS_BLURQUALITY_LOW);
                    break;
                case SkBlurQuality::kHigh_SkBlurQuality:
                    blur[SKJSONCANVAS_ATTRIBUTE_QUALITY] = Json::Value(SKJSONCANVAS_BLURQUALITY_HIGH);
                    break;
                default:
                    SkASSERT(false);
            }
            (*target)[SKJSONCANVAS_ATTRIBUTE_BLUR] = blur;
        }
        else {
            Json::Value jsonMaskFilter;
            flatten(maskFilter, &jsonMaskFilter, sendBinaries);
            (*target)[SKJSONCANVAS_ATTRIBUTE_MASKFILTER] = jsonMaskFilter;
        }
    }
}
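Since apply_paint_maskfilter() is file-static, a caller has to live in the same translation unit; the sketch below is an assumption built on the same era of the Skia and jsoncpp APIs used above (SkBlurMaskFilter.h is assumed to be included as well), and the attribute names in the output depend on what the SKJSONCANVAS_* macros expand to.

// Hypothetical caller: serialize a paint carrying a normal blur mask filter.
static void dump_blur_paint_json() {
    SkPaint paint;
    SkAutoTUnref<SkMaskFilter> mf(SkBlurMaskFilter::Create(kNormal_SkBlurStyle, 2.0f));
    paint.setMaskFilter(mf);

    Json::Value target(Json::objectValue);
    apply_paint_maskfilter(paint, &target, /*sendBinaries=*/false);

    // target now holds a blur object with sigma/style/quality entries.
    SkDebugf("%s", Json::FastWriter().write(target).c_str());
}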
Example #7
int
main(int argc, char** argv){

    float dur, time;

    if(argc == 3){
        dur  = (float) atof(argv[1]);
        time = (float) atof(argv[2]);
    }
    else {
        Usage(argv[0]);
        exit(1);
    }

    SndRTIO input(1, SND_INPUT);
    SndRTIO output(1, SND_OUTPUT);

    HammingTable window(1024, 0.54f);

    SndIn  in(&input);
    PVA    anal(&window, &in);
    PVBlur blur(&anal, time);
    PVS    synth(&window, &blur);

    output.SetOutput(1, &synth);

    int end = dur*DEF_SR/DEF_VECSIZE;

    for(int i=0; i<end; i++){
        input.Read();
        in.DoProcess();
        anal.DoProcess();
        blur.DoProcess();
        synth.DoProcess();
        output.Write();
    }

    return 0;
}
Example #8
vector< vector< Point> > BlobDetection::detectContours(Mat frame, Ptr< BackgroundSubtractor>& pMOG2Pointer , Mat& fgMaskMOG2)
{
	vector< vector< Point> > result;

	cvNamedWindow("Original"	, CV_WINDOW_NORMAL);
	cvNamedWindow("Blurred"		, CV_WINDOW_NORMAL);
	//cvNamedWindow("fgMaskMOG2X"	, CV_WINDOW_NORMAL);
	cvNamedWindow("Background Subtracted", CV_WINDOW_NORMAL);
	cvNamedWindow("Shadow Removed"	, CV_WINDOW_NORMAL);

	Mat fgMaskMOG2X = fgMaskMOG2.clone(); 

	Mat ContourImg; 
	Ptr< BackgroundSubtractor> pMOG2 = pMOG2Pointer; 
	Mat element = getStructuringElement(MORPH_RECT, Size(7, 7), Point(3, 3));
	imshow("Original", frame);

	//PreProcess
	blur(frame, frame, Size(4, 4));
	imshow("Blurred", frame);

	//Background subtraction
	pMOG2->operator()(frame, fgMaskMOG2X, -1);
	//imshow("fgMaskMOG2X", frame);

	morphologyEx(fgMaskMOG2X, frame, CV_MOP_CLOSE, element);
	imshow("Background Subtracted", frame);

	threshold(frame, frame, 180, 255, CV_THRESH_BINARY);
	imshow("Shadow Removed", frame);

	cvWaitKey(1);
	ContourImg = frame.clone();
	findContours(ContourImg,
		result, // a vector of contours
		CV_RETR_EXTERNAL, // retrieve the external contours
		CV_CHAIN_APPROX_NONE); // all pixels of each contours


	fgMaskMOG2 = fgMaskMOG2X.clone();
	return result;
}
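A hedged driver loop for detectContours(), written against the same OpenCV 2.4-era API the method uses; the default-constructible BlobDetection and the camera index are assumptions.

#include <opencv2/opencv.hpp>

int main() {
	cv::VideoCapture cap(0);
	cv::Ptr<cv::BackgroundSubtractor> pMOG2 = new cv::BackgroundSubtractorMOG2();
	BlobDetection detector;
	cv::Mat frame, fgMask;
	while (cap.read(frame)) {
		std::vector<std::vector<cv::Point> > contours =
			detector.detectContours(frame, pMOG2, fgMask);
		// ... filter the contours by area, track blobs, etc.
	}
	return 0;
}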
Example #9
void hysteresis(Mat img, Size size, String name, FileData &fd) {
	blur(img,img,size);
	Rgb rgb;
	Hsl hsl;
	int r,g,b;
	vector<double> HSL;
	double h,s,l;
	String pix;
	String hslStr;
	deque<String> colorWindow;
	deque<String> hslVec;
	for(int i=0; i<img.rows; i++)  {
		for(int j=0; j<img.cols; j++)  {
			r = img.at<Vec3b>(i,j)[2];
			g = img.at<Vec3b>(i,j)[1];
			b = img.at<Vec3b>(i,j)[0];
			HSL = hsl.rgb2hsl(r,g,b);
			h = HSL[0];
			s = ip::roundDecimal(HSL[1],2);
			l = ip::roundDecimal(HSL[2],2);
			pix = rgb.checkBlack(r,g,b);
			if(pix=="OTHER") {
				pix = rgb.calcColor(r,g,b);
			}
			colorWindow.push_back(pix);
			hslStr = ip::toString(h)+";"+ip::toString(s)+";"+ip::toString(l);
			hslVec.push_back(hslStr);
		}
		fd.windowVec.push_back(colorWindow);
		fd.hslMat.push_back(hslVec);
		colorWindow.clear();
		hslVec.clear();
	}
	Intensity in;
	in.calcMainColorMatrix(fd.getImage(), fd.windowVec, fd.hslMat, fd.filename, fd);
	//rule5(fd);
	cout << "Done!" << endl;
	colorWindow.clear();
	colorWindow.shrink_to_fit();
	hslVec.clear();
	hslVec.shrink_to_fit();
}
Example #10
void* threadBlur(void* argument) {
	
	// unpacking arguments
	args* image = (args*)argument;
	struct image* img = image->img;
	struct pixel* pixels = image->pixels;
	int firstLine = image->firstLine;
	int lastLine = image->lastLine;
	
	int i;
	int j;
	for (i=firstLine; i<=lastLine; i++) {
		if (i<img->height) {
			for (j=0; j<img->width; j++) {
				img->pixels[i * img->width +j] = blur(img,pixels,j,i);
			}
		}
	}
	pthread_exit(NULL);
}
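A hedged launcher sketch for threadBlur(): split the rows into equal bands and give each worker its own args struct (the same args/image/pixel types unpacked above); the thread count and the untouched snapshot copy of the pixels are assumptions.

#include <pthread.h>
#include <vector>

void blurParallel(struct image *img, struct pixel *snapshot, int nThreads) {
	std::vector<pthread_t> tids(nThreads);
	std::vector<args> work(nThreads);
	int band = (img->height + nThreads - 1) / nThreads;

	for (int t = 0; t < nThreads; ++t) {
		work[t].img = img;
		work[t].pixels = snapshot;             // unblurred copy that blur() reads from
		work[t].firstLine = t * band;
		work[t].lastLine = (t + 1) * band - 1; // rows past img->height are skipped above
		pthread_create(&tids[t], NULL, threadBlur, &work[t]);
	}
	for (int t = 0; t < nThreads; ++t)
		pthread_join(tids[t], NULL);
}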
Example #11
void Objectness::nonMaxSup(CMat &matchCost1f, ValStructVec<float, Point> &matchCost, int NSS, int maxPoint, bool fast)
{
    const int _h = matchCost1f.rows, _w = matchCost1f.cols;
    Mat isMax1u = Mat::ones(_h, _w, CV_8U), costSmooth1f;
    ValStructVec<float, Point> valPnt;
    matchCost.reserve(_h * _w);
    valPnt.reserve(_h * _w);
    if (fast) {
        blur(matchCost1f, costSmooth1f, Size(3, 3));
        for (int r = 0; r < _h; r++) {
            const float* d = matchCost1f.ptr<float>(r);
            const float* ds = costSmooth1f.ptr<float>(r);
            for (int c = 0; c < _w; c++)
                if (d[c] >= ds[c])
                    valPnt.pushBack(d[c], Point(c, r));
        }
    }
    else {
        for (int r = 0; r < _h; r++) {
            const float* d = matchCost1f.ptr<float>(r);
            for (int c = 0; c < _w; c++)
                valPnt.pushBack(d[c], Point(c, r));
        }
    }

    valPnt.sort();
    for (int i = 0; i < valPnt.size(); i++) {
        Point &pnt = valPnt[i];
        if (isMax1u.at<byte>(pnt)) {
            matchCost.pushBack(valPnt(i), pnt);
            for (int dy = -NSS; dy <= NSS; dy++) for (int dx = -NSS; dx <= NSS; dx++) {
                    Point neighbor = pnt + Point(dx, dy);
                    if (!CHK_IND(neighbor))
                        continue;
                    isMax1u.at<byte>(neighbor) = false;
                }
        }
        if (matchCost.size() >= maxPoint)
            return;
    }
}
Example #12
vector<Point> convexHullExtraction(Mat src) {
    Mat src_gray;
    cvtColor(src, src_gray, CV_BGR2GRAY);
    blur(src_gray, src_gray, Size(3, 3));

    // Convex Hull implementation
    Mat src_copy = src.clone();
    Mat dst;
    vector<vector<Point> > contours;
    vector<Vec4i> hierarchy;

    // Find contours
    threshold(src_gray, dst, 200, 255, THRESH_BINARY);
    findContours(dst, contours, hierarchy, CV_RETR_TREE, CV_CHAIN_APPROX_SIMPLE, Point(0, 0));

    // Find the convex hull of the selected contour
    vector<vector<Point> > hull(1);
    if (contours.size() > 1)
        convexHull(Mat(contours[1]), hull[0], false);


    // Draw contours + hull results
    if (convexShow) {
        RNG rng;
        Mat drawing = Mat::zeros(dst.size(), CV_8UC3);
        for (int i = 0; i< contours.size(); i++)
        {
            if (i == 1)
                drawContours(drawing, contours, i, Scalar(255, 255, 0), 1, 8, vector<Vec4i>(), 0, Point());
            if (i == 0)
                drawContours(drawing, hull, i, Scalar(255, 255, 255), 1, 8, vector<Vec4i>(), 0, Point());
        }
        // Show in a window
        namedWindow("Hull demo", CV_WINDOW_AUTOSIZE);
        imshow("Hull demo", drawing);
        if (save)
            imwrite("Hull.jpg", drawing);
        waitKey(0);
    }

    return hull[0];
}
Example #13
void Img::boundarize() {
	Mat image_gray, canny_output, nonzeros;
	int thresh = 100;
	vector<vector<Point> > contours;
	vector<Vec4i> hierarchy;
	RNG rng(12345);
	
	cvtColor( this->image, image_gray, CV_BGR2GRAY );
	blur( image_gray, image_gray, Size(3,3) );

	Canny( image_gray, canny_output, thresh, thresh*2, 3 );
	findContours( canny_output, contours, hierarchy, CV_RETR_TREE, CV_CHAIN_APPROX_SIMPLE, Point(0, 0) );

	this->image =   Mat::zeros( canny_output.size(), CV_8UC3 );

	for( unsigned int i = 0; i< contours.size(); i++ )
	{
	       Scalar color = Scalar( rng.uniform(0, 255), rng.uniform(0,255), rng.uniform(0,255) );
	       drawContours( this->image, contours, i, color, 2, 8, hierarchy, 0, Point() );
	}

	cvtColor( this->image, this->image, CV_BGR2GRAY );
	threshold( this->image, this->image, 0, 255,0 );
	findNonZero(this->image, nonzeros);
	
	this->points = nonzeros.rows;
	
	delete [] vx;
	delete [] vy;

	std::cout << "\n[+] Initiallizing vectors : "<<points<<" points detected";

	vx = new int[points+1];
	std::cout << "\n[+] Vx initiallized";
	vy = new int[points+1];
	std::cout << "\n[+] Vy Initiallized";
	
	this->sampling();
	
	
}
Example #14
Mat BookSegmenter::CannyThreshold(Mat src_gray){
	int lowThreshold = 30;
	int ratio = 3;
	int kernel_size = 3;

	/// Reduce noise with a kernel 3x3
	Mat detected_edges;
	blur(src_gray, detected_edges, Size(3, 3));

	/// Canny detector
	Canny(detected_edges, detected_edges, lowThreshold, lowThreshold * ratio,
			kernel_size);

	/// Using Canny's output as a mask, we display our result
	//Mat dst = Scalar::all(0);

	//src_gray.copyTo( dst, detected_edges);
	//imshow("Canny", detected_edges);

	return detected_edges;
}
Example #15
void Filter::applyFilter()
{
    configureSpinBox();
    switch(currentFilter){
    case FILTER_HOMOGENEOUS:
        blur(originalImage, image, ksize);
        ui->filteredImage->setPixmap(ImageHandler::getQPixmap(image));
        break;
    case FILTER_GAUSSIAN:
        GaussianBlur(originalImage, image, ksize, 0, 0);
        ui->filteredImage->setPixmap(ImageHandler::getQPixmap(image));
        break;
    case FILTER_MEDIAN:
        medianBlur(originalImage, image, ksize.height);
        ui->filteredImage->setPixmap(ImageHandler::getQPixmap(image));
        break;
    case FILTER_BILATERAL:
        bilateralFilter(originalImage, image, 5, sigma, sigma);
        break;
    }
}
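OpenCV is strict about the kernel sizes these calls accept: GaussianBlur needs an odd, positive ksize and medianBlur needs an odd ksize greater than 1, so configureSpinBox() presumably enforces that. A small helper along those lines is sketched below; the function name is an assumption, not part of the class shown.

// Hedged helper sketch: snap an arbitrary slider value to a kernel size that every
// filter dispatched above will accept.
#include <algorithm>
#include <opencv2/core/core.hpp>

static cv::Size safeKernel(int requested)
{
    int k = std::max(3, requested);   // medianBlur needs ksize > 1
    if (k % 2 == 0)
        ++k;                          // GaussianBlur/medianBlur need an odd size
    return cv::Size(k, k);
}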
Example #16
void imageblurgm_draw(SkScalar fSigmaX, SkScalar fSigmaY, SkCanvas* canvas) {
        SkPaint paint;
        SkAutoTUnref<SkImageFilter> blur(SkBlurImageFilter::Create(fSigmaX, fSigmaY));
        paint.setImageFilter(blur);
        canvas->saveLayer(nullptr, &paint);
        const char* str = "The quick brown fox jumped over the lazy dog.";

        SkRandom rand;
        SkPaint textPaint;
        textPaint.setAntiAlias(true);
        sk_tool_utils::set_portable_typeface(&textPaint);
        for (int i = 0; i < 25; ++i) {
            int x = rand.nextULessThan(WIDTH);
            int y = rand.nextULessThan(HEIGHT);
            textPaint.setColor(sk_tool_utils::color_to_565(rand.nextBits(24) | 0xFF000000));
            textPaint.setTextSize(rand.nextRangeScalar(0, 300));
            canvas->drawText(str, strlen(str), SkIntToScalar(x),
                             SkIntToScalar(y), textPaint);
        }
        canvas->restore();
}
Example #17
/*
 * add_mi does the work necessary to set up an mspectrum object for modeling:
 *   - an entry in the m_State object is made for the parent ion M+H
 * Once an mspectrum has been added, the original mspectrum is no longer
 * needed for modeling, as all of the work associated with a spectrum
 * is done only once, prior to modeling sequences.
 */
bool mscore_tandem::add_mi(mspectrum &_s)
{
	if (!mscore::add_mi(_s))
		return false;

	if (m_vmiType.size() == 0)	{
		m_vmiType.reserve((long)m_vSpec.size());
		m_pplType = new unsigned long *[m_vSpec.size()+1];
		size_t a = 0;
		while(a < m_vSpec.size()+1)	{
			m_pplType[a] = NULL;
			a++;
		}
	}
/*
 * use blur to improve accuracy at the edges of the fragment ion m/z error range
 */
	blur(_s.m_vMI);

	return true;
}
Example #18
void Target::tuneThreshold(Mat imgInput)
{
	Mat src_gray, src = imgInput, detected_edges,dst;

	namedWindow("Control", CV_WINDOW_AUTOSIZE);
	int lowThreshold = 30;
	int upThreshold = 100;
	createTrackbar("LowThreshold", "Control", &lowThreshold, 255);
	createTrackbar("UppThreshold", "Control", &upThreshold, 1000);
	cvtColor(src, src_gray, CV_BGR2GRAY);

	while (waitKey(30) != 27)
	{
		blur(src_gray, detected_edges, Size(3, 3));
		Canny(detected_edges, detected_edges, lowThreshold, upThreshold, 3);
		dst = Mat::zeros(src.size(), src.type()); // allocate and clear so non-edge pixels stay black
		//
		src.copyTo(dst, detected_edges);
		imshow("Control", dst);
	}
}
Example #19
// gliese581h suggested filling a cv::Mat with descriptors to enable BFmatcher compatibility
// speed-ups and enhancements by gliese581h
void LUCIDImpl::compute(InputArray _src, std::vector<KeyPoint> &keypoints, OutputArray _desc) {
    cv::Mat src_input = _src.getMat();
    if (src_input.empty())
        return;
    CV_Assert(src_input.depth() == CV_8U && src_input.channels() == 3);

    Mat_<Vec3b> src;

    blur(src_input, src, cv::Size(b_kernel, b_kernel));

    int x, y, j, d, p, m = (l_kernel*2+1)*(l_kernel*2+1)*3, width = src.cols, height = src.rows, r, c;

    Mat_<uchar> desc(static_cast<int>(keypoints.size()), m);

    for (std::size_t i = 0; i < keypoints.size(); ++i) {
        x = static_cast<int>(keypoints[i].pt.x)-l_kernel, y = static_cast<int>(keypoints[i].pt.y)-l_kernel, d = x+2*l_kernel, p = y+2*l_kernel, j = x, r = static_cast<int>(i), c = 0;

        while (x <= d) {
            Vec3b &pix = src((y < 0 ? height+y : y >= height ? y-height : y), (x < 0 ? width+x : x >= width ? x-width : x));

            desc(r, c++) = pix[0];
            desc(r, c++) = pix[1];
            desc(r, c++) = pix[2];

            ++x;
            if (x > d) {
                if (y < p) {
                    ++y;
                    x = j;
                }
                else
                    break;
            }
        }
    }

    if (_desc.needed())
        sort(desc, _desc, SORT_EVERY_ROW | SORT_ASCENDING);
}
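A hedged usage sketch along the lines of the BFMatcher note above: compute LUCID descriptors for two images through the public xfeatures2d wrapper (rather than LUCIDImpl directly) and match them; the file names and the FAST detector choice are assumptions.

#include <opencv2/opencv.hpp>
#include <opencv2/xfeatures2d.hpp>

int main() {
    cv::Mat img1 = cv::imread("a.png"), img2 = cv::imread("b.png");
    cv::Mat gray1, gray2;
    cv::cvtColor(img1, gray1, cv::COLOR_BGR2GRAY);
    cv::cvtColor(img2, gray2, cv::COLOR_BGR2GRAY);

    std::vector<cv::KeyPoint> kp1, kp2;
    cv::Ptr<cv::FastFeatureDetector> fast = cv::FastFeatureDetector::create();
    fast->detect(gray1, kp1);
    fast->detect(gray2, kp2);

    // LUCID itself wants the 3-channel 8-bit images (see the CV_Assert above).
    cv::Ptr<cv::xfeatures2d::LUCID> lucid = cv::xfeatures2d::LUCID::create(1, 2);
    cv::Mat d1, d2;
    lucid->compute(img1, kp1, d1);
    lucid->compute(img2, kp2, d2);

    std::vector<cv::DMatch> matches;
    cv::BFMatcher matcher(cv::NORM_HAMMING);  // uchar descriptors; other norms are possible
    matcher.match(d1, d2, matches);
    return 0;
}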
Example #20
/* Update video */
void osd_update_video(void)
{
    /* Wait for VSync */
    if(option.vsync) vsync();

    /* Update the palette */
    if(option.video_depth == 8) update_palette();

    /* Use the blur effect */
    if(option.blur)
    {
        blur((uint16 *)&sms_bmp->line[bitmap.viewport.y][bitmap.viewport.x * bitmap.granularity],
                bitmap.viewport.w, bitmap.viewport.h, 
                (bitmap.width-bitmap.viewport.w) * bitmap.granularity
                );
    }

    if(option.fps) msg_print(2, 2, "%d", frame_rate);
    if(msg_enable)  msg_print(4, bitmap.viewport.h - 12, "%s", msg);

    blitter_proc(sms_bmp, screen);
}
Example #21
void CmSaliencyGC::HistgramGMMs()
{
	// Color quantization
	Mat _colorNums1f, _binBGR3f;
	CmColorQua::D_Quantize(_img3f, _HistBinIdx1i, _binBGR3f, _colorNums1f);
	//CmShow::HistBins(_binBGR3f, _colorNums1i, _nameNE + "_QT.jpg", true);
	_colorNums1f.convertTo(_colorNums1f, CV_32F);

	// Train GMMs
	Mat gmmIdx1i;
	_gmm.BuildGMMs(_binBGR3f, gmmIdx1i, _colorNums1f);
	_NUM = _gmm.RefineGMMs(_binBGR3f, gmmIdx1i, _colorNums1f);
	_PixelSalCi1f.resize(_NUM);
	_HistBinSalCi1f.resize(_NUM);
	_gmmClrs.resize(_NUM);
	_gmmW.resize(_NUM);
	_csd.resize(_NUM);
	_gu.resize(_NUM);
	_fSal.resize(_NUM);

	// Assign GMM color means and weights
	for (int i = 0; i < _NUM; i++)
		_gmmClrs[i] = _gmm.getMean(i), _gmmW[i] = _gmm.getWeight(i);

	// Assign per-pixel component probabilities by looking up each pixel's histogram bin
	_gmm.GetProbs(_binBGR3f, _HistBinSalCi1f);
#pragma omp parallel for
	for (int c = 0; c < _NUM; c++){
		_PixelSalCi1f[c].create(_img3f.size(), CV_32FC1);
		float *prob = (float*)(_HistBinSalCi1f[c].data);
		for (int y = 0; y < _HistBinIdx1i.rows; y++){
			const int *_idx = _HistBinIdx1i.ptr<int>(y);
			float* probCI = _PixelSalCi1f[c].ptr<float>(y);
			for (int x = 0; x < _HistBinIdx1i.cols; x++)
				probCI[x] = prob[_idx[x]];
		}
		blur(_PixelSalCi1f[c], _PixelSalCi1f[c], Size(3, 3));
	}
}
Example #22
void GrabCutMF::runGrabCutOpenCV(CStr &wkDir)
{
	CStr imgDir = wkDir + "Imgs/", salDir = wkDir + "Sal/";
	vecS namesNE;
	int imgNum = CmFile::GetNamesNE(imgDir + "*.jpg", namesNE);
	CmFile::MkDir(salDir);

	// Number of labels
	CmTimer tm("Time");
	tm.Start();
	for (int i = 0; i < imgNum; i++){
		printf("Processing %d/%d: %s.jpg%20s\r\n", i, imgNum, _S(namesNE[i]), "");
		CmFile::Copy(imgDir + namesNE[i] + ".jpg", salDir + namesNE[i] + ".jpg");
		CmFile::Copy(imgDir + namesNE[i] + ".png", salDir + namesNE[i] + "_GT.png");
		Mat imMat3u = imread(imgDir + namesNE[i] + ".jpg");
		Mat gt1u = imread(imgDir + namesNE[i] + ".png", CV_LOAD_IMAGE_GRAYSCALE);
		//imwrite(imgDir + namesNE[i] + ".bmp", gt1u);
		blur(gt1u, gt1u, Size(3,3));
		Rect wkRect = CmCv::GetMaskRange(gt1u, 1, 128);

		// Prepare data for OneCut
		//Mat rectImg = Mat::ones(gt1u.size(), CV_8U)*255;
		//rectImg(wkRect) = 0;
		//imwrite(salDir + namesNE[i] + ".bmp", imMat3u);
		//imwrite(salDir + namesNE[i] + "_t.bmp", rectImg);

		Mat res1u, bgModel, fgModel;
		grabCut(imMat3u, res1u, wkRect, bgModel, fgModel, 1, GC_INIT_WITH_RECT);
		grabCut(imMat3u, res1u, wkRect, bgModel, fgModel, 5);
		compare(res1u, GC_PR_FGD, res1u, CMP_EQ);
		imwrite(salDir + namesNE[i] + "_GC.png", res1u);
	}
	tm.Stop();
	double avgTime = tm.TimeInSeconds()/imgNum;
	printf("Speed: %gs, %gfps\t\t\n", avgTime, 1/avgTime);

	CmEvaluation::EvalueMask(imgDir + "*.png", salDir, "GC", "");
}
Example #23
void ContourThread::run()
{
	int thresh = 50;
	Mat src;
	Mat src_gray;
	Mat drawing;

	RNG rng(12345);

	Mat canny_output;
	vector<vector<Point>> contours;
	vector<Vec4i> hierarchy;

	while(true)
	{
		cap >> src;
		drawing  = Mat::zeros(src.size() , CV_8UC3);
		cvtColor(src,src_gray,COLOR_BGR2GRAY);
		blur(src_gray,src_gray,Size(3,3));

		Canny(src_gray,canny_output,thresh,thresh*2,3);
		findContours(canny_output,contours,hierarchy,RETR_TREE,CHAIN_APPROX_SIMPLE,Point(0,0));

		for(int i=0; i<contours.size(); i++)
		{
			Scalar color = Scalar(rng.uniform(0,255),rng.uniform(0,255),rng.uniform(0,255));
			drawContours(drawing,contours,i,color,2,8,hierarchy,0,Point());
		}

		emit NewImgContours(&drawing);

		contours.clear();
		hierarchy.clear();
		waitKey(30);
	}

	return;
}
Example #24
/*
* processFrame
*
* uses sequential images to detect motion in the left and right halves of the frame.
*
* preconditions:	frame must be a valid Mat object representing a single frame
*					from a VideoCapture object
* postconditions:	sets left and right paddles according to motion detected in the
*					left and right halves of the frame, respectively
*/
void MotionPaddleDetector::processFrame(Mat& frame) {
	Mat frame2, gray, gray2, thres, diff;

	// use sequential images (frame and frame2) for motion detection

	// read in frame and convert to grayscale
	m_vid->read(frame);
	flip(frame, frame, 1);
	cvtColor(frame, gray, COLOR_BGR2GRAY);

	// read in frame2 and convert to grayscale
	m_vid->read(frame2);
	flip(frame2, frame2, 1);
	cvtColor(frame2, gray2, COLOR_BGR2GRAY);

	// create difference image of frame and frame2 after both have been
	// converted to grayscale
	absdiff(gray, gray2, diff);

	// threshold difference
	threshold(diff, thres, THRESHOLD_SENSITIVITY, 255, THRESH_BINARY);

	// blur the image. output will be an intensity image
	blur(thres, thres, cv::Size(BLUR_SIZE, BLUR_SIZE));

	// threshold intensity image to get binary image (after blurring)
	threshold(thres, thres, THRESHOLD_SENSITIVITY, 255, THRESH_BINARY);

	// split threshold (now binary image) into left and right halves
	int x = thres.cols / 2;
	int y = thres.rows;
	Mat thresholdLeft(thres, Rect(0, 0, x, y));
	Mat thresholdRight(thres, Rect(x, 0, x, y));

	// detect motion in each half of the binary image
	detectMotion(thresholdLeft, frame, IS_RED);
	detectMotion(thresholdRight, frame, IS_BLUE);
}
Example #25
    void onDraw(int loops, SkCanvas* canvas) override {
        SkPaint paint;
        static const SkScalar kX = 0;
        static const SkScalar kY = 0;
        const SkRect bmpRect = SkRect::MakeXYWH(kX, kY,
                                                SkIntToScalar(fCheckerboard.width()),
                                                SkIntToScalar(fCheckerboard.height()));
        const SkImageFilter::CropRect cropRect(bmpRect.makeInset(10.f, 10.f));
        const SkImageFilter::CropRect cropRectLarge(bmpRect);
        SkAutoTUnref<SkImageFilter> noOpCropped(SkOffsetImageFilter::Create(0, 0, nullptr,
                                                &cropRect));

        SkImageFilter* input = fIsExpanded ? noOpCropped.get() : nullptr;

        const SkImageFilter::CropRect* crop =
            fIsExpanded ? &cropRectLarge : fIsCropped ? &cropRect : nullptr;
        SkAutoTUnref<SkImageFilter> blur(SkBlurImageFilter::Create(fSigmaX, fSigmaY, input, crop));
        paint.setImageFilter(blur);

        for (int i = 0; i < loops; i++) {
            canvas->drawBitmap(fCheckerboard, kX, kY, &paint);
        }
    }
Example #26
void captcha_render(unsigned char im[70*200], const unsigned char lts[6])
{
	unsigned i;
	unsigned char swr[200];
	uint8_t s1,s2;

	int f=open("/dev/urandom",O_RDONLY);
	read(f,swr,200); read(f,dr,sizeof(dr)); read(f,&s1,1); read(f,&s2,1);
	close(f);

	memset(im,0xff,200*70); s1=s1&0x7f; s2=s2&0x3f;
	int p=30;
	for (i = 0; i < 5; i++) {
		unsigned l;
		for (l = 0; l < sizeof(letters) / sizeof(*letters) - 1; l++)
			if (letters[l] == lts[i])
				break;
		if (l == sizeof(letters) / sizeof(*letters) - 1)
			l = 0;
		p = letter(l,p,im,swr,s1,s2);
	}
	line(im,swr,s1); dots(im); blur(im);
}
Example #27
void GoalPostDetector::CannyThreshold()
{
  /// Reduce noise with a kernel 3x3

  Mat greyMat;
  cv::cvtColor(originalImg, greyMat, CV_BGR2GRAY);

  cv::equalizeHist(greyMat, greyMat); // equalize the histogram to cope with changing lighting conditions

  Mat detected_edges;
  blur( greyMat, detected_edges, Size(3,3) );

  /// Canny detector
  Canny( detected_edges, detected_edges, lowThreshold, lowThreshold*ratio, kernelSize );

  /// Using Canny's output as a mask, we display our result
   img = Scalar::all(0);

   greyMat.copyTo( img, detected_edges);

   cv::threshold(img, img, 0, 255, cv::THRESH_BINARY);

}
Example #28
    virtual void onDraw(SkCanvas* canvas) {
        if (!fInitialized) {
            this->make_bitmap();
            fInitialized = true;
        }
        canvas->clear(0x00000000);
        {
            SkAutoTUnref<SkImageFilter> bitmapSource(new SkBitmapSource(fBitmap));
            SkAutoTUnref<SkColorFilter> cf(SkColorFilter::CreateModeFilter(SK_ColorRED,
                                                         SkXfermode::kSrcIn_Mode));
            SkAutoTUnref<SkImageFilter> blur(new SkBlurImageFilter(4.0f, 4.0f, bitmapSource));
            SkAutoTUnref<SkImageFilter> erode(new SkErodeImageFilter(4, 4, blur));
            SkAutoTUnref<SkImageFilter> color(SkColorFilterImageFilter::Create(cf, erode));
            SkAutoTUnref<SkImageFilter> merge(new SkMergeImageFilter(blur, color));

            SkPaint paint;
            paint.setImageFilter(merge);
            canvas->drawPaint(paint);
        }
        {
            SkAutoTUnref<SkImageFilter> morph(new SkDilateImageFilter(5, 5));

            SkScalar matrix[20] = { SK_Scalar1, 0, 0, 0, 0,
                                    0, SK_Scalar1, 0, 0, 0,
                                    0, 0, SK_Scalar1, 0, 0,
                                    0, 0, 0, SkFloatToScalar(0.5f), 0 };

            SkAutoTUnref<SkColorFilter> matrixFilter(new SkColorMatrixFilter(matrix));
            SkAutoTUnref<SkImageFilter> colorMorph(SkColorFilterImageFilter::Create(matrixFilter, morph));
            SkAutoTUnref<SkXfermode> mode(SkXfermode::Create(SkXfermode::kSrcOver_Mode));
            SkAutoTUnref<SkImageFilter> blendColor(new SkXfermodeImageFilter(mode, colorMorph));

            SkPaint paint;
            paint.setImageFilter(blendColor);
            canvas->drawBitmap(fBitmap, 100, 0, &paint);
        }
    }
Example #29
std::vector<cv::Mat> ObjectSelector::getObjects(cv::Mat * src, cv::Point2f p)
{
    std::vector<std::vector<cv::Point> > contours;
    std::vector<cv::Vec4i> hierarchy;
    cv::Mat canny_output;
    cv::Mat src_gray;
    cvtColor( *src, src_gray, CV_BGR2GRAY );
    blur( src_gray, src_gray, cv::Size(3,3) );


    /// Detect edges using canny
    Canny( src_gray, canny_output, thresh, thresh*2, 3 );
    /// Find contours
    findContours( canny_output, contours, hierarchy, CV_RETR_TREE, CV_CHAIN_APPROX_SIMPLE, cv::Point(0, 0) );

    std::priority_queue<TestedContour> results;

    for( int i = 0; i< contours.size(); i++ )
        results.push(TestedContour(i,-pointPolygonTest(contours[i],p,true)));

    std::vector<cv::Mat> retVal;

    for(int i = 0; i < maxRet && !results.empty(); i++)
    {
        TestedContour c = results.top(); results.pop();
        cv::Rect br = boundingRect( contours[c.Id()] );
        cv::Mat mask = cv::Mat::zeros(src->size(), CV_8UC1);
        std::vector<std::vector<cv::Point> > hull;
        hull.push_back(std::vector<cv::Point>());
        convexHull(contours[c.Id()],hull[0],1,true);
        drawContours( mask, hull, 0, cv::Scalar(255), CV_FILLED );
        cv::Mat tmp = cv::Mat::zeros(src->size(), src->type()); // full-size, so unmasked pixels stay black
        src->copyTo(tmp, mask);
        retVal.push_back(cv::Mat(tmp,br));
    }
    return retVal;
}
Example #30
void create_shadow(void) {
	glViewport(0,0,SIZE,SIZE);
	glScissor(0,0,SIZE,SIZE);
	glEnable(GL_SCISSOR_TEST);
	
    glBindTexture(GL_TEXTURE_2D,shadow_id);
	
	glClearColor(1,1,1,1);
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
	
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    glOrtho(-1,1,-1,1,-100,100);
    glMatrixMode(GL_MODELVIEW);
    glLoadIdentity();
    gluLookAt(light[0],light[1],light[2], mesh[0],mesh[1],mesh[2], 0,0,1);
    
	glColor3f(0.4,0.4,0.4);
	glTranslatef(mesh[0],mesh[1],mesh[2]);
	glRotatef(angle,0,0,1);
	glRotatef(angle / 3,1,0,0);
	glCallList(mesh_id);
	glColor3f(1,1,1);

	if(my_blur) {
		glReadPixels(0,0,SIZE,SIZE,GL_RGB,GL_UNSIGNED_BYTE,image);
		blur(image,SIZE);
		glTexSubImage2D(GL_TEXTURE_2D,0,0,0,SIZE,SIZE,GL_RGB,
			GL_UNSIGNED_BYTE,image);
	} else {
    	glCopyTexSubImage2D(GL_TEXTURE_2D,0,0,0,0,0,SIZE,SIZE);
	}
	
	glDisable(GL_SCISSOR_TEST);
	glViewport(0,0,WIDTH,HEIGHT);
}
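The software path above depends on a blur(image, SIZE) routine whose implementation is not shown; a minimal box-filter sketch over the tightly packed RGB readback buffer is given below as an assumption about what it might do (borders are left untouched).

// Assumed shape of blur(): 3x3 box filter over a size*size RGB byte buffer,
// reading from a copy so the smoothing is not applied in place.
#include <vector>

static void blur(unsigned char *image, int size) {
	std::vector<unsigned char> src(image, image + size * size * 3);
	for (int y = 1; y < size - 1; ++y)
		for (int x = 1; x < size - 1; ++x)
			for (int c = 0; c < 3; ++c) {
				int sum = 0;
				for (int dy = -1; dy <= 1; ++dy)
					for (int dx = -1; dx <= 1; ++dx)
						sum += src[((y + dy) * size + (x + dx)) * 3 + c];
				image[(y * size + x) * 3 + c] = (unsigned char)(sum / 9);
			}
}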