void MainWindow::captureImage(QString pref, int saveCount, bool display)
{
    // Save the current stereo pair under <project>/left and <project>/right
    // as "<pref>L<n>.png" / "<pref>R<n>.png".
    // (fixed typo: parameter was named "dispaly")
    pimage_1.save(projChildPath + "/left/" + pref + "L" + QString::number(saveCount) +".png");
    pimage_2.save(projChildPath + "/right/" + pref + "R" + QString::number(saveCount) +".png");

    if (!display)
        return;

    // Detect blobs in each camera image and show the annotated pixmaps.
    for (size_t camCount = 0; camCount < 2; camCount++) {
        const bool isLeft = (camCount == 0);

        // Wrap the raw camera buffer in a cv::Mat without copying the data.
        BYTE *buffer = isLeft ? image_1.bits() : image_2.bits();
        cv::Mat mat = cv::Mat(cameraHeight, cameraWidth, CV_8UC1, buffer);

        // Fixed threshold at 60, then invert so blobs become foreground.
        //int bwThreshold = dm->OSTU_Region(mat);
        cv::Mat bimage = mat >= 60;
        cv::bitwise_not(bimage, bimage);

        vector<cv::Point2d> centers;
        blob->findBlobs(bimage, centers);

        // Paint a cross on every detected blob centre on a copy of the
        // corresponding pixmap (original left/right pixmaps stay untouched).
        QPixmap pcopy = isLeft ? pimage_1 : pimage_2;
        {
            QPainter pt(&pcopy);
            pt.setPen(greencolor);
            for (size_t i = 0; i < centers.size(); i++) {
                drawCross(pt, centers[i].x, centers[i].y);
            }
        } // end painting before the pixmap is handed to the label

        if (isLeft)
            ui->leftCaptureLabel->setPixmap(pcopy);
        else
            ui->rightCaptureLabel->setPixmap(pcopy);
    }
}
void MainWindow::paintPoints()
{
    QPixmap pcopy_1 = pimage_1;
    QPixmap pcopy_2 = pimage_2;
    QPainter pt_1(&pcopy_1);
    QPainter pt_2(&pcopy_2);
    pt_1.setFont(textf);
    pt_2.setFont(textf);

    for(int i = 0;i < dm->dotForMark.size();i++)
    {
        if (dm->dotForMark[i][4] == 1){//表明是已知点
            pt_1.setPen(greencolor);
            pt_2.setPen(greencolor);
            pt_1.drawText(dm->dotForMark[i][0],dm->dotForMark[i][1],QString::number(dm->dotForMark[i][5]));
            pt_2.drawText(dm->dotForMark[i][2],dm->dotForMark[i][3],QString::number(dm->dotForMark[i][5]));
            drawCross(pt_1, dm->dotForMark[i][0] ,dm->dotForMark[i][1]);
            drawCross(pt_2, dm->dotForMark[i][2], dm->dotForMark[i][3]);
        }
        else{//表明是未知点
            pt_1.setPen(orangecolor);
            pt_2.setPen(orangecolor);
            drawCross(pt_1, dm->dotForMark[i][0] ,dm->dotForMark[i][1]);
            drawCross(pt_2, dm->dotForMark[i][2], dm->dotForMark[i][3]);
        }
    }
    ui->leftCaptureLabel->setPixmap(pcopy_1);
    ui->rightCaptureLabel->setPixmap(pcopy_2);

#ifdef DEBUG
    ImageViewer *iv = new ImageViewer;
    iv->showImage(pcopy_1);
    iv->show();
#endif
}
//--------------------------------------------------------------------
void glintFinder::drawLine(float x, float y, float width, float height, float len = -1) {

    ofPoint tempPos;

    if (len == -1) {
        len = width / 10;
    }

    ofEnableAlphaBlending();

    ofSetColor(255, 255, 255, 255);
    tempPos = getGlintPosition(GLINT_BOTTOM_LEFT);
    drawCross(tempPos, x, y, width, height, len);

    ofSetColor(255, 0, 0, 255);
    tempPos = getGlintPosition(GLINT_BOTTOM_RIGHT);
    drawCross(tempPos, x, y, width, height, len);

    if (bFourGlints) {

        ofSetColor(0, 200, 200, 100);
        tempPos = getGlintPosition(GLINT_TOP_LEFT);
        drawCross(tempPos, x, y, width, height, len);

        ofSetColor(200, 200, 0, 100);
        tempPos = getGlintPosition(GLINT_TOP_RIGHT);
        drawCross(tempPos, x, y, width, height, len);
    }

    ofDisableAlphaBlending();
}
int main(void) {

    // Canvas the mouse is tracked over.
    cv::Mat imgBlank(700, 900, CV_8UC3, cv::Scalar::all(0));

    std::vector<cv::Point> mousePositions;    // history of observed positions
    cv::Point predictedMousePosition;         // most recent prediction

    cv::namedWindow("imgBlank");
    cv::setMouseCallback("imgBlank", mouseMoveCallback);    // callback updates the global mousePosition

    while (true) {
        // Record the current position, then predict the next one from history.
        mousePositions.push_back(mousePosition);
        predictedMousePosition = predictNextPosition(mousePositions);

        std::cout << "current position        = " << mousePositions.back().x << ", " << mousePositions.back().y << "\n"
                  << "next predicted position = " << predictedMousePosition.x << ", " << predictedMousePosition.y << "\n"
                  << "--------------------------------------------------\n";

        // Actual position in white, predicted in blue.
        drawCross(imgBlank, mousePositions.back(), SCALAR_WHITE);
        drawCross(imgBlank, predictedMousePosition, SCALAR_BLUE);

        cv::imshow("imgBlank", imgBlank);

        cv::waitKey(10);    // brief pause so the OS can redraw the window

        imgBlank = cv::Scalar::all(0);    // wipe the canvas for the next frame
    }

    return 0;
}
Exemple #5
0
// Runs one frame through the tracker: background subtraction (MoG) plus
// morphology produces a foreground mask, connected components are matched
// against the currently tracked objects, and the results are drawn onto
// mask3C_. `*output` receives an unmodified copy of the input frame
// (allocated here if the caller passed a NULL image).
void ObjectTracker::processImage( IplImage* frame, IplImage** output )
{
    // TODO: this is only an Object Tracker, it should not return an illustrated image.
    //  instead, it should return a structure of information about tracked object.
    //  There need to be an Illustrator coded.
    //
    imgSize_ = cvSize(frame->width, frame->height);
    if (first_) {
        // One-time allocation of working buffers sized to the input frame.
        temp_ = cvCreateImage(imgSize_, IPL_DEPTH_8U, 3);
        mask3C_ = cvCreateImage(imgSize_, IPL_DEPTH_8U, 3);
        mask_ = cvCreateImage(imgSize_, IPL_DEPTH_8U, 1);
        first_ = false;
        numStat_ = 100;  // capacity of the component-statistics arrays
        rectsStat_ = new CvRect[numStat_];
        centersStat_ = new CvPoint[numStat_];
    }

    if (*output == NULL) {
        *output = cvCreateImage(imgSize_, IPL_DEPTH_8U, 3);
    }
    cvCopy(frame, *output);
    cvCopy(frame, temp_);
    cvSmooth(temp_, temp_, CV_GAUSSIAN);  // denoise before background subtraction
    mog_->process((cv::Mat)temp_, (cv::Mat)mask3C_);  // mixture-of-gaussians foreground mask
    cvCvtColor(mask3C_, mask_, CV_RGB2GRAY);

    // Opening removes speckle noise; closing fills holes in the blobs.
    cvMorphologyEx(mask_, mask_, 0, 0, CV_MOP_OPEN, openIteration_);
    cvMorphologyEx(mask_, mask_, 0, 0, CV_MOP_CLOSE, closeIteration_);

    // Collect statistics
    // (defensive re-allocation in case the arrays were released elsewhere)
    if (!rectsStat_)
        rectsStat_ = new CvRect[numStat_];
    if (!centersStat_)
        centersStat_ = new CvPoint[numStat_];

    // In: capacity of the arrays. Out: number of components actually found.
    count_ = numStat_;

    findConnectedComponents(mask_, 0, perimScaleThrhold_, &count_, rectsStat_, centersStat_);
    cvCvtColor(mask_, mask3C_, CV_GRAY2RGB);

    matchObjects(centersStat_, count_);
    // Illustrate each tracked object: its label text plus crosses at the
    // latest actual, predicted, and corrected positions (distinct colors).
    for (int i=0; i<currObjs_.size(); i++) {
        drawText(mask3C_, currObjs_[i].label_.c_str(),
                 currObjs_[i].positionHistory_[currObjs_[i].positionHistory_.size() - 1],
                 cvScalar(255,255,0));
        drawCross(mask3C_,
                  currObjs_[i].positionHistory_[currObjs_[i].positionHistory_.size() - 1],
                  5, cvScalar(255,0,0));
        drawCross(mask3C_,
                  currObjs_[i].predictedPositionHistory_[currObjs_[i].predictedPositionHistory_.size() - 1],
                  5, cvScalar(0,255,0));
        drawCross(mask3C_,
                  currObjs_[i].correctedPositionHistory_[currObjs_[i].correctedPositionHistory_.size() - 1],
                  5, cvScalar(0,0,255));
    }

    cvNamedWindow("4", CV_WINDOW_NORMAL);  // NOTE(review): debug window left in production path
    cvShowImage("4", mask3C_);
}
// --------------------------------------------------------
void ShiftWidgetRenderer::paint( GLResourceContainer * container, int pass, TouchWidgetRenderer * to_texture_renderer ) const
{
	Q_UNUSED(to_texture_renderer);

	// Only the first pass renders anything.
	if(pass != 0)
	{
		return;
	}

	Q_ASSERT(controller()->magnifyingGlasses().size() == 2);

	// Draw the most recently added magnifying glass plus crosses at both
	// its destination and source centres.
	const MagnifyingGlass * glass = controller()->magnifyingGlasses().back();
	drawMagnifyingGlass(container, glass);
	drawCross(container, glass->dstCenter());
	drawCross(container, glass->srcCenter());
	//drawWhiteCircle(container, glass->srcCenter(), glass->srcRadius());
	//drawMagnifyingGlass(container, controller()->magnifyingGlasses().front());
}
// ######################################################################
// Prompt the user to return the object back to the tray place an object
void promptReturnObjectToTray(SceneSetup const & setup, int objIdx)
{
  LINFO("Placing back object %d into its tray. Follow instructions in User Interactive window...", objIdx);
  const SceneSetup::Object &obj = setup.objects[objIdx];

  // Load the tray picture at half resolution.
  Image< PixRGB<byte> > img = Raster::ReadRGB(setup.setupPath + "/" + obj.trayFileName);
  img = rescale(img, img.getDims()/2);

  // The object's slot, stored as fractions of the tray image dimensions.
  Point2D<int> pos(obj.trayX*img.getWidth(), obj.trayY*img.getHeight());

  // Highlight the slot with a red circle and a red cross.
  drawCircle(img, pos, img.getWidth()/12, PixRGB<byte>(255, 0, 0), 2);
  drawCross(img, pos, PixRGB<byte>(255, 0, 0), img.getWidth()/8, 2);

  // Overlay a one-line instruction for the user.
  std::ostringstream instructions;
  instructions << "Place back Obj(" <<
    "row " << obj.trayRow <<
    ", col " << obj.trayColumn <<
    ") into tray " << obj.trayName <<
    " and ENTER";
  writeText(img, Point2D<int>(0, 0), instructions.str().c_str());

  userInteractiveWindow->drawImage(img, 0, 0, true);

  // eat all previous mouse clicks and key presses, just in case:
  while (userInteractiveWindow->getLastMouseClick() != Point2D<int>(-1, -1)) { }
  while (userInteractiveWindow->getLastKeyPress() != -1) { }

  // block until the user confirms with ENTER (keycode 36):
  while (userInteractiveWindow->getLastKeyPress() != 36) usleep(100000);

  LINFO("Done placing back object %d into its tray.", objIdx);
}
Exemple #8
0
/* Main scene callback: clears the color/depth buffers, draws the screen
   cross (before the camera transform is applied), binds the camera, renders
   the scene objects, and swaps the double buffers. */
void scene(){
	glClear(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT);
	glLoadIdentity();
	drawCross();            // drawn prior to pCamera->bindCamera(), i.e. in untransformed space
	pCamera->bindCamera();
	OBJ.render();
	sOBJ.render();
	snow.render();
	glutSwapBuffers();      // present the finished frame (double buffering)
}
void Graph::scatterPlotCross(){
    // Draw the axes/labels, then render every stored point as a cross.
    init(); //Draw axis and set labels
    setcolor(COLOR(255,255,255));
    outtextxy(425, 40, "SCATTER PLOT CROSSES");

    // Thicker solid line while drawing the crosses themselves.
    setlinestyle(SOLID_LINE, 1, 2 );
    setcolor(COLOR(174,137,118));
    for (int idx = 0; idx < counter ; ++idx) {
        drawCross(points[idx][0], points[idx][1]);
    }

    // Restore the default line thickness.
    setlinestyle(SOLID_LINE, 1, 1);
}
Exemple #10
0
// replace this with overlay
void
calImageWindow::draw()
{
  // Base window drawing first, then overlay the 2D calibration points.
  imWindow::draw();

  // Green when the point set is flagged good, red otherwise.
  Fl_Color color = goodpts ? FL_GREEN : FL_RED;

  // A cross per point (image coords converted to window coords)...
  for (int i = 0; i < num_pts2D; i++)
    drawCross(Im2WinX((int)(pts2D[i].x)), Im2WinY((int)(pts2D[i].y)), color);

  // ...and a box around the first point so it stands out.
  if (num_pts2D > 0)
    drawBox(Im2WinX((int)(pts2D[0].x)), Im2WinY((int)(pts2D[0].y)), color);
}
//--------------------------------------------------------------------
void glintFinder::drawLineOnBrightGlint(float x, float y, float width, float height, float len) {

    ofPoint tempPos;

    if (len == -1) {
        len = width / 10;
    }

    ofEnableAlphaBlending();

    ofSetColor(255, 255, 255, 255);
    tempPos = getGlintPosition(GLINT_IN_BRIGHT_EYE) / magRatio;
//	contourFinderBright.draw(x,y);
    drawCross(tempPos, x, y, width, height, len);

    ofDisableAlphaBlending();
}
// Overlays one marker per corner onto img_show. `corners` stores the
// points as interleaved (x, y) pairs in its data array; `sym` selects the
// marker glyph: 'c' = cross, 's' = square, '.' = single pixel (anything
// else draws nothing). (r, g, b) is the marker color.
void drawCorners(ImgRGB& img_show, const Mat_i& corners, uchar r, uchar g,
		uchar b, uchar sym) {
	const int numCorners = corners.rows;
	for (int idx = 0; idx < numCorners; ++idx) {
		int x0 = corners.data[2 * idx];
		int y0 = corners.data[2 * idx + 1];

		if (sym == 'c') {
			drawCross(img_show, x0, y0, r, g, b);
		} else if (sym == 's') {
			drawSquare(img_show, x0, y0, r, g, b);
		} else if (sym == '.') {
			drawPoint(img_show, x0, y0, r, g, b);
		}
	}
}
Exemple #13
0
// Returns a newly allocated image (caller owns it) showing the positions of
// the points on the map. If the map yields a degenerate canvas size, the
// error image is loaded and returned instead.
cv::Mat * drawPOI(std::vector<Eigen::Vector3d> * map) {
	// Canvas size = largest x (columns) and y (rows) coordinate seen,
	// truncated to int.
	int columns = 0;
	int rows = 0;
	for (unsigned int i = 0; i < map->size(); i++) {
		if ((*map)[i][0] > columns)
			columns = (*map)[i][0];
		if ((*map)[i][1] > rows)
			rows = (*map)[i][1];
	}

	// add a 20% margin so points don't sit on the border
	columns = 1.2 * columns;
	rows = 1.2 * rows;

	// create the image
	cv::Mat * image;

	if ((rows > 0) && (columns > 0)) {
		image = new cv::Mat(rows, columns, CV_8UC3, cv::Scalar(255, 255, 200));
	} else {
		// BUG FIX: the original wrote through the *uninitialized* pointer
		// (`*image = cv::imread(...)`), which is undefined behavior.
		// Allocate the Mat first, then load the error image into it.
		image = new cv::Mat(cv::imread("images/err_image.png", 0));
		return image;
	}

	// Cross size scales with the smaller canvas dimension.
	int cross_size = MIN(columns, rows) / 40;

	// draw one cross per POI
	for (unsigned int i = 0; i < map->size(); i++) {
		Eigen::Vector3d draw_here = (*map)[i];
		//		draw_here[1] = rows - draw_here[1];
		drawCross(image, draw_here, cross_size,4);
	}

	return image;
}
int main(void) {

    cv::KalmanFilter kalmanFilter(4, 2, 0);                             // instantiate Kalman Filter

    float fltTransitionMatrixValues[4][4] = { { 1, 0, 1, 0 },           // declare an array of floats to feed into Kalman Filter Transition Matrix, also known as State Transition Model
                                              { 0, 1, 0, 1 },
                                              { 0, 0, 1, 0 },
                                              { 0, 0, 0, 1 } };
    
    kalmanFilter.transitionMatrix = cv::Mat(4, 4, CV_32F, fltTransitionMatrixValues);       // set Transition Matrix

    float fltMeasurementMatrixValues[2][4] = { { 1, 0, 0, 0 },          // declare an array of floats to feed into Kalman Filter Measurement Matrix, also known as Measurement Model
                                               { 0, 1, 0, 0 } };

    kalmanFilter.measurementMatrix = cv::Mat(2, 4, CV_32F, fltMeasurementMatrixValues);     // set Measurement Matrix

    cv::setIdentity(kalmanFilter.processNoiseCov, cv::Scalar::all(0.0001));           // default is 1, for smoothing try 0.0001
    cv::setIdentity(kalmanFilter.measurementNoiseCov, cv::Scalar::all(10));         // default is 1, for smoothing try 10
    cv::setIdentity(kalmanFilter.errorCovPost, cv::Scalar::all(0.1));               // default is 0, for smoothing try 0.1

    cv::Mat imgBlank(700, 900, CV_8UC3, cv::Scalar::all(0));            // declare a blank image for moving the mouse over

    std::vector<cv::Point> predictedMousePositions;                 // declare 3 vectors for predicted, actual, and corrected positions
    std::vector<cv::Point> actualMousePositions;
    std::vector<cv::Point> correctedMousePositions;

    cv::namedWindow("imgBlank");                                // declare window
    cv::setMouseCallback("imgBlank", mouseMoveCallback);        // 

    while (true) {
        cv::Mat matPredicted = kalmanFilter.predict();

        cv::Point ptPredicted((int)matPredicted.at<float>(0), (int)matPredicted.at<float>(1));

        cv::Mat matActualMousePosition(2, 1, CV_32F, cv::Scalar::all(0));

        matActualMousePosition.at<float>(0, 0) = (float)ptActualMousePosition.x;
        matActualMousePosition.at<float>(1, 0) = (float)ptActualMousePosition.y;

        cv::Mat matCorrected = kalmanFilter.correct(matActualMousePosition);        // function correct() updates the predicted state from the measurement

        cv::Point ptCorrected((int)matCorrected.at<float>(0), (int)matCorrected.at<float>(1));

        predictedMousePositions.push_back(ptPredicted);
        actualMousePositions.push_back(ptActualMousePosition);
        correctedMousePositions.push_back(ptCorrected);

            // predicted, actual, and corrected are all now calculated, time to draw stuff

        drawCross(imgBlank, ptPredicted, SCALAR_BLUE);                      // draw a cross at the most recent predicted, actual, and corrected positions
        drawCross(imgBlank, ptActualMousePosition, SCALAR_WHITE);
        drawCross(imgBlank, ptCorrected, SCALAR_GREEN);
        
        for (int i = 0; i < predictedMousePositions.size() - 1; i++) {                  // draw each predicted point in blue
            cv::line(imgBlank, predictedMousePositions[i], predictedMousePositions[i + 1], SCALAR_BLUE, 1);
        }

        for (int i = 0; i < actualMousePositions.size() - 1; i++) {                     // draw each actual point in white
            cv::line(imgBlank, actualMousePositions[i], actualMousePositions[i + 1], SCALAR_WHITE, 1);
        }

        for (int i = 0; i < correctedMousePositions.size() - 1; i++) {                  // draw each corrected point in green
            cv::line(imgBlank, correctedMousePositions[i], correctedMousePositions[i + 1], SCALAR_GREEN, 1);
        }

        cv::imshow("imgBlank", imgBlank);         // show the image
        
        cv::waitKey(10);                    // pause for a moment to get operating system to redraw the imgBlank

        imgBlank = cv::Scalar::all(0);         // blank the imgBlank for next time around
    }

    return 0;
}
// Input-polling thread: debounces the four front-panel buttons (edge
// detection on high->low transitions), averages the five analogue inputs,
// and updates the shared oscilloscope / signal-generator state (menu
// navigation, wave types, offset, trigger level, time/voltage divisions).
// Runs until the global gShouldStop flag is set; `data` is unused.
void *inputCheck(void *data) {
	// Variables to be assigned values
	extern int frequency[];
	extern float amplitude[];
	extern float timeDiv;
	extern int triggerLevel;
	extern int voltageDiv;
	extern int waveType[2];
	extern int offset;
	extern int menu;
	extern int mode;

	//Stores latest value of buttons
	int latestValue[4];
	//Stores previous values for buttons
	// (initialized to 1 = "not pressed" so startup doesn't register an edge)
	int previousValue[4] = { 1, 1, 1, 1 };

	while (!gShouldStop) {

		//Handles Scroll Button Input
		if (is_high(scrollButton))
			latestValue[0] = 1;
		else
			latestValue[0] = 0;
		//Cycles through menu
		// (1 -> 0 transition = button press; only acts while in the menu, mode == -1)
		if (latestValue[0] == 0 && previousValue[0] == 1) {
			if (mode == -1) {
				menu++;
				menu %= 3;  // three menu entries: 0..2
				memcpy(displayBuffer, &menuBuffer[menu], 1024);
			}
		}
		previousValue[0] = latestValue[0];

		//Handles Select Button Input
		if (is_high(selectButton))
			latestValue[1] = 1;
		else
			latestValue[1] = 0;

		if (latestValue[1] == 0 && previousValue[1] == 1) {
			//Selects menu or goes back to menu
			if (mode == -1) {
				if (menu == 0) {
					mode = 0;   // oscilloscope
					memcpy(displayBuffer, &oscilliscopeBuffer, 1024);
				} else if (menu == 1) {
					mode = 1;   // signal generator
					memcpy(displayBuffer, &signalGenBuffer, 1024);
				} else if (menu == 2) {
					mode = 2;   // about screen
					memcpy(displayBuffer, &aboutBuffer, 1024);
				}
			} else {
				// Already in a mode: select acts as "back to menu".
				mode = -1;
				memcpy(displayBuffer, &menuBuffer[menu], 1024);
			}
		}
		previousValue[1] = latestValue[1];

		//Handles Wave Type For Channel 1 or Offset for oscilloscope
		if (is_high(button1))
			latestValue[2] = 1;
		else
			latestValue[2] = 0;

		if (latestValue[2] == 0 && previousValue[2] == 1) {
			if (mode == 1) {
				// Cycle channel-1 wave type 0 -> 1 -> 2 -> 0, updating the
				// two on-screen checkbox crosses at (26,57) and (58,57).
				if (waveType[0] == 0) {
					waveType[0] = 1;
					drawCross(26, 57, 0);
					drawCross(58, 57, 1);
				} else if (waveType[0] == 1) {
					waveType[0] = 2;
					drawCross(26, 57, 0);
					drawCross(58, 57, 0);
				} else {
					waveType[0] = 0;
					drawCross(26, 57, 1);
				}
			} else if (mode == 0) {
				// Oscilloscope mode: same button raises the trace offset (max 30).
				offset++;
				if (offset > 30)
					offset = 30;
			}
		}
		previousValue[2] = latestValue[2];

		//Handles Wave Type For Channel 2 or Offset for oscilloscope
		if (is_high(button2))
			latestValue[3] = 1;
		else
			latestValue[3] = 0;

		if (latestValue[3] == 0 && previousValue[3] == 1) {
			if (mode == 1) {
				// Cycle channel-2 wave type, crosses at (89,57) and (121,57).
				if (waveType[1] == 0) {
					waveType[1] = 1;
					drawCross(89, 57, 0);
					drawCross(121, 57, 1);
				} else if (waveType[1] == 1) {
					waveType[1] = 2;
					drawCross(89, 57, 0);
					drawCross(121, 57, 0);
				} else {
					waveType[1] = 0;
					drawCross(89, 57, 1);
				}
			} else if (mode == 0) {
				// Oscilloscope mode: lower the trace offset (min 1).
				offset--;
				if (offset < 1)
					offset = 1;
			}
		}

		previousValue[3] = latestValue[3];

		//Retrieves 10 analouge samples
		BBBIO_work(10);

		//Averages the retrieved smaples
		int sampleAverage[5] = { 0, 0, 0, 0, 0 };
		for (int n = 0; n < 10; n++) {
			for (int m = 0; m < 5; m++) {
				sampleAverage[m] += analougeInputs[m][n];
			}
		}
		for (int n = 0; n < 5; n++)
			sampleAverage[n] /= 10;
		//End of averaging

		//checks if device in oscilliscope mode
		if (mode == 0) {

			triggerLevel = 1 + (sampleAverage[2] * (62 - 1) / 4096); // Normalises trigger level to between 1 and 62

			voltageDiv = 1 + (sampleAverage[1] * (5 - 1) / 4096); // normalise to between 0 and 5 for voltage division

			sampleAverage[0] = (sampleAverage[0] * 6 / 4096); // normalise to between 0 and 6 for time divisions

			//10 pixels per division hence sampleSkip=sampleRate X timeDiv /10
			switch (sampleAverage[0]) {
			case 0:
				timeDiv = 0.1; //0.1mS
				sampleSkip = 1; //0.96
				break;
			case 1:
				timeDiv = 0.5; //0.5mS
				sampleSkip = 5; //4.8
				break;
			case 2:
				timeDiv = 1; //1mS
				sampleSkip = 10; //9.6
				break;
			case 3:
				timeDiv = 2; //2mS
				sampleSkip = 19; //19.2
				break;
			case 4:
				timeDiv = 5; //5mS
				sampleSkip = 48; //48
				break;
			case 5:
				timeDiv = 10; //10mS
				sampleSkip = 96; //96
				break;
			}

			printf(
					"\rTime Div = %.1f mS, Voltage Div = %.2f v,TriggerLevel= %d ",
					timeDiv, (float) voltageDiv / 4, triggerLevel);

			fflush(stdout);
			//Checks if device in signal generator mode
		} else if (mode == 1) {

			frequency[0] = ((50 + (sampleAverage[3] * (20000 - 50) / 4096)) / 50
					* 50); // normalise to 50-20000 and assigns to frequency1
			frequency[1] = ((50 + (sampleAverage[4] * (20000 - 50) / 4096)) / 50
					* 50); // normalise to 50-20000 and assigns to frequency2

			drawNumber(18, 19, frequency[0], 4);
			drawNumber(82, 19, frequency[1], 4);

			// Amplitudes as 0..1 fractions; on-screen readout shown as 0..100.
			amplitude[0] = (float) sampleAverage[1] / 4096;
			amplitude[1] = (float) sampleAverage[2] / 4096;
			drawNumber(42, 30, sampleAverage[1] * (100) / 4096, 1);
			drawNumber(106, 30, sampleAverage[2] * (100) / 4096, 1);
		}

		updateDisplay();
		usleep(30000);  // ~33 Hz polling rate
	}
	pthread_exit(NULL);

}
// Renders the agent grid. Optionally alpha-blends, draws the background
// grid and current cell selection, pre-computes the min/max M1M2 ratio over
// the filtered macrophages (when _drawm1m2 == 2), then draws each cell's
// contents (caseation cross, source quad, T cells / macrophages, external
// Mtb quad) according to the enabled draw flags and bit mask.
void AgentsVisualization::visualize(bool blend, const Simulation*, const ColorMap*) const
{
  if (blend)
    {
      // Additive blending without depth testing for translucent overlays.
      glEnable(GL_BLEND);
      glBlendFunc(GL_SRC_ALPHA, GL_ONE);
      glDisable(GL_DEPTH_TEST);
    }
  else
    {
      glDisable(GL_BLEND);
      glEnable(GL_DEPTH_TEST);
    }

  if (_drawGrid)
    {
      drawGrid();
    }

  // Highlight the selected cell (if any) with a thick yellow mark.
  if (_selRow != -1 && _selCol != -1)
    {
      glLineWidth(2.0f);
      glColor4f(1.0f, 1.0f, 0.0f, 1.0f);
      drawMark(_selRow, _selCol);
      glLineWidth(1.0f);
    }

  const std::vector<ScalarAgentItem>& grid = _pScalarAgentGrid->getGrid();
  double minRatio = DBL_MAX, maxRatio = 0;
  if(_drawm1m2 == 2)
    {
      // First pass: find the M1M2-ratio range over all macrophages that
      // pass the enable/state filters, so drawing can normalize against it.
      for(int i=0; i<_DIM; i++)
        for(int j=0; j<_DIM; j++)
          for(int k=0; k<2; k++)  // up to two agents per cell
            {
              if(!grid[i*_DIM+j]._pAgent[k]) continue;
              switch(grid[i*_DIM+j]._pAgent[k]->getAgentType())
                {
                case MAC:
                {
                  const Mac* pMac = static_cast<const Mac*>(grid[i*_DIM+j]._pAgent[k]);
                  if(!_macFilter[pMac->getState()][AgentsWidget::ENBL]) continue;
                  // Build a bit set of the mac's sub-states; OTHER marks
                  // macs with none of NFKB/STAT1/DEACT set.
                  char state = (pMac->getNFkB() << AgentsWidget::NFKB) | (pMac->getStat1() << AgentsWidget::STAT1) | (pMac->isDeactivated() << AgentsWidget::DEACT);
                  state |= (state == 0) << AgentsWidget::OTHER;
                  // Keep only sub-states the filter has enabled.
                  state &= (_macFilter[pMac->getState()][AgentsWidget::NFKB] << AgentsWidget::NFKB)
                           | (_macFilter[pMac->getState()][AgentsWidget::STAT1] << AgentsWidget::STAT1)
                           | (_macFilter[pMac->getState()][AgentsWidget::DEACT] << AgentsWidget::DEACT)
                           | (_macFilter[pMac->getState()][AgentsWidget::OTHER] << AgentsWidget::OTHER);
                  if(!state) continue;
                }
                break;
                default: continue;  // only macrophages contribute to the ratio range
                }
              minRatio = std::min(minRatio, grid[i*_DIM+j]._pAgent[k]->getM1M2Ratio());
              maxRatio = std::max(maxRatio, grid[i*_DIM+j]._pAgent[k]->getM1M2Ratio());
            }
    }

  // Second pass: draw every cell.
  for (int i = 0; i < _DIM; i++)
    for (int j = 0; j < _DIM; j++)
      {
        int val = grid[i * _DIM + j]._bitMask;
        if (GET_BIT(val, ScalarAgentGrid::_bitCas) && _drawCas)
        {
          // Caseation: white cross.
          glColor4f(1.0f, 1.0f, 1.0f, _gridAlpha);
          drawCross(i, j);
        }
        if (GET_BIT(val, ScalarAgentGrid::_bitSrc) && _drawSrc &&
            (!_drawSrcMac || GET_BIT(val, ScalarAgentGrid::_bitSrcMac)) &&
            (!_drawSrcTgam || GET_BIT(val, ScalarAgentGrid::_bitSrcTgam)) &&
            (!_drawSrcTcyt || GET_BIT(val, ScalarAgentGrid::_bitSrcTcyt)) &&
            (!_drawSrcTreg || GET_BIT(val, ScalarAgentGrid::_bitSrcTreg)))
          {
            // Source cell: light grey quad.
            glColor4f(0.8f, 0.8f, 0.8f, _gridAlpha);
            drawQuad(i, j);
          }
        if(_drawSquares) {  //Setup a priority for each agents drawn in case of drawing squares.
                 if(_drawTgam && GET_BIT(val, ScalarAgentGrid::_bitTgam)) drawTcell(TGAM, i, j);
            else if(_drawTcyt && GET_BIT(val, ScalarAgentGrid::_bitTcyt)) drawTcell(TCYT, i, j);
            else if(_drawTreg && GET_BIT(val, ScalarAgentGrid::_bitTreg)) drawTcell(TREG, i, j);
            else {
              int k=0;
              for(k=0;k<2;k++) {
                if(!(grid[i*_DIM+j]._pAgent[k] && (grid[i*_DIM+j]._pAgent[k])->getAgentType() == MAC)) continue;
                else if(drawMac(static_cast<const Mac*>(grid[i*_DIM+j]._pAgent[k]), i, j, k, minRatio, maxRatio))
                  k=3;  //Finish the loop and don't try to draw any more.
              }
              // k ends at 4 only if a mac was drawn (k=3 then loop ++).
              if(k>3)
                continue; //Drew *a* mac, skip extmtb later.
            }
        }
        else {
          // Non-square mode: draw every agent in the cell individually.
          for(int k=0;k<2;k++)
          {
            const Agent* agent = grid[i*_DIM+j]._pAgent[k];
            if(agent == NULL) continue;
            switch(agent->getAgentType())
            {
              case MAC:
               drawMac(static_cast<const Mac*>(agent), i, j, k, minRatio, maxRatio);
               break;
              default:
               drawTcell(agent->getAgentType(), i, j, k);
               break;
            }
          }
        }
        if (GET_BIT(val, ScalarAgentGrid::_bitExtMtb) && _drawExtMtb)
        {
          // External Mtb: dark yellow quad.
          glColor4f(0.67f, 0.67f, 0.0f, _gridAlpha);
          drawQuad(i, j);
        }
      }

  // Restore default GL state.
  glDisable(GL_BLEND);
  glEnable(GL_DEPTH_TEST);
}
// ######################################################################
// Prompt the user to place an object from a tray onto the table, and 
// then present them with an interface to outline that object
//
// Shows the tray image with the object's slot highlighted, waits for
// ENTER, then lets the user click exactly 4 outline points on a camera
// snapshot (ESCAPE undoes a point, SPACE refreshes the snapshot, ENTER
// accepts once 4 points are placed). Returns the 4-point polygon.
// NOTE(review): keycodes 36/9/65 appear to be X11 keycodes — verify on
// the target system.
std::vector< Point2D<int> > promptPlaceObjectOnScene(SceneSetup const & setup, int objIdx)
{
  LINFO("Place object %d onto table. Follow instructions in User Interactive window...", objIdx);

  const SceneSetup::Object &obj = setup.objects[objIdx];

  // Load the tray picture at half resolution.
  Image< PixRGB<byte> > trayImg = Raster::ReadRGB(setup.setupPath + "/" + obj.trayFileName);
  trayImg = rescale(trayImg, trayImg.getDims()/2);

  // Slot position is stored as fractions of the tray image size.
  Point2D<int> objPos(obj.trayX*trayImg.getWidth(), obj.trayY*trayImg.getHeight());

  // Highlight the slot with a red circle and cross.
  drawCircle(trayImg, objPos, trayImg.getWidth()/12, PixRGB<byte>(255, 0, 0), 2);
  drawCross(trayImg, objPos, PixRGB<byte>(255, 0, 0), trayImg.getWidth()/8, 2);

  std::ostringstream ss;
  ss << "Place Tray " << obj.trayName << 
    " (row " << obj.trayRow << 
    ", col " << obj.trayColumn << 
    ") on table and ENTER";
  writeText(trayImg, Point2D<int>(0, 0), ss.str().c_str());

  userInteractiveWindow->drawImage(trayImg, 0, 0, true);
  // Wait for ENTER (keycode 36) before starting the outline phase.
  while(userInteractiveWindow->getLastKeyPress() != 36) usleep(100000);
  
  // Grab a camera snapshot under the image mutex.
  pthread_mutex_lock(&imgMutex);
  Image<PixRGB<byte> > cameraImg = inputImage;
  pthread_mutex_unlock(&imgMutex);
  userInteractiveWindow->drawImage(cameraImg, 0, 0, true);
  std::vector< Point2D<int> > poly;
  std::string msg = "Outline new object using 4 points. ESCAPE to undo point. SPACE to refresh image. ENTER when finished.";

  // eat all previous mouse clicks and key presses, just in case:
  while (userInteractiveWindow->getLastMouseClick() != Point2D<int>(-1, -1)) { }
  while (userInteractiveWindow->getLastKeyPress() != -1) { }

  bool finished = false;
  while(!finished) {
    // Each click adds an outline vertex, up to 4.
    Point2D<int> mouseClick = userInteractiveWindow->getLastMouseClick();
    if(mouseClick != Point2D<int>(-1, -1) && poly.size() < 4)
      poly.push_back(mouseClick);

    int lastKeyPress = userInteractiveWindow->getLastKeyPress();
    switch(lastKeyPress) {
    case -1: // No Key
      break;
    case 9:  // ESCAPE
      if(poly.size()) poly.erase(poly.end()-1);  // undo last vertex
      break;
    case 36: // ENTER
      if(poly.size() == 4) finished = true;  // only accept a complete outline
      break;
    case 65: // SPACE
      // Refresh the camera snapshot.
      pthread_mutex_lock(&imgMutex);
      cameraImg = inputImage;
      pthread_mutex_unlock(&imgMutex);
      userInteractiveWindow->drawImage(cameraImg, 0, 0, true);
      break;
    default:
      LINFO("Key Pressed: %d", lastKeyPress);
      break;
    }

    // Redraw the snapshot with current vertices, polygon, and instructions.
    Image< PixRGB<byte> > dispImage = cameraImg;
    for(size_t i=0; i<poly.size(); ++i) drawCircle(dispImage, poly[i], 5, PixRGB<byte>(255, 0, 0), 3);
    drawOutlinedPolygon(dispImage, poly, PixRGB<byte>(0, 0, 255), Point2D<int>(0,0),0,1,0,0,3);
    writeText(dispImage, Point2D<int>(0,0), msg.c_str());
    userInteractiveWindow->drawImage(dispImage, 0, 0, true);

    usleep(100000);  // ~10 Hz UI refresh
  }

  LINFO("Done placing object %d onto table.", objIdx);

  return poly;
}
// Draws visual feedback for an in-progress stroke deformation: the center
// line of the copied (original) stroke over the deformation interval, the
// transformed stroke's center line in red, and — in debug builds only —
// crosses, corner markers, control points and a textual status readout.
// No-op when `sd` is null or has no copied stroke.
void ToonzExt::OverallDesigner::draw(ToonzExt::StrokeDeformation *sd)
{
	if (sd) {
		const TStroke *
			s;

		//glColor3d(1.0,0.0,1.0);
		s = sd->getCopiedStroke();
		if (s) {
			const ContextStatus *
				status = sd->getStatus();
			double
				w = 0.0,          // stroke parameter of the grab point
				pixelSize = 1.0;  // view scale; guard against negative values below

			if (status) {
				w = status->w_;
				pixelSize = status->pixelSize_ < 0 ? 1.0 : status->pixelSize_;
			}
#ifdef _DEBUG
			// Debug: mark the stroke start and the grab point.
			drawCross(s->getPoint(0),
					  2 * pixelSize);

			tglDrawCircle(s->getPoint(w), 8 * pixelSize);
			drawCross(s->getPoint(w), 8 * pixelSize);
#endif
			// Center line of the copied stroke, limited to the extremes of
			// the deformation interval.
			ToonzExt::Interval
				ex = sd->getExtremes();
			drawStrokeCenterLine(s,
								 pixelSize_,
								 ex);
			if (status) {
#ifdef _DEBUG
				glColor3d(0, 0, 0);
				showCorners(s,
							status->cornerSize_,
							pixelSize);

				// Debug text readout: designer position, grab parameter
				// and the deformation interval.
				glColor3d(0, 0, 0);
				TPointD
					offset = normalize(TPointD(1.0, 1.0)) * 20.0;
				std::ostringstream oss;
				oss << "(" << this->x_
					<< "," << this->y_
					<< ")\n{" << w
					<< ",{" << sd->getExtremes().first
					<< "," << sd->getExtremes().second
					<< "}}";
				extglDrawText(TPointD(x_, y_) + offset, oss.str());

				glColor3d(0.5, 1.0, 0.5);
				showCP(s,
					   pixelSize);
#endif
			}
		}

		/*
    glColor3d(1.0,1.0,0.0);
    s = sd->getStroke();
    if(s)
    {
      drawCross( s->getPoint(0),
                 4);

      drawStrokeCenterLine(s,
                           pixelSize_,
                           ToonzExt::Interval(0,1));
    }
    */

		// Transformed (deformed) stroke drawn in red.
		s = sd->getTransformedStroke();

		glColor3d(1.0, 0.0, 0.0);
		if (s) {
#ifdef _DEBUG
			isValid(s);
#endif
			drawStrokeCenterline(*s,
								 pixelSize_);
		}

#ifdef _DEBUG
		{
			// Debug-only comparison of control points between the copied
			// and current strokes (results currently unused).
			const TStroke
				*c = sd->getCopiedStroke(),
				*s = sd->getStroke();
			if (c && s) {
				//glColor3d(1,1,0);
				//tglDrawDisk(s->getPoint(0.0),
				//            5*pixelSize_);

				int count = std::min(c->getControlPointCount(),
									 s->getControlPointCount());
				for (int i = 0;
					 i < count;
					 ++i) {
					TThickPoint
						ccp = c->getControlPoint(i);
					TThickPoint
						scp = s->getControlPoint(i);
				}
			}
		}
#endif
	}
}
Exemple #19
0
// Camera / tracking thread entry point (pthread-compatible signature).
// Grabs video frames — from the remote ffmpeg stream or a local webcam,
// selected at compile time via output_video — runs the two tracking
// threads on every frame, turns the tracked ball position into yaw/gaz
// commands and sends them to the drone. Returns NULL when a key other
// than Enter is pressed.
// NOTE(review): communicates through many file-scope globals (Ball,
// liste, imgOriginal, imgDetection, posX, posY, enVol, stopTracking,
// mutexVideo, fSize, ...) — confirm their ownership before refactoring.
void* camera(void* arg) {
	//pFormatCtx=(AVFormatContext *)arg;
	char key;
	drawing=false;
	Ball.roll = Ball.pitch = Ball.gaz = Ball.yaw = 0;
	pthread_mutex_init(&mutexVideo, NULL);
	liste.suivant=NULL;
#if output_video == ov_remote_ffmpeg
	// Remote stream: a helper thread keeps the shared frame buffer filled.
	pthread_t ii;
	pthread_create(&ii, NULL, getimg, NULL);
#else	
	VideoCapture cap(0); // capture from the local webcam
#endif



#if output_video != ov_remote_ffmpeg

	if (!cap.isOpened()) {
		cout << "Impossible de lire le flux de la camera" << endl;
		return NULL;
	}
	// Grab one frame just to learn the frame size for the overlay below.
	Mat frame;
	cap >> frame;
	fSize.width = frame.cols;
	fSize.height = frame.rows;
#endif

	// Initialize the display windows.
	namedWindow(winDetected, 1);
	namedWindow(winOutputVideo, 1);

	// Create a black image the size of the captured frames (crosshair overlay).
	Mat imgLines = Mat::zeros(fSize.height, fSize.width, CV_8UC3);

	while (true) {

#if output_video != ov_remote_ffmpeg
		bool bSuccess = cap.read(imgOriginal); // grab a new frame
		if (!bSuccess) {
			cout << "Impossible de lire le flux video" << endl;
			break;
		}
#else
		// Copy the latest decoded ffmpeg frame while holding the mutex.
		pthread_mutex_lock(&mutexVideo);
		memcpy(img->imageData, pFrameBGR->data[0], pCodecCtx->width * ((pCodecCtx->height == 368) ? 360 : pCodecCtx->height) * sizeof(uint8_t) * 3);
		pthread_mutex_unlock(&mutexVideo);
		imgOriginal = cv::cvarrToMat(img, true);
#endif
		pthread_t mtId,ocId;
		// Launch the two tracking threads (template matching + OpenCV color tracking)
		pthread_create(&mtId, NULL, &matchTemplate, NULL);
		pthread_create(&ocId, NULL, &opencv, NULL);
		
		pthread_join(mtId,NULL);
		pthread_join(ocId,NULL);

		// Combine and interpret the results of the two trackers.
		Ball.setRealPos();

		// Build the crosshair overlay: white background, red cross at the
		// frame center, green cross at the tracked position.
		imgLines.setTo(Scalar(255, 255, 255));
		drawCross(imgLines, fSize.width / 2, fSize.height / 2, Scalar(0, 0, 255));
		drawCross(imgLines, posX, posY, Scalar(0, 255, 0));

		imgOriginal = imgOriginal & imgLines; // blend the overlay into the output frame

		// Show the windows.
		imshow(winDetected, imgDetection);			// image with the detection mask
		//imshow(winRepere, imgLines);				// crosshair overlay only
		imshow(winOutputVideo, imgOriginal);		// original image
		string Action = "Mouvement a effectuer : ";
		ObjCoord tmp = Ball.getRealPos();
		cout << "x " << tmp.Xcoord << " y " << tmp.Ycoord << " z " << tmp.Zcoord << endl;
		/*
		if(tmp.Zcoord == -1){
			Action += "Recule, "; Ball.pitch = 0.05f;
		}
		else if(tmp.Zcoord == 1){
			Action += "Avance, "; Ball.pitch = -0.05f;
		}
		else
		{
			Ball.pitch = 0;
		}
		*/
		// Horizontal position (percent of frame width) -> yaw command.
		if (tmp.Xcoord <= 35.0 && tmp.Xcoord != 0) {
			Ball.yaw = -0.2f;
			Action += "Gauche ("+ to_string(Ball.yaw)+"%), ";
		} else if (tmp.Xcoord >= 65.0) {
			Ball.yaw = 0.2f;
			Action += "Droite ("+ to_string(Ball.yaw)+"%), ";
		}
		else
		{
			Ball.yaw = 0;	
		}
		// Vertical position (percent of frame height) -> gaz (vertical speed).
		if (tmp.Ycoord >= 65.0) {
			Action += "Descendre";  Ball.gaz = -0.2f;
		} else if (tmp.Ycoord <= 35.0 && tmp.Ycoord != 0) {
			Action += "Monter";    Ball.gaz = 0.2f;
		}
		else
		{
			Ball.gaz = 0;
		}
		/*if(Ball.pitch != 0) {
			Ball.roll = Ball.yaw / 2;
			Ball.yaw = 0;
		}*/
		// (0,0,0) means the ball was not found: stop all movement.
		if(tmp.Xcoord == 0 && tmp.Ycoord == 0 && tmp.Zcoord == 0)
		{
			Ball.roll = Ball.pitch = Ball.gaz = Ball.yaw = 0;
			
		}
		if(Ball.pitch == 0)
			AtCmd::sendMovement(0, Ball.roll, Ball.pitch, Ball.gaz, Ball.yaw);
		else
			AtCmd::sendMovement(3, Ball.roll, Ball.pitch, Ball.gaz, Ball.yaw);
		//cout << Action << endl;
		// NOTE(review): waitKey() returns int; storing it in a plain char is
		// fragile on platforms where char is unsigned (-1 would never match).
		key=waitKey(10);
		if(key == 10) // Enter: mark the drone as flying
		{
			enVol=true;
			key=-1;
		}
		else if (key != -1) // any other key exits the loop
		{
			break;
		}
	}
	stopTracking=true;
	destroyAllWindows();
	return NULL;
}
// --------------------------------------------------------
// Paints touch-point feedback for render pass 0: a helper line from the
// current ball to its target widget, a translucent "bubble" halo around
// each touch point, and the touch cursor itself. The cursor texture is
// chosen from the touch state (pressed -> "_new", otherwise "_free" or
// "_bound" depending on pressed widgets). When rendering into a
// magnifying-glass texture (to_texture_renderer != NULL), touch points
// whose magnification visibility rules say so are filtered out.
// Fix: removed the stale Q_UNUSED(to_texture_renderer) — the parameter
// is actually used below, so the macro was misleading.
void TouchDisplayWidgetRenderer::paint( GLResourceContainer * container, int pass, TouchWidgetRenderer * to_texture_renderer ) const
{
	if(pass==0)
	{
		// draw line between ball and its target, plain white, untextured
		if(_twm->currentBallTarget() != NULL)
		{
			const RigidWidgetController * ball = _twm->currentBallTarget()->first;
			const RigidWidgetController * target = _twm->currentBallTarget()->second;
			glActiveTexture(GL_TEXTURE0);
			glBindTexture(GL_TEXTURE_2D, 0);
			glLineWidth(2);
			//glColor4f(240.0/255,198.0/255,0,1);
			glColor4f(1,1,1,1);
			glBegin(GL_LINE_STRIP);
			glVertex2f(ball->pos().x(), ball->pos().y());
			glVertex2f(target->pos().x(), target->pos().y());
			glEnd();
		}

		foreach(const SceneTouchPoint * stp, _tpm->touches())
		{
			// draw bubble
			//const QList<RigidWidgetController *> & closest_widgets = stp->closestRigidWidgets();
			//if(closest_widgets.size()>=2)
			//{
			//	// labeled as in the bubble cursor 2005 paper
			//	//float con_d_i = QLineF(closest_widgets[0]->pos(), stp->transformedPoint()->pos()).length() + closest_widgets[0]->radius();
			//	//float int_d_j = QLineF(closest_widgets[1]->pos(), stp->transformedPoint()->pos()).length() - closest_widgets[1]->radius();
			//	float con_d_i = closest_widgets[0]->containmentDistance(stp->transformedPoint()->pos());
			//	float int_d_j = closest_widgets[1]->intersectingDistance(stp->transformedPoint()->pos());
			//	float radius = qMin(con_d_i, int_d_j);
			//	drawTexturedQuad(container->texture("halo"), stp->transformedPoint()->pos(), QSizeF(radius, radius)*2, 0, 0.5f);
			//}

			QRectF bubble_rect = stp->bubbleRect();
			if(!bubble_rect.isEmpty())
			{
				Q_ASSERT(QLineF(stp->pos(), bubble_rect.center()).length() < 0.001f);
				drawTexturedQuad(container->texture("halo"), stp->pos(), bubble_rect.size(), 0, 0.3f);
			}

			// we don't want to show all the gory details in magnifying glasses
			if(to_texture_renderer!=NULL)
			{
				//qDebug() << stp->visibilityInMagnification();
				if(stp->visibilityInMagnification() == SceneTouchPoint::NeverVisible)
					continue;
				if(stp->visibilityInMagnification() == SceneTouchPoint::VisibleOnlyWithAreaCursor)
				{
					if(!stp->isFree() || _twm->selectionMode()!=DirectAreaSelection)
					//if(_twm->selectionMode()!=DirectAreaSelection)
						continue;
				}
			}
			else
			{
				if(!stp->visibleInScene())
					continue;
			}

			if(_tool_setting_container.boolProperty("simplified_mg") && to_texture_renderer!=NULL)
				continue;

			// determine texture name
			QString texture_name("touch_bright/touch");
			if(stp->state() == Qt::TouchPointPressed)
			{
				texture_name += "_new";
			}
			else
			{
				texture_name += (stp->numPressedWidgets() == 0) ? "_free" : "_bound";
				// TODO: SETTING "SHOW YOUNG CURSORS"
				//if(stp->isYoung())
				//{
				//	texture_name += "_young";
				//}
			}
			// geometry

			const QRectF & rect = stp->rect();
			
			if(_tool_setting_container.boolProperty("pointy_cursor"))
			{
				drawTexturedQuad(container->texture(texture_name), rect.center(), rect.size(), 0, 0.25f);
				drawHollowCircle(container, rect.center(), rect.size().width()/2*1.2f, 0.5f, Qt::white, 0.0004f, 300);
				drawCross(container, rect.center(), rect.size()*1.2f);
			}
			else
			{
				drawTexturedQuad(container->texture(texture_name), rect.center(), rect.size(), 0, 0.25f);
				drawHollowCircle(container, rect.center(), rect.size().width()/2*1.2f, 1.0f, Qt::white, 0.0005f, 300);
			}
		}
	}
}
// Renders the whole scene into the current GL context: the background
// texture quad, the optional triangulation / room-triangulation
// wireframes, waypoints, neighbour debug lines, the computed path,
// start/end points, path-collision crosses and the animation marker.
// No-op until a texture has been loaded. Each layer is pushed 0.2 units
// closer to the viewer via glTranslatef so it draws on top of the last.
void Drawing::DrawingImpl::paint()
{
	if (texture == 0) {
		return;
	}

#if MEASURE
	struct timeval fir; gettimeofday(&fir, NULL);
#endif

	glClear(GL_DEPTH_BUFFER_BIT | GL_COLOR_BUFFER_BIT);

	glMatrixMode(GL_PROJECTION);
	glLoadIdentity();
	// default projection: glOrtho(l=-1,r=1,b=-1,t=1,n=1,f=-1)
	glOrtho(0, texture->width(), 0, texture->height(), -1.0f, 4.0f);
	throwErrorFromGLError();

	glMatrixMode(GL_MODELVIEW);
	glLoadIdentity();
	glTranslatef(0.0f, 0.0f, -3.5f);

	// OpenGL origin = bottom-left
	// DevIL origin = top-left
	// => swap y coordinates in glTexCoord2f

	// Texture: full-window quad with the loaded image.
	glEnable(GL_TEXTURE_2D);
	texture->bind();
	glBegin(GL_POLYGON);
	glTexCoord2i(0, 1);
	glVertex2i(0, 0);
	glTexCoord2i(0, 0);
	glVertex2i(0, texture->height());
	glTexCoord2i(1, 0);
	glVertex2i(texture->width(), texture->height());
	glTexCoord2i(1, 1);
	glVertex2i(texture->width(), 0);
	glEnd();
	glDisable(GL_TEXTURE_2D);

	if (show_[ShowTriangulation]) {
		glTranslatef(0.0f, 0.0f, 0.2f);
		// light blue
		glColor3f(0.5f, 0.8f, 1.0f);
		glLineWidth(2.0f);
		for (std::vector<Triangle>::const_iterator it = triangulation.begin();
		     it != triangulation.end();
		     it++) {
			glBegin(GL_LINE_LOOP);

			for (int i = 0; i < 3; i++) {
				unsigned int x = (*it)[i].x;
				// flip y: triangle coords are top-left-origin (DevIL)
				unsigned int y = texture->height() - 1 - (*it)[i].y;
				glVertex2i(x, y);
			}

			glEnd();
		}
	}

	if (show_[ShowRoomTriangulation]) {
		glTranslatef(0.0f, 0.0f, 0.2f);
		// lighter blue
		glColor3f(0.6f, 0.9f, 1.0f);
		glLineWidth(2.0f);
		for (std::vector<Triangle>::const_iterator it = roomTriangulation.begin();
		     it != roomTriangulation.end();
		     it++) {
			glBegin(GL_LINE_LOOP);

			for (int i = 0; i < 3; i++) {
				unsigned int x = (*it)[i].x;
				unsigned int y = texture->height() - 1 - (*it)[i].y;
				glVertex2i(x, y);
			}

			glEnd();
		}
	}

	// Waypoints: bind the shared circle VBO used by drawPoint below.
	glBindBuffer(GL_ARRAY_BUFFER, circleVBO);
	glEnableClientState(GL_VERTEX_ARRAY);
	glVertexPointer(2, GL_DOUBLE, 0, 0);
	glBindBuffer(GL_ARRAY_BUFFER, 0);

	if (show_[ShowWaypoints]) {
		glTranslatef(0.0f, 0.0f, 0.2f);
		// yellow
		glColor3f(1.0f, 1.0f, 0.0f);
		std::set<Coord2D> const &waypoints = room->getWaypoints();
		for (std::set<Coord2D>::const_iterator it = waypoints.begin(); it != waypoints.end(); it++) {
			drawPoint(it->x, it->y);
		}
	}

#if 0
	for (std::vector< std::vector<Edge> >::const_iterator it = edges.begin(); it != edges.end(); it++) {
		unsigned char c1, c2, c3;

		c1 = rand() % 256;
		c2 = rand() % 256;
		c3 = rand() % 256;

		glColor3b(c1, c2, c3);
		glBegin(GL_LINES);
		for (std::vector<Edge>::const_iterator eit = it->begin(); eit != it->end(); eit++) {
			Edge edge = *eit;
			glVertex2f(edge.start.x, texture->height() - 1 - edge.start.y);
			glVertex2f(edge.end.x, texture->height() - 1 - edge.end.y);
		}
		glEnd();
	}
#endif

	// Neighbour debug lines: green for unselected, red for highlighted.
	// NOTE(review): glLineWidth(2.0f) set for a highlighted neighbour is
	// never reset, so it also affects lines drawn after it — confirm intended.
	for (std::map<Coord2D, bool>::const_iterator it = neighbourToShowNeighbours.begin();
	     it != neighbourToShowNeighbours.end();
	     it++) {
		if (!it->second) {
			glColor3b(0, 127, 0);
		} else {
			glColor3b(127, 0 ,0);
			glLineWidth(2.0f);
		}

		glBegin(GL_LINES);
		glVertex2f(neighbourToShow.x, texture->height() - 1 - neighbourToShow.y);
		glVertex2f(it->first.x, texture->height() - 1 - it->first.y);
		glEnd();
	}

	if (show_[ShowPath] && path.size() > 0) {
		glTranslatef(0.0f, 0.0f, 0.2f);
		// dark-yellow
		//glColor3f(0.7f, 0.7f, 0.0f);
		glColor3f(0.2f, 0.2f, 0.2f);
		glLineWidth(2.0f);
		glBegin(GL_LINE_STRIP);
		for (std::vector< Coord2DTemplate<float> >::const_iterator it = pathPoints.begin();
		     it != pathPoints.end();
		     ++it) {
			glVertex2f(it->x, texture->height() - 1 - it->y);
		}
		glEnd();
	}

	// Endpoint (red) and start point (green).
	glTranslatef(0.0f, 0.0f, 0.2f);
	glColor3f(1.0f, 0.0f, 0.0f);
	drawPoint(room->getEndpoint().x, room->getEndpoint().y);

	glColor3f(0.0f, 1.0f, 0.0f);
	drawPoint(room->getStartpoint().x, room->getStartpoint().y);

	// Collisions: red crosses at every point where the path collides.
	if (show_[ShowPath] && !pathCollisions.empty()) {
		glBindBuffer(GL_ARRAY_BUFFER, crossVBO);
		glEnableClientState(GL_VERTEX_ARRAY);
		glVertexPointer(2, GL_DOUBLE, 0, 0);
		glBindBuffer(GL_ARRAY_BUFFER, 0);
		glColor3f(1.0f, 0.0f, 0.0f);

		for (std::set< Coord2DTemplate<float> >::const_iterator it = pathCollisions.begin(); it != pathCollisions.end(); ++it) {
			drawCross(it->x, it->y);
		}
	}

	// Animation marker (purple) at the current animation position.
	if (animated) {
		glColor3f(0.63f, 0.13f, 0.94f);
		drawPoint(animationPosition->x, animationPosition->y);
	}

	glDisableClientState(GL_VERTEX_ARRAY);

#if MEASURE
	struct timeval sec; gettimeofday(&sec, NULL);
	struct timeval res; timersub(&sec, &fir, &res);
	statusText_->setText(statusText_->tr("rendering: %1 sec %2 usec\n").arg(res.tv_sec).arg(res.tv_usec));
#endif
}
// ######################################################################
// Debug visualization of the current center-surround belief state.
// Left half of the window: the input image with the current point (red
// cross) and the center (red) / surround (green) grid rectangles.
// Right half: a dimmed copy of the image overlaid with the normalized
// center-minus-surround belief map, plus the current CS rectangle.
// Blocks on Raster::waitForKey() until the user presses a key.
void CenterSurroundHistogramSegmenter::drawCurrentCSbelief
(Point2D<int> pt, Rectangle grC, Rectangle grS)
{
  uint width  = itsImage.getWidth();
  uint height = itsImage.getHeight();
  // Lazily create (or resize) the side-by-side debug window.
  if(itsWin.is_invalid())
    itsWin.reset(new XWinManaged(Dims(2*width, height), 0, 0, "CSHse"));
  else itsWin->setDims(Dims(2*width, height));

  uint gwidth  = width/GRID_SIZE;
  uint gheight = height/GRID_SIZE;

  // display the window      
  Image<PixRGB<byte> > disp(2*width, height, ZEROS);
  inplacePaste(disp, itsImage, Point2D<int>(0,0));
  if(pt.isValid())
    {
      drawCross(disp, pt, PixRGB<byte>(255,0,0), 10, 1);
    }
  if(grC.isValid())
    {
      // grid rectangles are in grid cells; scale back to pixels
      drawRect(disp, grC*GRID_SIZE, PixRGB<byte>(255,0,0), 1);
      drawRect(disp, grS*GRID_SIZE, PixRGB<byte>(0,255,0), 1);
    }

  // mVal caps the dimmed image intensity; bVal is the remaining range
  // used for the belief map so their sum stays within a byte.
  float mVal = 32;
  float bVal = 255 - mVal;
  
  Image<byte> dImaR, dImaG, dImaB;
  getComponents(itsImage, dImaR, dImaG, dImaB);
  inplaceNormalize(dImaR, byte(0), byte(mVal));
  inplaceNormalize(dImaG, byte(0), byte(mVal));
  inplaceNormalize(dImaB, byte(0), byte(mVal));
  Image<PixRGB<byte> > dIma  = makeRGB(dImaR,dImaG,dImaB);      
  
  // Image<float> dImaCf = itsGridCenterBelief;
  // inplaceNormalize(dImaCf, 0.0F, bVal);
  // Image<byte> dImaCb(dImaCf);
  // Image<PixRGB<byte> > dImaC = makeRGB(dImaCb,dImaCb,dImaCb);
  
  // Image<float> dImaSf = itsGridSurroundBelief;
  // inplaceNormalize(dImaSf, 0.0F, bVal);
  // Image<byte> dImaSb(dImaSf);
  // Image<PixRGB<byte> > dImaS = makeRGB(dImaSb,dImaSb,dImaSb);

  // Image<PixRGB<byte> > tdImaC(dIma+zoomXY(dImaC,GRID_SIZE));
  // Image<PixRGB<byte> > tdImaS(dIma+zoomXY(dImaS,GRID_SIZE));
  // inplacePaste (disp, tdImaC, Point2D<int>(width,0));
  // inplacePaste (disp, tdImaS, Point2D<int>(2*width,0));
 
  // Clamp (center - surround) belief at zero, normalize, and overlay it
  // on the dimmed image in the right half of the display.
  Image<float> dImaCSf = 
    clampedDiff((itsGridCenterBelief - itsGridSurroundBelief), 
              Image<float>(gwidth,gheight,ZEROS));
  inplaceNormalize(dImaCSf, 0.0F, bVal);
  Image<byte> dImaCSb(dImaCSf);
  Image<PixRGB<byte> > dImaCS = makeRGB(dImaCSb,dImaCSb,dImaCSb);
  Image<PixRGB<byte> > tdImaCS(dIma+zoomXY(dImaCS,GRID_SIZE));
  inplacePaste (disp, tdImaCS, Point2D<int>(width,0));

  // NOTE(review): unlike the left-half cross above, pt is not checked
  // with isValid() here — confirm callers always pass a valid point.
  Point2D<int> noff (width,0);
  drawCross(disp, pt+noff, PixRGB<byte>(255,0,0), 10, 1);
 
  if(itsCSrectangle.isValid())
    {
      drawRect(disp, itsCSrectangle*GRID_SIZE,        PixRGB<byte>(255,0,0), 1);
      drawRect(disp, (itsCSrectangle*GRID_SIZE)+noff, PixRGB<byte>(255,0,0), 1);
    }

  itsWin->drawImage(disp,0,0);
  Raster::waitForKey();
}
// Draws the connector between this callout note and the node it
// annotates inside its path bubble, plus the note's delete/minimize
// marks. When 'visible' is false only a small note mark is drawn, and
// only if either endpoint lies within the path bubble's rect.
void CalloutNote::drawNoteNodeConnector(QPainter *painter, bool visible)
{	
	float width;
	float height;
	QRectF itemPos;
	if(_scene->_pathBubbles[_pid]==NULL) //does not work
		return;
	PathBubble1* path=_scene->_pathBubbles[_pid];
	// NOTE(review): this re-checks the same pointer as above (the
	// "does not work" comment suggests the NULL guard is unreliable).
	if(!path || path==NULL || !path->isVisible())
		return;

	// Scene-space centers of the note and the path bubble.
	QPointF dis1=this->sceneBoundingRect().center();
	QPointF dis2=path->sceneBoundingRect().center();

	// Half-extents of the note, scaled by the bubble's zoom factor.
	if(fixedSize)
	{		
		width=graphReferenceSize/2*path->_scale; height=graphReferenceSize/2*path->_scale;
	}
	else
	{
		width=this->realRect().width()/2*path->_scale;
	    height=this->realRect().height()/2*path->_scale;
	}

	// Guard against a stale annotation-node index.
	if(_type == 'L' && _id>=path->ANodeRect.size())
		return;

	// Pick the rect of the annotated item by its type code.
	// NOTE(review): no default case — an unknown _type leaves itemPos as
	// a default-constructed (null) rect.
	switch(_type)
	{
		 case 'C': itemPos = path->complexRect[_id];  break;
		 case 'E': itemPos = path->physicalEntityRect[_id];  break;
		 case 'P': itemPos = path->proteinRect[_id];  break;				
		 case 'S': itemPos = path->smallMoleculeRect[_id];  break;
		 case 'D': itemPos = path->DnaRect[_id];  break;
		 case 'R': itemPos = path->reactionRect[_id];  break;
		 case 'L': itemPos = path->ANodeRect[_id];  break;
		 case 'M': itemPos = path->compartmentRect[_id];  break;
	}

	
	// Connector endpoints in scene coordinates.
	QPointF start,end;
	
	start=QPointF(0,0);
	end=itemPos.center();

	start=start+dis1;
	end=end+dis2;

	QRectF noteRect=this->sceneBoundingRect();	
	QRectF pathRect=path->sceneBoundingRect();
	
	// Re-center noteRect on the note's real (unscaled) size.
	float w,h;
	w=realRect().width(),  h=realRect().height();
	noteRect=QRectF(noteRect.center().x()-w/2, noteRect.center().y()-h/2, w, h );

	// Hit areas for the delete and minimize marks (top-right of note).
	QPointF markPos(end.x()+itemPos.width()*0.5, end.y()-itemPos.height()*0.1);
	_deleteMark = QRectF(noteRect.x()+noteRect.width()*0.90, noteRect.y()+noteRect.height()*0.03, noteRect.width()*0.08, noteRect.width()*0.08);	
	_minimizeMark = QRectF(noteRect.x()+noteRect.width()*0.80, noteRect.y()+noteRect.height()*0.03, noteRect.width()*0.08, noteRect.width()*0.08);	

	

	//clip	    
	QColor c=QColor(_colorBoarder.a, _colorBoarder.b, _colorBoarder.c, 255);
	if(visible)
	{		
		
		// Only draw the arrow when the target lies outside the note.
		if(!noteRect.contains(end))
		{
			drawArrow_5(painter, start, markPos-QPointF(itemPos.width()*0.04,0), QRect(noteRect.center().x()-w/2,noteRect.center().y()-h/2, w, h ), width, height, c );						
		}		
		drawCross(painter,_deleteMark, c);
		drawMinus(painter,_minimizeMark, c);
	}
	else 
	{
		//if within the path bubble
		if(pathRect.contains(start)|| pathRect.contains(end))
		   drawNoteMark(painter, start, end,  QRect(noteRect.center().x()-w/2,noteRect.center().y()-h/2, w, h ), markPos, markRect, width, height, c );						
	}		
}
Exemple #24
0
// quadview
// GLUT display callback for the four-pane ("quad") view: a 2D overlay
// (separator cross, per-pane axes, grids and captions) followed by the
// torus display list rendered once per pane — three orthographic views
// plus one perspective view — using scissored viewports.
void display4(void)
{
	// Full-window 2D projection for the overlay.
	glViewport(0, 0, width, height);
	glMatrixMode(GL_PROJECTION);
	glLoadIdentity();
	gluOrtho2D(0, width, 0, height);
	glMatrixMode(GL_MODELVIEW);
	glLoadIdentity();
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

	glDisable(GL_LIGHTING);

	const int halfW = width / 2;
	const int halfH = height / 2;

	// Cross separating the four panes.
	glColor3fv(edge_color);
	drawCross(halfW, halfH, width, height);

	const int quarterW = width / 4;
	const int quarterH = height / 4;
	const int axisMargin = height / 20;
	const int axisW = halfW - axisMargin;
	const int axisH = halfH - axisMargin;

	// Axes for the three orthographic panes.
	glColor3fv(axis_color);
	drawAxis(halfW + quarterW, quarterH, axisW, axisH); // bottom-right pane
	drawAxis(quarterW, quarterH, axisW, axisH);         // bottom-left pane
	drawAxis(quarterW, halfH + quarterH, axisW, axisH); // top-left pane

	// Grids for the same three panes.
	glColor3fv(grid_color);
	drawGrid(halfW + quarterW, quarterH, halfW, halfH);
	drawGrid(quarterW, quarterH, halfW, halfH);
	drawGrid(quarterW, halfH + quarterH, halfW, halfH);

	// Pane captions.
	const int textMargin = 5;
	glColor3fv(font_color);
	printText(textMargin, textMargin, "Front");
	printText(halfW + textMargin, textMargin, "Right");
	printText(textMargin, halfH + textMargin, "Top");
	printText(halfW + textMargin, halfH + textMargin, "Perspective");

	glEnable(GL_LIGHTING);
	glEnable(GL_SCISSOR_TEST);

	// Bottom-left pane: front view of the torus.
	glViewport(0, 0, halfW, halfH);
	glScissor(0, 0, halfW, halfH);
	projection(halfW, halfH, false);
	glRotatef(spin_y, 1.0, 0.0, 0.0);
	glRotatef(spin_x, 0.0, 1.0, 0.0);
	glCallList(torus_list);

	// Bottom-right pane: right view (extra 90° yaw before the spin).
	glViewport(halfW, 0, halfW, halfH);
	glScissor(halfW, 0, halfW, halfH);
	projection(halfW, halfH, false);
	glRotatef(90.0, 0.0, 1.0, 0.0);
	glRotatef(spin_y, 1.0, 0.0, 0.0);
	glRotatef(spin_x, 0.0, 1.0, 0.0);
	glCallList(torus_list);

	// Top-left pane: top view (extra -90° pitch before the spin).
	glViewport(0, halfH, halfW, halfH);
	glScissor(0, halfH, halfW, halfH);
	projection(halfW, halfH, false);
	glRotatef(-90.0, 1.0, 0.0, 0.0);
	glRotatef(spin_y, 1.0, 0.0, 0.0);
	glRotatef(spin_x, 0.0, 1.0, 0.0);
	glCallList(torus_list);

	// Top-right pane: perspective view.
	glViewport(halfW, halfH, halfW, halfH);
	glScissor(halfW, halfH, halfW, halfH);
	projection(halfW, halfH, true);
	glRotatef(spin_y, 1.0, 0.0, 0.0);
	glRotatef(spin_x, 0.0, 1.0, 0.0);
	glCallList(torus_list);

	glDisable(GL_SCISSOR_TEST);

	glutSwapBuffers();
}
Exemple #25
0
/**************************************************************************//**
 * @brief  Main function
 *
 * Touch-screen demo for the EFM32 development kit: waits for the AEM
 * button to hand control to the EFM, calibrates the touch screen with an
 * approximate matrix, then loops a simple UI — PB1 free-drawing mode,
 * PB2 3-point touch calibration, PB3 help screen. Re-initializes the
 * TFT whenever the direct-drive init fails.
 *****************************************************************************/
int main(void)
{
  uint32_t buttons;
  POINT touchSample, P[ 3 ];
  ADC_Init_TypeDef init = ADC_INIT_DEFAULT;

  /* Configure for 48MHz HFXO operation of core clock */
  CMU_ClockSelectSet(cmuClock_HF, cmuSelect_HFXO);

  /* Initialize DK board register access */
  BSP_Init(BSP_INIT_DEFAULT);

  /* If first word of user data page is non-zero, enable eA Profiler trace */
  BSP_TraceProfilerSetup();

  CMU_ClockEnable(cmuClock_GPIO, true);

  CMU_ClockEnable( cmuClock_ADC0, true);
  /* Max ADC clock is 13MHz, use 14MHz/(1+1) or 48MHz/(5+1) */
  init.prescale = 5;
  ADC_Init(ADC0, &init);
  /* NOTE(review): the local ADC init struct is 'init'; 'sInit' is
   * presumably a file-scope single-conversion init struct used by the
   * touch sampling code — confirm this assignment targets the right one. */
  sInit.reference = adcRefVDD;

  /* Set frame buffer start address */
  frameBuffer = (uint16_t *) EBI_BankAddress(EBI_BANK2);

  /* Make sure CYCCNT is running, needed by delay functions. */
  DWT_CTRL |= 1;

  /* Indicate we are waiting for AEM button state enabling EFM */
  BSP_LedsSet(0x8001);
  while (BSP_RegisterRead(&BC_REGISTER->UIF_AEM) != BC_UIF_AEM_EFM)
  {
    /* Show a short "strobe light" on DK LEDs, indicating wait */
    BSP_LedsSet(0x8001);
    delayMs(200);
    BSP_LedsSet(0x4002);
    delayMs(50);
  }

  /* Initialize touch screen calibration factor matrix with approx. values  */
  setCalibrationMatrix( (POINT*)&lcdCalibPoints,   /* Display coordinates   */
                        (POINT*)&touchCalibPoints, /* Touch coordinates     */
                        &calibFactors );      /* Calibration factor matrix  */

  while (1)
  {
    if ( TFT_DirectInit(&tftInit) )
    {
      displayHelpScreen();
      BSP_LedsSet(0x0000);
      BSP_PeripheralAccess(BSP_TOUCH, true);
      GPIO_PinModeSet(LCD_TOUCH_X1, gpioModeInput, 0);
      GPIO_PinModeSet(LCD_TOUCH_X2, gpioModeInput, 0);
      GPIO_PinModeSet(LCD_TOUCH_Y1, gpioModeInput, 0);
      GPIO_PinModeSet(LCD_TOUCH_Y2, gpioModeInput, 0);

      do
      {
        buttons = readButtons();

        /* Draw on screen while PB1 is the active mode; exit the inner
         * loop when any other button is pressed. */
        if ( buttons & BC_UIF_PB1 )
        {
          memset( frameBuffer, BLACK, 2 * WIDTH * HEIGHT );   /* Clear screen */

          do
          {
            if ( touched() )
            {
              touchSample = getTouchSample();
              drawPixel( touchSample.x, touchSample.y, COLOR );
            }
            delayMs( 2 );

            buttons = readButtons() & ~BC_UIF_PB1;
          } while ( buttons == 0 );
        }

        /* Calibrate touch screen: tap three markers, then rebuild the
         * calibration matrix from the sampled touch coordinates. */
        else if ( buttons & BC_UIF_PB2 )
        {
          memset( frameBuffer, BLACK, 2 * WIDTH * HEIGHT );   /* Clear screen */
          drawCross( lcdCalibPoints[ 0 ].x, lcdCalibPoints[ 0 ].y, COLOR );
          TFT_DrawString(30, 35, "Tap green marker" );
          P[ 0 ] = getTouchTapSample10bit();

          memset( frameBuffer, BLACK, 2 * WIDTH * HEIGHT );   /* Clear screen */
          drawCross( lcdCalibPoints[ 1 ].x, lcdCalibPoints[ 1 ].y, COLOR );
          TFT_DrawString(40, 130, "Tap green marker" );
          P[ 1 ] = getTouchTapSample10bit();

          memset( frameBuffer, BLACK, 2 * WIDTH * HEIGHT );   /* Clear screen */
          drawCross( lcdCalibPoints[ 2 ].x, lcdCalibPoints[ 2 ].y, COLOR );
          TFT_DrawString(20, 180, "Tap green marker" );
          P[ 2 ] = getTouchTapSample10bit();

          setCalibrationMatrix( (POINT*)&lcdCalibPoints,/* Display coordinates*/
                                &P[0],                  /* Touch coordinates  */
                                &calibFactors );  /* Calibration factor matrix*/

          memset( frameBuffer, BLACK, 2 * WIDTH * HEIGHT );   /* Clear screen */
          TFT_DrawString(10, 100, "The touch screen is" );
          TFT_DrawString(30, 130, "now calibrated !" );
        }

        /* Display help screen */
        else if ( buttons & BC_UIF_PB3 )
        {
          displayHelpScreen();
          while ( readButtons() & BC_UIF_PB3 ) {}
        }

      } while ( ( buttons & EXIT_LOOP ) == 0 );
    }
    else
    {
      /* TFT not available (e.g. AEM has the display): blink and retry. */
      BSP_LedsSet(0x8001);
      delayMs(200);
    }
  }
}
Exemple #26
0
// ######################################################################
// Test program for focus-of-expansion (FOE) detection from motion
// energy: reads an input frame series, compensates planar camera shift
// via SIFT, feeds frames to a FoeDetector, and compares the detected
// FOE against a ground-truth file, displaying both on the frame.
int main(const int argc, const char **argv)
{
  MYLOGVERB = LOG_INFO;  // suppress debug messages

  // catch SIGINT etc. so the main loop can exit cleanly
  volatile int signum = 0;
  catchsignals(&signum);

  ModelManager manager("Test Motion Energy");

  nub::ref<InputFrameSeries> ifs(new InputFrameSeries(manager));
  manager.addSubComponent(ifs);

  nub::ref<OutputFrameSeries> ofs(new OutputFrameSeries(manager));
  manager.addSubComponent(ofs);

  nub::ref<FoeDetector> fd(new FoeDetector(manager));
  manager.addSubComponent(fd);

  if (manager.parseCommandLine((const int)argc, (const char**)argv,
                               "<stimuli> <options>", 0, 9) == false)
    return(1);

  fd->reset(NUM_PYR_LEVEL, NUM_DIRS, NUM_SPEEDS);

  // optional first extra argument selects a synthetic stimulus type
  std::string stimuli("Image");
  if(manager.numExtraArgs() > 0)
    stimuli = manager.getExtraArgAs<std::string>(0);
  LINFO("Stimuli: %s", stimuli.c_str());

  manager.start();

  Timer timer(1000000);
  timer.reset();  // reset the timer
  int frame = 0;

  PauseWaiter p;

  uint step; step = 0;

  // to get to the good part
  //for(uint i = 0; i < 50; i++) //was 25
  //  ifs->updateNext();

  // get ground truth file 
  // NOTE(review): hard-coded lab path — this only runs where that file exists.
  std::string gtFilename
    ("/lab/tmpib/u/siagian/neuroscience/Data/FOE/driving_nat_Browning.txt");
  std::vector<Point2D<int> > gt = getGT(gtFilename);
  int ldpos = gtFilename.find_last_of('.');
  std::string prefix = gtFilename.substr(0, ldpos);

  // for finding ground truth
  rutz::shared_ptr<XWinManaged> win;
  
  float totalErr = 0.0;

  std::vector<std::string> args;
  for(uint i = 0; i < manager.numExtraArgs(); i++)
    args.push_back(manager.getExtraArgAs<std::string>(i)); 

  Image<byte> prevLum;
  Image<PixRGB<byte> > prevImage;
  Image<PixRGB<byte> > prevImage2;
  while (1)
    {
      if (signum != 0)
        {
          LINFO("quitting because %s was caught", signame(signum));
          break;
        }

      if (ofs->becameVoid())
        {
          LINFO("quitting because output stream was closed or became void");
          break;
        }

      if (p.checkPause())
        continue;

      const FrameState is = ifs->updateNext();
      if (is == FRAME_COMPLETE) break; // done receiving frames

      Image< PixRGB<byte> > input = ifs->readRGB();
      // lazily create the ground-truth window once the frame size is known
      if(frame == 0) 
        {
          uint width  = input.getWidth();
          uint height = input.getHeight();
          win.reset(new XWinManaged(Dims(width, height), 0, 0, "GT"));
        }

      // empty image signifies end-of-stream
      if (!input.initialized()) break;
      Image<byte> lum = luminance(input);
      Point2D<float> pshift(0.0,0.0); 
      if(step != 0)
        {
          // calculate planar shift using SIFT 
          lum = calculateShift(lum,prevLum, ofs);
        }
      // synthetic stimuli override the camera input entirely
      if( manager.numExtraArgs() > 0)
        lum = getImage(stimuli, args, fd, step);

      // for saving videos
      prevImage2 = prevImage;
      prevImage  = input;

      // NOTE: two statements on one line — step++ runs only when lum is valid
      if (!lum.initialized()) break; step++;

      // compute the focus of expansion (FOE)
      Point2D<int> foe = fd->getFoe(lum, FOE_METHOD_TEMPLATE, false);
      //Point2D<int> foe = fd->getFoe(lum, FOE_METHOD_AVERAGE);
      LINFO("[%d]Foe: %d %d", frame, foe.i, foe.j);

      // illustration of the size of the receptive field
      if(!stimuli.compare("ShowRF"))
        {
          uint rfI = 44;
          uint rfJ = 152;
          lum.setVal(rfI, rfJ, 300.0F);      
          drawRect(lum, Rectangle::tlbrI(144,36,159,51), byte(255));
          drawRect(lum, Rectangle::tlbrI(148,40,155,47), byte(255));
          
          drawRect(lum, Rectangle::tlbrI(rfJ-8, rfI-8, rfJ+8, rfI+8), byte(255));
          drawRect(lum, Rectangle::tlbrI(rfJ-16,rfI-16,rfJ+16,rfI+16), byte(255));
        }

      ofs->writeGrayLayout(fd->getMTfeaturesDisplay(lum), "MT Features",
                           FrameInfo("motion energy output images", SRC_POS));

      // Compare FOE to ground truth (offset by 2 frames) and keep a
      // running average error over frames >= 4.
      // NOTE(review): assumes gt has an entry for every index used —
      // confirm the ground-truth file length matches the input sequence.
      if(frame >= 4)
        {
          float err = foe.distance(gt[frame-2]); 
          totalErr += err;
          LINFO("Foe: %d %d: GT: %d %d --> %f --> avg: %f", 
                foe.i, foe.j, gt[frame-2].i, gt[frame-2].j, 
                err, totalErr/(frame-3));

          // draw detection (green) and ground truth (red) on the frame
          // from two steps back, matching the gt offset
          Image<PixRGB<byte> > simg = prevImage2;
          drawCross(simg, foe        , PixRGB<byte>(0,255,0), 10, 2);
          drawCross(simg, gt[frame-2], PixRGB<byte>(255,0,0), 10, 2);
          win->drawImage(simg,0,0);
          //Raster::WriteRGB(simg, sformat("%s_STnPS_%06d.ppm", prefix.c_str(), frame-2));
        }

      //ofs->writeGrayLayout
      //  (lum, "test-FOE Main", FrameInfo("foe output", SRC_POS));
      const FrameState os = ofs->updateNext();
      //LINFO("frame[%d]: %8.3f %8.3f", frame, pshift.i, pshift.j); 
      Raster::waitForKey();

      if (os == FRAME_FINAL)
        break;

      prevLum  = lum;
      frame++;
    }

  LINFO("%d frames in %gs (%.2ffps)\n", 
        frame, timer.getSecs(), frame / timer.getSecs());

  // stop all our ModelComponents
  manager.stop();

  // all done!
  return 0;

}
Exemple #27
0
/**************************************************************************//**
 * @brief  Main function
 *
 * USB bitmap-transfer demo: creates a FAT-formatted virtual USB drive
 * containing a readme file, then loops a touch-screen UI — PB1 free
 * drawing, PB2 3-point touch calibration, PB3 help screen, PB4 saves
 * the current TFT frame buffer to the drive as a bitmap.
 *
 * Fix: the delayMs(1) inside the drawing loop was indented as if it
 * belonged to the `if ( pos->pen )` body, but (without braces) it always
 * executed unconditionally. Braces and indentation now match the actual
 * behavior; no behavior change.
 *****************************************************************************/
int main(void)
{
  uint32_t buttons;
  POINT P[ 3 ];
  TOUCH_Config_TypeDef touch_config = TOUCH_INIT_DEFAULT;
  const char readmeText[] = \
    "USB Bitmap transfer using USB drive functionality.\r\n\r\n"\
    "This example demonstrate use several functionalities:\r\n"\
    "1. Creation of virtual drive in system with FAT FS,\r\n"\
    "2. Mounting the drive on PC and file transfer,\r\n"\
    "3. Bitmap file creation based on TFT frame buffer content,\r\n"\
    "4. Resistive touch panel interaction.\r\n\r\n"\
    "On system startup initial drive is created and\r\n"\
    "formatted using FAT FS then simple readme.txt file\r\n"\
    "is put on file system. Every time user press PB4 key\r\n"\
    "new file, containing TFT frame buffer in bitmap format\r\n"\
    "is added. All files could be retrieved after connecting\r\n"\
    "board to PC by means of USB. For this connection use\r\n"\
    "small USB socket located on Leopard Gecko CPU board, not\r\n"\
    "the big one on development kit.\r\n\r\n"\
    "If new files doesn't appear on drive after pressing PB4,\r\n"\
    "try to reconnect the board to PC.\r\n\r\n"\
    "Board:  Energy Micro EFM32LG-DK3650 Development Kit\r\n"\
    "Device: EFM32LG990F256\r\n";


  /* Configure for 48MHz HFXO operation of core clock */
  CMU_ClockSelectSet(cmuClock_HF, cmuSelect_HFXO);

  /* Initialize DK board register access */
  BSP_Init(BSP_INIT_DEFAULT);

  /* If first word of user data page is non-zero, enable eA Profiler trace */
  BSP_TraceProfilerSetup();

  CMU_ClockEnable(cmuClock_GPIO, true);

  CMU_ClockEnable( cmuClock_ADC0, true);

  /* Set frame buffer start address */
  frameBuffer = (uint16_t *) EBI_BankAddress(EBI_BANK2);

  /* Make sure CYCCNT is running, needed by delay functions. */
  DWT_CTRL |= 1;

  /* Initialize USB subsystem and prepare for taking pictures */
  BITMAP_Init();
  /* Create our first file on disk - simple readme */
  BITMAP_CreateFileAndSaveData("README.TXT", readmeText, sizeof(readmeText));

  /* Indicate we are waiting for AEM button state enabling EFM */
  while (BSP_RegisterRead(&BC_REGISTER->UIF_AEM) != BC_UIF_AEM_EFM)
  {
    /* Show a short "strobe light" on DK LEDs, indicating wait */
    BSP_LedsSet(0x8001);
    delayMs(100);
    BSP_LedsSet(0x4002);
    delayMs(100);
  }

  touch_config.frequency = 13000000; /* use max ADC frequency */
  touch_config.ignore = 0;           /* notice every move, even 1 pixel */
  TOUCH_Init(&touch_config);

  /* Initialize touch screen calibration factor matrix with approx. values  */
  setCalibrationMatrix( (POINT*)&lcdCalibPoints,   /* Display coordinates   */
                        (POINT*)&touchCalibPoints, /* Touch coordinates     */
                        &calibFactors );      /* Calibration factor matrix  */
  while (1)
  {
    delayMs(100);
    if ( TFT_DirectInit(&tftInit) )
    {
      delayMs(100);
      displayHelpScreen();
      BSP_LedsSet(0x0000);
      BSP_PeripheralAccess(BSP_TOUCH, true);
      GPIO_PinModeSet(LCD_TOUCH_X1, gpioModeInput, 0);
      GPIO_PinModeSet(LCD_TOUCH_X2, gpioModeInput, 0);
      GPIO_PinModeSet(LCD_TOUCH_Y1, gpioModeInput, 0);
      GPIO_PinModeSet(LCD_TOUCH_Y2, gpioModeInput, 0);

      do
      {
        delayMs(25);
        buttons = readButtons();

        /* Draw on screen while in PB1 mode; PB4 saves a bitmap without
         * leaving the mode, any other button exits it. */
        if ( buttons & BC_UIF_PB1 )
        {
          memset( frameBuffer, BLACK, 2 * WIDTH * HEIGHT );   /* Clear screen */

          do
          {
            TOUCH_Pos_TypeDef *pos = TOUCH_GetPos();
            if ( pos->pen )
            {
              drawPixel( pos->x, pos->y, COLOR );
            }
            delayMs(1);   /* runs every iteration, pen down or not */

            buttons = readButtons() & ~BC_UIF_PB1;
            if(buttons == BC_UIF_PB4)
            {
              getNicePicture();
              buttons &= ~BC_UIF_PB4;
            }
          } while ( buttons == 0 );
        }

        /* Calibrate touch screen: tap three markers, then rebuild the
         * touch calibration table from the sampled coordinates. */
        else if ( buttons & BC_UIF_PB2 )
        {
          memset( frameBuffer, BLACK, 2 * WIDTH * HEIGHT );   /* Clear screen */
          drawCross( lcdCalibPoints[ 0 ].x, lcdCalibPoints[ 0 ].y, COLOR );
          TFT_DrawString(30, 35, "Tap green marker" );
          P[ 0 ] = getTouchTapSample10bit();

          memset( frameBuffer, BLACK, 2 * WIDTH * HEIGHT );   /* Clear screen */
          drawCross( lcdCalibPoints[ 1 ].x, lcdCalibPoints[ 1 ].y, COLOR );
          TFT_DrawString(40, 130, "Tap green marker" );
          P[ 1 ] = getTouchTapSample10bit();

          memset( frameBuffer, BLACK, 2 * WIDTH * HEIGHT );   /* Clear screen */
          drawCross( lcdCalibPoints[ 2 ].x, lcdCalibPoints[ 2 ].y, COLOR );
          TFT_DrawString(20, 180, "Tap green marker" );
          P[ 2 ] = getTouchTapSample10bit();

          TOUCH_CalibrationTable((POINT*)&lcdCalibPoints,/* Display coordinates*/
                                &P[0]);                  /* Touch coordinates  */

          memset( frameBuffer, BLACK, 2 * WIDTH * HEIGHT );   /* Clear screen */
          TFT_DrawString(10, 100, "The touch screen is" );
          TFT_DrawString(30, 130, "now calibrated !" );
        }

        /* Display help screen */
        else if ( buttons & BC_UIF_PB3 )
        {
          displayHelpScreen();
          while ( readButtons() & BC_UIF_PB3 )
             delayMs(50);
        } else if( buttons & BC_UIF_PB4 )
        {
          getNicePicture();
        }
      } while ( ( buttons & EXIT_LOOP ) == 0 );
    }
    else
    {
      /* TFT not available (e.g. AEM owns the display): blink and retry. */
      BSP_LedsSet(0x8001);
      delayMs(100);
      BSP_LedsSet(0x4002);
    }
  }
}
/**
 * Runs the tracking loop: repeatedly consumes camera frames, extracts the
 * largest color-matched blob for this tracker's QuadcopterColor, converts its
 * centroid into field-of-view-scaled coordinates and forwards them to the
 * data receiver. Loops until stopFlag is set, then tears down its windows.
 */
void Tracker::executeTracker()
{
	#define PI (3.1415926535897932384626433832795028841)
	// Kinect field of view: 43° vertical, 57° horizontal.
	// The factors map a pixel offset from the image center to tangent units.
	double verticalScalingFactor = tan(43 * PI / 180) / 240;
	double horizontalScalingFactor = tan(57 * PI / 180) / 320;
	ROS_DEBUG("Scaling factors: %lf/%lf", horizontalScalingFactor, verticalScalingFactor);

	bool quadcopterTracked = false;

	// Working images, allocated once and reused for every frame.
	cv::Mat cameraImage(cv::Size(640, 480), CV_8UC3); // Only for showCameraImage == true.
	cv::Mat maskedImage(cv::Size(640, 480), CV_8UC3); // Only for showMaskedImage == true.
	cv::Mat image(cv::Size(640, 480), CV_8UC3); // The raw image from the camera.
	cv::Mat mapImage(cv::Size(640, 480), CV_8UC1); // The color mapped image.
	cv::Mat hsvImage(cv::Size(640, 480), CV_8UC3);  // The raw image in hsv format.

	// cvBlob working data.
	cvb::CvBlobs blobs;
	IplImage *labelImg = cvCreateImage(image.size(), IPL_DEPTH_LABEL, 1);
	cv::Mat morphKernel = cv::getStructuringElement(CV_SHAPE_RECT, cv::Size(5, 5));
	cvb::CvTracks tracks;
	IplImage iplMapImage;

	while (!stopFlag) {
		// Wait for a fresh frame; warn when the producer outpaced us.
		if (imageDirty == 0) {
			usleep(100);
			continue;
		} else if (imageDirty > 1) {
			ROS_WARN("Skipped %d frames!", imageDirty - 1);
		}

		START_CLOCK(trackerClock)

		// Copy the shared frame under the lock, then release the lock quickly.
		imageMutex.lock();
		((cv::Mat*) this->image)->copyTo(image);
		long int time = this->imageTime;
		imageDirty = 0;
		imageMutex.unlock();

		if (showCameraImage)
			image.copyTo(cameraImage);

		createColorMapImage(image, mapImage, hsvImage);

		if (showMaskedImage) {
			// Expand the single-channel mask to a 3 channel image for display.
			int target = 0;

			// size_t avoids the signed/unsigned mismatch against total().
			for (size_t i = 0; i < mapImage.total(); ++i) {
				maskedImage.data[target++] = mapImage.data[i];
				maskedImage.data[target++] = mapImage.data[i];
				maskedImage.data[target++] = mapImage.data[i];
			}
		}

		// Morphological open removes small noise speckles before labeling.
		cv::morphologyEx(mapImage, mapImage, cv::MORPH_OPEN, morphKernel);

		// Finding blobs
		// Only copies headers.
		iplMapImage = mapImage;
		unsigned int result = cvLabel(&iplMapImage, labelImg, blobs);
		// ROS_DEBUG("Blob result: %d", result);

		// Filter blobs by area to drop noise and oversized regions.
		cvFilterByArea(blobs, 10, 1000000);

		if (showCameraImage || showMaskedImage)
			cvUpdateTracks(blobs, tracks, 200., 5);

		if (showMaskedImage) {
			// Only copies headers.
			IplImage iplImage = maskedImage;
			cvRenderBlobs(labelImg, blobs, &iplImage, &iplImage, CV_BLOB_RENDER_BOUNDING_BOX);
			cvRenderTracks(tracks, &iplImage, &iplImage, CV_TRACK_RENDER_ID | CV_TRACK_RENDER_BOUNDING_BOX);
			ROS_DEBUG("Exiting visual masked block"); // TODO Tracking down
			                                          // issue #7
		}

		if (showCameraImage) {
			// Only copies headers.
			IplImage iplImage = cameraImage;
			cvRenderBlobs(labelImg, blobs, &iplImage, &iplImage, CV_BLOB_RENDER_BOUNDING_BOX);
			cvRenderTracks(tracks, &iplImage, &iplImage, CV_TRACK_RENDER_ID | CV_TRACK_RENDER_BOUNDING_BOX);
			ROS_DEBUG("Exiting visual camera block"); // Fixed copy-paste: this
			                                          // message previously
			                                          // duplicated the masked
			                                          // block's. Issue #7
		}

		if (showCameraImage || showMaskedImage)
			cvReleaseTracks(tracks);

		if (blobs.size() != 0) {
			// The biggest blob's centroid is taken as the quadcopter position.
			cvb::CvLabel largestBlob = cvLargestBlob(blobs);
			CvPoint2D64f center = blobs.find(largestBlob)->second->centroid;
			double x = center.x;
			double y = center.y;

			// Set (0, 0) to the image center.
			x -= 320;
			y -= 240;
			// ROS_DEBUG("Center: %lf/%lf", x, y);

			// Apply field-of-view scaling.
			x *= horizontalScalingFactor;
			y *= verticalScalingFactor;

			dataReceiver->receiveTrackingData(cv::Scalar(x, y, 1.0), ((QuadcopterColor*) qc)->getId(), time);

			if (showMaskedImage)
				drawCross(maskedImage, center.x, center.y);

			if (showCameraImage)
				drawCross(cameraImage, center.x, center.y);

			// Log only on tracked/untracked transitions to avoid log spam.
			if (!quadcopterTracked) {
				quadcopterTracked = true;
				ROS_DEBUG("Quadcopter %d tracked", ((QuadcopterColor*) this->qc)->getId());
			}
		} else if (quadcopterTracked) {
			quadcopterTracked = false;
			ROS_DEBUG("Quadcopter %d NOT tracked", ((QuadcopterColor*) this->qc)->getId());
		}

		// Free cvb stuff.
		cvReleaseBlobs(blobs);

		// ROS_DEBUG("cvb stuff freed"); // TODO Tracking down issue #7

		if (showMaskedImage) {
			cv::imshow(maskedWindowName, maskedImage);

			ROS_DEBUG("showed masked image"); // TODO Tracking down issue #7
		}

		if (showCameraImage) {
			cv::imshow(cameraWindowName, cameraImage);

			ROS_DEBUG("showed camera image"); // TODO Tracking down issue #7
		}

		STOP_CLOCK(trackerClock, "Calculation of quadcopter position took: ")
	}

	cvReleaseImage(&labelImg);

	if (showMaskedImage)
		cv::destroyWindow(maskedWindowName);

	if (showCameraImage)
		cv::destroyWindow(cameraWindowName);

	ROS_INFO("Tracker with id %d terminated", ((QuadcopterColor*) this->qc)->getId());
}
Exemple #29
0
void Tracking::begin(){
    if(!capture.isOpened()){
        std::cerr << "Problem opening video source" << std::endl;
    }

    std::vector<cv::KeyPoint> newKeyPoints;
    int nf = 0;
    while((char)cv::waitKey(20) != 'q' && capture.grab()){
        capture.retrieve(frame);
        cv::Mat gray;
        cv::cvtColor(frame, gray, CV_BGR2GRAY);
        if(background.size().width == 0){
            background = gray.clone();
        }

        int rnd = rand()%sampleRate;
        if(rnd == 0){
            background = backgroundSubtructor.getBackground(gray);
        }
        //skip 20 first frames
        if(nf < 20){
            nf++;
            continue;
        }

        foreground = backgroundSubtructor.subtract(gray, background );
        cv::erode(foreground, foreground, cv::Mat());
        cv::dilate(foreground, foreground, cv::Mat());
        cv::dilate(foreground, foreground, cv::Mat());
        cv::erode(foreground, foreground, cv::Mat());


        cv::imshow("xx", background);

        std::vector<std::vector<cv::Point> > newContours;
        cv::findContours(foreground, newContours, CV_RETR_EXTERNAL, CV_CHAIN_APPROX_NONE);
        std::vector<cv::Mat> movingRegions;

        std::vector<std::vector<cv::Point> > contours;
        for(size_t i = 0; i < newContours.size(); i++){
            if(cv::contourArea(newContours[i]) > minContourArea){
                cv::Mat region = cv::Mat::zeros(frame.size(), CV_8UC1);
                cv::drawContours(region, newContours, i, cv::Scalar::all(1), CV_FILLED, 8, std::vector<cv::Vec4i>(), 0, cv::Point());
                movingRegions.push_back(imageMultiply(gray, region));
                contours.push_back(newContours[i]);
            }
        }

        cv::drawContours(frame, newContours, -1, cv::Scalar(0,0,255), 1);
        std::vector<cv::Moments> mu(contours.size() );                                                                                                          
        for( size_t i = 0; i < contours.size(); i++ ){
            mu[i] = cv::moments( contours[i], false );
        }
        
        std::vector<cv::Point2f> newMc( contours.size() );                                                                                                         
        for( size_t i = 0; i < contours.size(); i++ ){ 
            newMc[i] = cv::Point2f( mu[i].m10/mu[i].m00 , mu[i].m01/mu[i].m00 ); 
        }

        //if(measurement.at<float>(0) < 0){
        if(!initialised){
            tracker = cv::Mat::zeros(frame.size(), CV_8UC3);
            initialised = true;
        }
        //cv::drawKeypoints(foreground, newKeyPoints, tracker, CV_RGB(0,255,0), cv::DrawMatchesFlags::DEFAULT);
        std::vector<cv::Mat> newDescriptors;
        std::vector<int> keyPointCount;
        std::vector<cv::Point2f> mc;
        for(int i = 0; i < movingRegions.size(); i++){
            cv::Mat des;

            std::vector<cv::KeyPoint> kp; 
            detector.detect(movingRegions[i], kp);
            extractor.compute(frame, kp, des);
            if(des.size().width > 0){
                newDescriptors.push_back(des);
                keyPointCount.push_back(kp.size());
                mc.push_back(newMc[i]);
            }
        }
        if(newDescriptors.size() > 0 && objects.size() == 0){
            for(int i = 0; i < newDescriptors.size(); i++){
                MovingObject obj(mc[i], newDescriptors[i]);
                objects.push_back(obj);
            }
        }else if(newDescriptors.size() > 0 && objects.size() > 0 ){
            for(int objIndex = 0; objIndex < objects.size(); objIndex++){
                double bestMatch = minDistance;
                double score = minScore;
                int found = -1;
                for(int desIndex = 0; desIndex < newDescriptors.size(); desIndex++){
                    cv::vector<cv::DMatch> matches;
                    matcher.match(objects[objIndex].descriptor, newDescriptors[desIndex], matches);
                    int nm = 0;
                    for (int matchIndex = 0; matchIndex < matches.size(); matchIndex++){
                        if(matches[matchIndex].distance < minDistance){
                            nm++; 
                        }
                    }
                    if(nm*1.0 / keyPointCount[desIndex] > score){
                        score = nm*1.0 / keyPointCount[desIndex];
                        found = desIndex;
                    }
                }
                if( found >= 0){
                    objects[objIndex].descriptor = newDescriptors[found];
                    objects[objIndex].process(mc[found]);
                    mc.erase(mc.begin() + found);
                    newDescriptors.erase(newDescriptors.begin() + found);

                    //draw new Point
                    drawCross(tracker, objects[objIndex].getMeasurement(), objects[objIndex].measurementColor, 2);
                    drawCross(tracker, objects[objIndex].getPrediction(), objects[objIndex].predictionColor, 2);
                    drawCross(tracker, objects[objIndex].getEstimatedPosition(), objects[objIndex].estimatedColor, 2);
  //                  objects[objIndex].draw(tracker);
                }else{
                    objects[objIndex].updateWithoutCorrectrion();
                }
                
                if(objects[objIndex].noMeasurement > 50 || objects[objIndex].found == 0){
                    objects.erase(objects.begin() + objIndex);
                }

            }
                //add new objects
            for(int i = 0; i < newDescriptors.size(); i++){
                MovingObject obj(mc[i], newDescriptors[i]);
                objects.push_back(obj);
            }
        }

        cv::imshow("Video", frame);
        cv::imshow("Tracker", tracker);
    }
}
/*************************************************
  Body of the vision-server.
    Acquires camera frames, runs the image-processing pipeline
    (red-color extraction, binarization, labeling), and serves the
    raw/processed images over sockets (accepted on a worker thread).
************************************************/
int main (int argc, char **argv){
  CvSize size;
  int step;
  CvCapture *cap;
  IplImage *capture_image;
  IplImage *frame_image;
  IplImage *processed_image;
  IplImage *grayImage; 
  IplImage *binaryImage;
  unsigned char* binarydata;

  CvFont font;
  char text[50];
  char hostname[30];
  int s, i, port = 9000;
  pthread_t tid;

  /*** Socket-communication setup (from here) ***/
  // Optional "-port N" argument overrides the default port 9000.
  for (i=1;i<argc;i++){
    if (strcmp("-port", argv[i]) == 0) {
      port=atoi(argv[++i]);
    }}
  gethostname(hostname, sizeof(hostname));
  s = init_socket_server(hostname, &port);
  fprintf(stderr, "hostname %s\n", hostname);
  for (i=0; i< MAX_SOCKET ; i++) sockets[i].type=0;
  // Accept incoming connections on a separate thread.
  fprintf(stderr, "Waiting connection...\n");
  pthread_create(&tid, NULL, acceptor, (void *)s);
  /*** Socket-communication setup (up to here) ***/

  /** Semaphore setup ***/
  // NOTE(review): both semget() calls below use key 1111, so
  // raw_semaphore and process_semaphore refer to the SAME SysV
  // semaphore set. This looks unintended (two distinct keys were
  // probably meant) -- confirm before relying on them independently.
  raw_semaphore = semget((key_t)1111, 1, 0666|IPC_CREAT);
  if(raw_semaphore == -1){
    perror("semget failure");
    exit(EXIT_FAILURE);
  }
  process_semaphore = semget((key_t)1111, 1, 0666|IPC_CREAT);
  if(process_semaphore == -1){
    perror("semget failure");
    exit(EXIT_FAILURE);
  }
  union semun semunion;
  semunion.val = 0;  // initial semaphore value
  if(semctl(raw_semaphore, 0, SETVAL, semunion) == -1){
    perror("semctl(init) failure");
    exit(EXIT_FAILURE);
  }
  if(semctl(process_semaphore, 0, SETVAL, semunion) == -1){
    perror("semctl(init) failure");
    exit(EXIT_FAILURE);
  }
  /** Semaphore setup (up to here) ***/

  /** Camera / image-acquisition setup (from here) ***/
  //camera initialization 
  if((cap = cvCreateCameraCapture(-1))==NULL){
    printf("Couldn't find any camera.\n");
    return -1;
  }
  // Grab one frame just to learn the native capture dimensions.
  capture_image = cvQueryFrame(cap);
  width = capture_image->width;
  height = capture_image->height;
  fprintf(stderr, "height %d, width %d\n", height, width);
  fprintf(stderr, "process height %d, process width %d\n", process_height, process_width);
  /** Camera / image-acquisition setup (up to here) ***/

  /** Preparation for image processing (red-color extraction) ***/
  // Font setup (cvPutText segfaults if the font is left uninitialized).
  float hscale = 1.0f;
  float vscale = 1.0f;
  float italicscale = 0.0f;
  int thickness = 3;
  cvInitFont(&font, CV_FONT_HERSHEY_COMPLEX, hscale, vscale, italicscale, thickness, CV_AA);
  // End of font setup.
  // Set threshold
  rgb_thre[0] = R_MIN_THRE;
  rgb_thre[1] = R_MAX_THRE;
  rgb_thre[2] = G_MIN_THRE;
  rgb_thre[3] = G_MAX_THRE;
  rgb_thre[4] = B_MIN_THRE;
  rgb_thre[5] = B_MAX_THRE;


  // Allocate the image buffers used by the processing pipeline.
  frame_image = cvCreateImage(cvSize(process_width, process_height), IPL_DEPTH_8U, 3);
  processed_image = cvCreateImage(cvSize(process_width, process_height), IPL_DEPTH_8U, 3);
  /** Preparation for image processing (up to here) ***/

  
  /**** Binarization buffers used to compute blob areas ***/
  grayImage = cvCreateImage(cvGetSize(frame_image), IPL_DEPTH_8U, 1);
  binaryImage = cvCreateImage(cvGetSize(frame_image), IPL_DEPTH_8U, 1);
  
  //Labeling init
  label_buf = (int*)malloc(sizeof(int)*frame_image->width*frame_image->height);

  /**** Main loop ****/
  while(1){
    CvPoint centroid;
    // Capture a camera frame.
    capture_image = cvQueryFrame(cap);
    if (capture_image==NULL) {
      fprintf(stderr, "capture_image is %p\n", capture_image);
      continue;
    }
    cvResize(capture_image, frame_image, CV_INTER_LINEAR);

    // Process the camera image.
    maskRGB(frame_image, processed_image, rgb_thre);          // red-color extraction
    // Binarize
    myBinarize(processed_image, grayImage, binaryImage);
    cvDilate(binaryImage, grayImage, NULL, 10); // dilate
    cvErode(grayImage, binaryImage, NULL, 15);  // erode
    // Labeling
    cvGetRawData(binaryImage, &binarydata, &step, &size);
    labeling(binarydata, frame_image->height, frame_image->width, label_buf, step);
    label_num = labeling_result(&linfo, label_buf, frame_image->height, frame_image->width);
    // Write the processing results onto the output image.
    {
      int i,n;
      n=25;
      //fprintf(stderr, "num is %d\n", label_num);
      for(i=0; i<label_num; i++){
        //fprintf(stderr, "area %d, x %d y %d\n", linfo[i].area, (int)linfo[i].xpos, (int)linfo[i].ypos);
        centroid.x = (int) linfo[i].xpos;
        centroid.y = (int) linfo[i].ypos;
        drawCross(processed_image, &centroid, CV_RGB(0, 255, 0));                                 // draw a cross mark
        sprintf(text, "X: %d Y: %d AREA: %d", centroid.x, centroid.y, linfo[i].area);             // write the values
        cvPutText(processed_image, text, cvPoint(n, (height-n*(i+1))), &font, CV_RGB(0, 255, 0)); //
      }
    }
    // image -> rawdata
    sema_wait(raw_semaphore);
    cvGetRawData(frame_image, &rawdata, &step, &size);
    
    // process image -> process data
    sema_wait(process_semaphore);
    cvGetRawData(processed_image, &processdata, &step, &size);

    //sleep
    usleep(30000);
  }
  //release the capture object
  cvReleaseCapture(&cap);
  return 0;
}