Example #1
void MainWindow::toggleTracking()
{    
    if (isTracking)
        stopTracking();
    else
        startTracking();
}
Example #2
void Window::configureActions()
{
    showAboutAction = new QAction(tr("&About"), this);
    showAboutAction->setIcon(aboutIcon);
    connect(showAboutAction, SIGNAL(triggered()), this, SLOT(showAbout()));

    showResultAction = new QAction(tr("&Show result"), this);
    showResultAction->setIcon(logoIcon);
    connect(showResultAction, SIGNAL(triggered()), this, SLOT(showResult()));

    quitAction = new QAction(tr("&Quit"), this);
    quitAction->setIcon(QIcon(":/images/Close.png"));
    connect(quitAction, SIGNAL(triggered()), qApp, SLOT(quit()));


    startTrackingAction = new QAction(tr("Start tracking"), this);
    startTrackingAction->setIcon(playIcon);
    connect(startTrackingAction, SIGNAL(triggered()), this, SLOT(startTracking()));

    pauseTrackingAction = new QAction(tr("Pause tracking"), this);
    pauseTrackingAction->setIcon(pauseIcon);
    connect(pauseTrackingAction, SIGNAL(triggered()), this, SLOT(pauseTracking()));
    pauseTrackingAction->setEnabled(false);

    resetTrackedTimeAction = new QAction(tr("Reset tracking result"), this);
    connect(resetTrackedTimeAction, SIGNAL(triggered()), this, SLOT(resetTrackingResult()));
    resetTrackedTimeAction->setEnabled(false);

}
Example #3
void TransFunc1DRampEditor::createConnections() {
    // Buttons
    connect(clearButton_, SIGNAL(clicked()), this, SLOT(clearButtonClicked()));
    connect(loadButton_, SIGNAL(clicked()), this, SLOT(loadTransferFunction()));
    connect(saveButton_, SIGNAL(clicked()), this, SLOT(saveTransferFunction()));

    // signals from transferMappingCanvas
    connect(transCanvas_, SIGNAL(changed()), this, SLOT(updateTransferFunction()));
    connect(transCanvas_, SIGNAL(loadTransferFunction()), this, SLOT(loadTransferFunction()));
    connect(transCanvas_, SIGNAL(saveTransferFunction()), this, SLOT(saveTransferFunction()));
    connect(transCanvas_, SIGNAL(resetTransferFunction()), this, SLOT(clearButtonClicked()));
    connect(transCanvas_, SIGNAL(toggleInteractionMode(bool)), this, SLOT(toggleInteractionMode(bool)));

    // signals for colorPicker
    connect(transCanvas_, SIGNAL(colorChanged(const QColor&)),
            colorPicker_, SLOT(setCol(const QColor)));
    connect(transCanvas_, SIGNAL(colorChanged(const QColor&)),
            colorLumPicker_, SLOT(setCol(const QColor)));
    connect(colorPicker_, SIGNAL(newCol(int,int)),
            colorLumPicker_, SLOT(setCol(int,int)));
    connect(colorLumPicker_, SIGNAL(newHsv(int,int,int)),
            this, SLOT(markerColorChanged(int,int,int)));
    connect(colorPicker_, SIGNAL(toggleInteractionMode(bool)), this, SLOT(toggleInteractionMode(bool)));
    connect(colorLumPicker_, SIGNAL(toggleInteractionMode(bool)), this, SLOT(toggleInteractionMode(bool)));

    // doubleslider
    connect(doubleSlider_, SIGNAL(valuesChanged(float, float)), this, SLOT(thresholdChanged(float, float)));
    connect(doubleSlider_, SIGNAL(toggleInteractionMode(bool)), this, SLOT(toggleInteractionMode(bool)));

    // threshold spinboxes
    connect(lowerThresholdSpin_, SIGNAL(valueChanged(int)), this, SLOT(lowerThresholdSpinChanged(int)));
    connect(upperThresholdSpin_, SIGNAL(valueChanged(int)), this, SLOT(upperThresholdSpinChanged(int)));

    connect(checkClipThresholds_, SIGNAL(toggled(bool)), transCanvas_, SLOT(toggleClipThresholds(bool)));

    //ramp slider and spinboxes
    connect(sliderRampCenter_, SIGNAL(valueChanged(int)), this, SLOT(updateRampCenter(int)));
    connect(spinRampCenter_,   SIGNAL(valueChanged(int)), this, SLOT(updateRampCenter(int)));
    connect(spinRampWidth_,    SIGNAL(valueChanged(int)), this, SLOT(updateRampWidth(int)));
    connect(sliderRampWidth_,  SIGNAL(valueChanged(int)), this, SLOT(updateRampWidth(int)));

    connect(sliderRampCenter_, SIGNAL(sliderPressed()),  this, SLOT(startTracking()));
    connect(sliderRampWidth_,  SIGNAL(sliderPressed()),  this, SLOT(startTracking()));
    connect(sliderRampCenter_, SIGNAL(sliderReleased()), this, SLOT(stopTracking()));
    connect(sliderRampWidth_,  SIGNAL(sliderReleased()), this, SLOT(stopTracking()));
}
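The last four connections show a common Qt idiom: a slider's sliderPressed()/sliderReleased() signals bracket a drag, so startTracking()/stopTracking() can switch the editor into and out of a cheaper interaction mode while the value changes. Below is a minimal sketch of such a pair of slots using a hypothetical editor class; the bodies are an assumption for illustration, not taken from the example above.

#include <QObject>

// Hypothetical editor: enters a lightweight "interaction" mode while a slider is dragged.
class RampEditorSketch : public QObject {
    Q_OBJECT
public slots:
    void startTracking() { emit toggleInteractionMode(true); }   // connected to sliderPressed()
    void stopTracking()  { emit toggleInteractionMode(false); }  // connected to sliderReleased()
signals:
    void toggleInteractionMode(bool on);
};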
Example #4
void MlUVView::clientSelect(QMouseEvent *)
{
    const Ray * ray = getIncidentRay();
    Vector2F p(ray->m_origin.x, ray->m_origin.y);
    startTracking(p);
    pickupFeather(p);
    if(interactMode() == ToolContext::MoveVertexInUV) {
        m_selectedVert = FeatherLibrary->selectedFeatherExample()->selectVertexInUV(p, m_moveYOnly, m_selectVertWP);
    }
}
Example #5
VRGenericHeadTracker::VRGenericHeadTracker()
: _localPosition(Vec3::ZERO)
{
#if (CC_TARGET_PLATFORM == CC_PLATFORM_IOS)
    _motionMgr = [[CMMotionManager alloc] init];
#endif

#if (CC_TARGET_PLATFORM == CC_PLATFORM_IOS) || (CC_TARGET_PLATFORM == CC_PLATFORM_ANDROID)
    startTracking();
#endif
}
Example #6
void TransFuncEditorIntensityGradient::createConnections() {
    if (!supported_)
        return;

    // buttons
    connect(loadButton_,  SIGNAL(clicked()), this, SLOT(loadTransferFunction()));
    connect(saveButton_,  SIGNAL(clicked()), this, SLOT(saveTransferFunction()));
    connect(clearButton_, SIGNAL(clicked()), painter_, SLOT(resetTransferFunction()));

    connect(gridEnabledButton_,      SIGNAL(clicked()), this, SLOT(toggleShowGrid()));
    connect(histogramEnabledButton_, SIGNAL(clicked()), this, SLOT(toggleShowHistogram()));

    connect(quadButton_,   SIGNAL(clicked()), painter_, SLOT(addQuadPrimitive()));
    connect(bananaButton_, SIGNAL(clicked()), painter_, SLOT(addBananaPrimitive()));
    connect(deleteButton_, SIGNAL(clicked()), painter_, SLOT(deletePrimitive()));
    connect(colorButton_,  SIGNAL(clicked()), painter_, SLOT(colorizePrimitive()));

    connect(histogramBrightness_, SIGNAL(sliderMoved(int)), painter_, SLOT(histogramBrightnessChanged(int)));
    connect(histogramLog_, SIGNAL(stateChanged(int)), painter_, SLOT(toggleHistogramLogarithmic(int)));

    // slider
    connect(fuzziness_, SIGNAL(valueChanged(int)), painter_, SLOT(fuzzinessChanged(int)));
    connect(transparency_, SIGNAL(valueChanged(int)), painter_, SLOT(transparencyChanged(int)));

    connect(fuzziness_, SIGNAL(sliderPressed()), this, SLOT(startTracking()));
    connect(transparency_, SIGNAL(sliderPressed()), this, SLOT(startTracking()));

    connect(fuzziness_, SIGNAL(sliderReleased()), this, SLOT(stopTracking()));
    connect(transparency_, SIGNAL(sliderReleased()), this, SLOT(stopTracking()));

    connect(painter_, SIGNAL(setTransparencySlider(int)), this, SLOT(setTransparency(int)));
    connect(painter_, SIGNAL(primitiveDeselected()), this, SLOT(primitiveDeselected()));
    connect(painter_, SIGNAL(primitiveSelected()), this, SLOT(primitiveSelected()));
    connect(painter_, SIGNAL(toggleInteractionMode(bool)), this, SLOT(toggleInteractionMode(bool)));
    connect(painter_, SIGNAL(repaintSignal()), this, SLOT(repaintSignal()));

}
Example #7
AsynchronousStatus
NokiaMaemoLocationInterfaceImpl::startLbs()
{
   if (m_registeredAndTracking) {
      // We have already started and tracking is on.
      nav2log << "[Debug] NokiaMaemoLocationInterfaceImpl::startLbs : "
              << "We have already started and tracking is on."
              << endl;
      return AsynchronousStatus(RequestID(RequestID::INVALID_REQUEST_ID),
                                OK, "", "");
   }

   // Send CellID request
   if (!checkAndSendCellIDRequest(true)) {
      nav2log << "[Debug] NokiaMaemoLocationInterfaceImpl::startLbs : "
              << "Send CellID request failed" << endl;
      return AsynchronousStatus(RequestID(RequestID::INVALID_REQUEST_ID),
                                LBS_STARTUP_FAILED, "", "");
   }

   bool started = registerLbsClient();
   if (!started) {
      // Not good, could not start the gps.
      nav2log << "[Debug] NokiaMaemoLocationInterfaceImpl::startLbs : " 
              << "registerLbsClient() failed." << endl;
      return AsynchronousStatus(RequestID(RequestID::INVALID_REQUEST_ID),
                                LBS_STARTUP_FAILED, "", "");
   } else {
      if (!startTracking()) {
         // Failed to start the tracking, fatal error. Deregister
         // client
         nav2log << "[Debug] NokiaMaemoLocationInterfaceImpl::startLbs : "
                 << "startTracking() failed." << endl;
         deregisterLbsClient();
         return AsynchronousStatus(RequestID(RequestID::INVALID_REQUEST_ID),
                                   LBS_START_TRACKING_FAILED, "", "");
      }
      m_registeredAndTracking = true;
      m_needSendStartedCallback = true;
      isab::GenericGuiMess message(isab::GuiProtEnums::CONNECT_GPS);
      sendAsynchronousRequestToNav2(&message);
      return AsynchronousStatus(RequestID(RequestID::INVALID_REQUEST_ID),
                                OK, "", "");
   }   
}
Example #8
void MeanShift::startTracking(const Image* image, const CvConnectedComp* cComp)
{
    if (!cComp->contour)    /* Not really a connected component */
        return startTracking(image, cComp->rect);

    Image* mask = new Image(image->size(), UByte, 1);
    cvDrawContours(mask->cvImage(),
                   cComp->contour,
                   cvScalar(255),
                   cvScalar(255),
                   -1,
                   CV_FILLED,
                   8);

    delete m_trackingHist;
    m_trackingHist = Histogram::createHSHistogram(image, mask);
    m_lastPostition = cComp->rect;
    delete mask;
}
Example #9
void Client::startClient()
{
    setup();

    QThread* keypadThread = new QThread();
    QObject::connect(keypadThread, SIGNAL(started()), keypad, SLOT(start()));
    QObject::connect(keypad, SIGNAL(keypadFinished()), keypadThread, SLOT(quit()));
    QObject::connect(keypadThread, SIGNAL(finished()), this, SLOT(noKeypad()));

    keypad->moveToThread(keypadThread);
    keypadThread->start();

    QThread* netThread = new QThread();
    QObject::connect(netThread, SIGNAL(started()), network, SLOT(begin()));
    QObject::connect(network, SIGNAL(networkFinished()), netThread, SLOT(quit()));

    network->moveToThread(netThread);
    netThread->start();
    // QMetaObject::invokeMethod(network,"begin");

    // we need to authenticate the handset before we can do anything
    while (!authenticated && !error)
    {
        authenticated = authenticateDevice();
    }
    if (error)
    {
        // put quit onto event queue
        QMetaObject::invokeMethod(this, "forceQuit", Qt::QueuedConnection);
        return;
    }
    qDebug() << "Authenticated";


    QThread* trackerThread = new QThread();
    QObject::connect(trackerThread, SIGNAL(started()), tracker, SLOT(startTracking()));
    QObject::connect(tracker, SIGNAL(trackerFinished()), trackerThread, SLOT(quit()));

    tracker->moveToThread(trackerThread);
    trackerThread->start();
}
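Each worker above follows the standard Qt worker-object pattern: create a bare QThread, connect its started() signal to the worker's entry slot, move the worker to the thread, and start it; a finished-style signal from the worker then quits the thread. Here is a self-contained sketch of that pattern with a hypothetical Worker class; the names are illustrative, not taken from the code above.

#include <QCoreApplication>
#include <QThread>
#include <QDebug>

// Hypothetical worker: does its job in start() and announces completion via finished().
class Worker : public QObject {
    Q_OBJECT
public slots:
    void start() { qDebug() << "working in" << QThread::currentThread(); emit finished(); }
signals:
    void finished();
};

int main(int argc, char** argv)
{
    QCoreApplication app(argc, argv);
    Worker worker;
    QThread thread;

    QObject::connect(&thread, SIGNAL(started()), &worker, SLOT(start()));
    QObject::connect(&worker, SIGNAL(finished()), &thread, SLOT(quit()));
    QObject::connect(&thread, SIGNAL(finished()), &app, SLOT(quit()));

    worker.moveToThread(&thread);   // worker's slots now run in the new thread
    thread.start();
    return app.exec();
}

#include "main.moc"   // required when building this single file with automoc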
Example #10
void SensorFileSysWalker::prepareWaitFileInfo() {
	if( changeHandle == NULL )
		changeHandle = startTracking( curFile , false );
	else
		continueTracking( changeHandle );
}
Example #11
void SensorFileSysWalker::prepareWaitDirectoryInfo() {
	if( changeHandle == NULL )
		changeHandle = startTracking( curDir + "\\" , true );
	else
		continueTracking( changeHandle );
}
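In these two examples startTracking()/continueTracking() appear to wrap a file-system change-notification handle (note the Windows-style "\\" path separator). Purely as an assumption about what such helpers might do, a Win32 sketch based on the FindFirstChangeNotification/FindNextChangeNotification pair could look like this:

#include <windows.h>
#include <string>

// Assumed sketch: FindFirstChangeNotification watches a directory (optionally its
// subtree); tracking a single file would mean watching its parent directory and
// filtering the resulting events.
HANDLE startTracking(const std::string& directory, bool watchSubtree)
{
    return FindFirstChangeNotificationA(
        directory.c_str(),
        watchSubtree ? TRUE : FALSE,
        FILE_NOTIFY_CHANGE_FILE_NAME | FILE_NOTIFY_CHANGE_LAST_WRITE);
}

// Assumed sketch: re-arm the handle so WaitForSingleObject can pick up the next change.
void continueTracking(HANDLE changeHandle)
{
    FindNextChangeNotification(changeHandle);
}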
Example #12
int main( int argc, char** argv )
{
	// Store the first detected face
	CvRect *pFaceRect = NULL;

	// Store the tracked face
	CvRect nextFaceRect;
	//CvBox2D faceBox; 

	// Points to draw the face rectangle
	CvPoint pt1 = cvPoint(0,0);
	CvPoint pt2 = cvPoint(0,0);
	
	char c = 0;  

	// Object faceDetection of the class "FaceDetection"
    FaceDetection faceDetection;

	// Object captureFrame of the class "CaptureFrame"
	CaptureFrame captureFrame; 

	// Create a new window 
    cvNamedWindow("tracked face", 1);

	printf("\nPress r to re-initialise tracking");

	// Capture from the camera
	captureFrame.StartCapture();

	bool finished = captureFrame.CaptureNextFrame(); // capture into frameCopy
	if (finished) // if video is finished
	 {
	   captureFrame.DeallocateFrames();
	   releaseTracker();
	   cvDestroyWindow("tracked face");
	   return 0;
	 }
		      
    // Create the tracker
    if(!createTracker(captureFrame.getFrameCopy())) 
	   fprintf( stderr, "ERROR: tracking initialisation\n" );

	// Set Camshift parameters
	setVmin(30);
	setSmin(20);

	// Capture video until a face is detected
	pFaceRect = waitForFaceDetect(&faceDetection, &captureFrame);
	// Start tracking
	if (pFaceRect == NULL)
	{
	 captureFrame.DeallocateFrames();
	 releaseTracker();
	 // Destroy the window previously created
	 cvDestroyWindow("tracked face");
	 return 0;
	}
	// Start tracking
	startTracking(captureFrame.getFrameCopy(), pFaceRect);


	// Track the detected face using CamShift
	while(1)
	{
		finished = captureFrame.CaptureNextFrame(); //capture to frameCopy
		
		if (finished) 
		{
	      captureFrame.DeallocateFrames();
		  releaseTracker();
		  cvDestroyWindow("tracked face");
		  return 0;
		}
			 
		// Track the face in the new video frame
		nextFaceRect = track(captureFrame.getFrameCopy());
		//faceBox = track(captureFrame.getFrameCopy());

		pt1.x = nextFaceRect.x;
        pt1.y = nextFaceRect.y;
        pt2.x = pt1.x + nextFaceRect.width;
        pt2.y = pt1.y + nextFaceRect.height;

		// Draw face rectangle
		cvRectangle(captureFrame.getFrameCopy(), pt1, pt2, CV_RGB(255,0,0), 3, 8, 0 );

		// Draw face ellipse
		//cvEllipseBox(captureFrame.getFrameCopy(), faceBox,
		             //CV_RGB(255,0,0), 3, CV_AA, 0 );

		cvShowImage("tracked face", captureFrame.getFrameCopy());
			
		c = cvWaitKey(100);
		switch(c)
		{		
			case 27:
				break;
			case 'r': printf("\nKey pressed for re-initialisation");
				// Capture video until a face is detected
				pFaceRect = waitForFaceDetect(&faceDetection, &captureFrame);
				
				if (pFaceRect == NULL) 
				{
				 captureFrame.DeallocateFrames();
				 releaseTracker();
				 // Destroy the window previously created
				 cvDestroyWindow("tracked face");
			     return 0;
				}
				releaseTracker();
				// Start tracking
				startTracking(captureFrame.getFrameCopy(), pFaceRect);
				break;
		}
	}

	// Release the image and tracker
  	captureFrame.DeallocateFrames();
    releaseTracker();

    // Destroy the window previously created
    cvDestroyWindow("tracked face");
    return 0;
}
// main()
Example #13
int main( int argc, char** argv )
{

	int starting = 3;
	int flag=0;
	CvPoint pt;
	int x = 0,diffx=0,prevx=0,initx=0;
	int y = 0,diffy=0,prevy=0,inity=0;
 
  	// Open X display
	Display *display = XOpenDisplay (NULL);
	if (display == NULL)
        {
      		fprintf (stderr, "Can't open display!\n");
      		return -1;
    	}
  
  	// Wait 3 seconds to start
  	printf ("Starting in ");
  		fflush (stdout);
  	while (starting > 0)
    	{
      		printf ("\b\b\b %d...", starting);
      		fflush (stdout);
      		sleep (1);
      		starting--;
    	}
  	printf ("\n");
    IplImage* temp=cvCreateImage(cvSize(80,120),8,3);
	IplImage* pframe1;
	
	CvRect *pHandRect=0,*vrect=NULL;
	capture=cvCaptureFromCAM(0);	
	if( !initAll() ) exitProgram(-1);
	
		int g;
	piframe=cvQueryFrame(capture);
	pframe=invert(piframe);
	pframe1=cvCloneImage(piframe);
	// Capture and display video frames until a hand
	// is detected
	int i=0;
	char c;	
	initPCA();
    char ch;

	x :
	printf("came to x\n");
	while( 1 )
	{		
		// Look for a hand in the next video frame
		pframe=cvQueryFrame(capture);
		pframe1=cvCloneImage(pframe);
    	detect_and_draw(pframe);
		pHandRect = detectHand(pframe);
		
		if((pHandRect)&&(pHandRect->x>4)&&(pHandRect->y>4)&&(pHandRect->x*pHandRect->y<(240*300))&&(pHandRect->x<630)&&(pHandRect->y<470))
		{	
			cvRectangle(pframe1,cvPoint((pHandRect->x-4),pHandRect->y-4),cvPoint((pHandRect->x+pHandRect->width+4),pHandRect->y+pHandRect->height+4),CV_RGB(255,0,0),1,8,0);		
			i++;
		}
		else 
			i=0;
		// Show the display image
		cvShowImage( DISPLAY_WINDOW, pframe1 );
		cvMoveWindow(DISPLAY_WINDOW,0,0);
		c=cvWaitKey(10);
		if(c==27)
		{
			exitProgram(0);
		}
		if(i>=3)
		{	// exit loop when a hand is detected
			if(pHandRect) {
				i=0;
				prevx=pHandRect->x;
				initx=pHandRect->x;
				prevy=pHandRect->y+pHandRect->height;
				flag=3;
				break;
			}
		}
	}

	// initialize tracking
	KalmanFilter kfilter;
	startTracking(pframe, *pHandRect,kfilter);
	// Track the detected hand using CamShift
	while( 1 )
	{
		CvRect handBox;

		// get the next video frame
		pframe=cvQueryFrame(capture);
		pframe1=cvCloneImage(pframe);
		handBox = combi_track(pframe,kfilter);
        int old_ht;
        int a;
		IplImage* temp;
		if(!((handBox.x<0)||(handBox.y<0)||((handBox.x+handBox.width)>pframe->width)||((handBox.y+handBox.height)>pframe->height))) 
        {
            if(handBox.height>(1.3*handBox.width))
            {
                old_ht=handBox.height;
                handBox.height=2.4*handBox.width;
                handBox.y-=handBox.height-old_ht;
            }
            cvSetImageROI(pframe,handBox);
            temp=cvCreateImage(cvGetSize(pframe),8,3);

            cvCopy(pframe,temp,NULL);

	        a=recognize(temp);
	        cvReleaseImage(&temp);
	        if(handBox.height>(2.3*handBox.width))
            {	
            	if(a==3)
            		a=5;
            }
			diffx=handBox.x+(handBox.width/2)-initx;
			diffy=handBox.y+handBox.height-(handBox.width/2)-prevy;
			prevx=handBox.x+(handBox.width/2);
			prevy=handBox.y+handBox.height-(handBox.width/2);

	        cvResetImageROI(pframe);
    		cvRectangle(pframe1,cvPoint(handBox.x,handBox.y),cvPoint(handBox.x+handBox.width,handBox.y+handBox.height),CV_RGB(0,0,255),3,8,0);		
            
	        if(diffx<(-60))
	        {	click(display,1,0);
	        	printf("right click\n");
	        	goto x;
	        }
	        else if(diffx>(60))
	        {
	        	fake(display, 0);
	        	printf("left click\n");
	        	goto x;
	        }
	        else
	        {}

        }
        else
        	goto x;

		cvShowImage( DISPLAY_WINDOW, pframe1 );

		ch=cvWaitKey(10);
		if( ch==27 ) {
			exitProgram(0);			
			break;
		}
		if(ch=='s'){
		    cvSetImageROI(pframe,handBox);
		    cvResize(pframe,temp);
		    cvSaveImage("image6.jpg",temp);
		    cvResetImageROI(pframe);
		}
	}
	return 0;
}
Example #14
MeanShift::MeanShift(const Image* image, CvRect rect)
    : m_trackingHist(NULL)
{
    startTracking(image, rect);
}
Example #15
MeanShift::MeanShift(const Image* image, const CvConnectedComp* cComp)
    : m_trackingHist(NULL)
{
    startTracking(image, cComp);
}
Example #16
void CameraUtilityFrame::onNewCapture()
{
    startTracking();
}
Example #17
// <Ansariel> Avatar tracking feature
void LLNetMap::handleStartTracking()
{
	startTracking();
}
Example #18
		virtual int endMove ()
		{
			startTracking ();
			return Telescope::endMove ();
		}