/******************************************************************************
* The update function runs continuously. Use it to update states and variables
*****************************************************************************/
void ofxKCoreVision::_update(ofEventArgs &e){
	if(debugMode) if((stream = freopen(fileName, "a", stdout)) == NULL){}	//in debug mode, redirect stdout to the log file (failure is silently ignored)

	//kinect.update();
	recordContext.update();		//update the capture context
	recordDepth.update();		//update the depth stream


	//bNewFrame = kinect.isFrameNew();
	bNewFrame = true;	// TODO: look how to do this in the correct way (see the sketch after this example)

	if(!bNewFrame){
		return;			//if no new frame, return
	} else {			//else process camera frame
		ofBackground(0, 0, 0);

		// Calculate FPS of Camera
		frames++;
		float time = ofGetElapsedTimeMillis();
		if (time > (lastFPSlog + 1000)) {
			fps = frames;
			frames = 0;
			lastFPSlog = time;
		}//End calculation

		float beforeTime = ofGetElapsedTimeMillis();

	//	if (bGPUMode){
	//		grabFrameToGPU(filter->gpuSourceTex);
	//		filter->applyGPUFilters();
	//		contourFinder.findContours(filter->gpuReadBackImageGS,  (MIN_BLOB_SIZE * 2) + 1, ((camWidth * camHeight) * .4) * (MAX_BLOB_SIZE * .001), maxBlobs, (double) hullPress , false);
	//	} else {
			grabFrameToCPU();
			filter->applyCPUFilters( processedImg );
			contourFinder.findContours(processedImg,  (MIN_BLOB_SIZE * 2) + 1, ((camWidth * camHeight) * .4) * (MAX_BLOB_SIZE * .001), maxBlobs, (double) hullPress, false);
	//	}

		//If Object tracking or Finger tracking is enabled
		if( contourFinder.bTrackBlobs || contourFinder.bTrackFingers || contourFinder.bTrackObjects)
			tracker.track(&contourFinder);

		//DSP time: duration of this frame's filtering, contour finding and tracking
		differenceTime = ofGetElapsedTimeMillis() - beforeTime;

		//Dynamic background subtraction learn rate
		if (filter->bDynamicBG){
			filter->fLearnRate = backgroundLearnRate * .0001; //If there are no blobs, add the background faster.
			if ((contourFinder.nBlobs > 0) || (contourFinder.nFingers > 0)) //If there ARE blobs, add the background slower.
				filter->fLearnRate = backgroundLearnRate * .0001; //NOTE: currently the same rate as the line above
		}//End background learning rate

		//Sending TUIO messages
		if (myTUIO.bOSCMode || myTUIO.bTCPMode || myTUIO.bBinaryMode){
			myTUIO.setMode(contourFinder.bTrackBlobs, contourFinder.bTrackFingers , contourFinder.bTrackObjects);
			//myTUIO.sendTUIO( &getBlobs(), &getFingers(), &getObjects() );
			myTUIO.sendTUIO(tracker.getTrackedBlobsPtr(), tracker.getTrackedFingersPtr(), tracker.getTrackedObjectsPtr() );
		}
	}
}
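
A minimal sketch of the new-frame check the TODO above alludes to, assuming the commented-out ofxKinect grabber ("kinect") is the capture device; updateNewFrameFlag is a hypothetical helper name, not part of the original class:

void ofxKCoreVision::updateNewFrameFlag(){	//hypothetical helper, not in the original example
	kinect.update();					//pull the latest depth/RGB data from the device
	bNewFrame = kinect.isFrameNew();	//true only when a frame arrived since the last update()
}
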
Example #2
/******************************************************************************
* The update function runs continuously. Use it to update states and variables
*****************************************************************************/
void ofxNCoreVision::_update(ofEventArgs &e)
{
	if(debugMode) if((stream = freopen(fileName, "a", stdout)) == NULL){}	//in debug mode, redirect stdout to the log file (failure is silently ignored)

	bNewFrame = false;

	if(bcamera) //if camera
	{
		#ifdef TARGET_WIN32
			if(PS3!=NULL)//ps3 camera
			{
				bNewFrame = PS3->isFrameNew();
			}
			else if(ffmv!=NULL)
			{
				ffmv->grabFrame();
				bNewFrame = true;
			}
			else if(vidGrabber !=NULL)
			{
				vidGrabber->grabFrame();
				bNewFrame = vidGrabber->isFrameNew();
			}
			else if(dsvl !=NULL)
			{
				bNewFrame = dsvl->isFrameNew();
			}
		#else
			vidGrabber->grabFrame();
			bNewFrame = vidGrabber->isFrameNew();
		#endif
	}
	else //if video
	{
		vidPlayer->idleMovie();
		bNewFrame = vidPlayer->isFrameNew();
	}

	//if no new frame, return
	if(!bNewFrame)
	{
		return;
	}
	else//else process camera frame
	{
		ofBackground(0, 0, 0);

		// Calculate FPS of Camera
		frames++;
		float time = ofGetElapsedTimeMillis();
		if (time > (lastFPSlog + 1000))
		{
			fps = frames;
			frames = 0;
			lastFPSlog = time;
		}//End calculation

		float beforeTime = ofGetElapsedTimeMillis();

		if (bGPUMode)
		{
			grabFrameToGPU(filter->gpuSourceTex);
			filter->applyGPUFilters();
			contourFinder.findContours(filter->gpuReadBackImageGS,  (MIN_BLOB_SIZE * 2) + 1, ((camWidth * camHeight) * .4) * (MAX_BLOB_SIZE * .001), maxBlobs, false);

			if(contourFinder.bTrackFiducials)
			{
				grabFrameToGPU(filter_fiducial->gpuSourceTex);
				filter_fiducial->applyGPUFilters();
				fidfinder.findFiducials( filter_fiducial->gpuReadBackImageGS );
			}
		}
		else
		{
			grabFrameToCPU();
			filter->applyCPUFilters( processedImg );
			contourFinder.findContours(processedImg,  (MIN_BLOB_SIZE * 2) + 1, ((camWidth * camHeight) * .4) * (MAX_BLOB_SIZE * .001), maxBlobs, false);

			if(contourFinder.bTrackFiducials)
			{
				filter_fiducial->applyCPUFilters( processedImg_fiducial );
				fidfinder.findFiducials( processedImg_fiducial );
			}
		}

		//If Object tracking or Finger tracking is enabled
		if(contourFinder.bTrackFingers || contourFinder.bTrackObjects)
		{
			tracker.track(&contourFinder);
		}

		//Map Fiducials from camera to screen position
		if(contourFinder.bTrackFiducials)
		{
			tracker.doFiducialCalculation();
		}

		//DSP time: duration of this frame's filtering, contour finding and tracking
		differenceTime = ofGetElapsedTimeMillis() - beforeTime;

		//Dynamic background subtraction learn rate
		if (filter->bDynamicBG)
		{
			filter->fLearnRate = backgroundLearnRate * .0001; //If there are no blobs, add the background faster.
			if (contourFinder.nBlobs > 0) //If there ARE blobs, add the background slower.
			{
				filter->fLearnRate = backgroundLearnRate * .0001; //NOTE: currently the same rate as the line above; see the sketch after this example
			}
		}//End background learning rate

		//Sending TUIO messages
		if (myTUIO.bOSCMode || myTUIO.bTCPMode || myTUIO.bBinaryMode)
		{
			//printf("sending data osc : %d TCP : %d binary : %d\n", myTUIO.bOSCMode, myTUIO.bTCPMode, myTUIO.bBinaryMode);
			myTUIO.setMode(contourFinder.bTrackFingers , contourFinder.bTrackObjects, contourFinder.bTrackFiducials);
//			myTUIO.sendTUIO(&getBlobs(),&getObjects(),&fidfinder.fiducialsList);
			myTUIO.sendTUIO(tracker.getPtrTrackedBlobs(), tracker.getPtrTrackedObjects(), &fidfinder.fiducialsList);
		}
	}
}
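
A sketch of the two-rate behavior the dynamic background comments describe; in both examples above the branches assign the same fLearnRate, so the slower multiplier used here (.00001) is an assumed illustrative value, not taken from the original code:

	//Sketch only: distinct fast/slow background learning rates.
	//The slower multiplier (.00001) is an assumption for illustration.
	if (filter->bDynamicBG)
	{
		filter->fLearnRate = backgroundLearnRate * .0001;		//no blobs: absorb the background faster
		if (contourFinder.nBlobs > 0)
		{
			filter->fLearnRate = backgroundLearnRate * .00001;	//blobs present: absorb the background more slowly
		}
	}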