Example #1
#include <conio.h>              // _kbhit()
#include <windows.h>            // Sleep()
#include <cstdio>
#include <vector>

#include <opencv2/opencv.hpp>
#include "cameralibrary.h"      // NaturalPoint Camera SDK; the sync module may need an additional SDK header depending on version
// Project-specific headers (SynCaptureThread, POI, Para, RECORDED_FRAMES, NUMBER_OF_POINTS, ...) are not shown in this example.

using namespace std;
using namespace cv;
using namespace CameraLibrary;

vector<vector<Point2f>> GetImagePoints(cModuleSync * sync)
{
	//== Create Image Points Vector of Vectors ==-- DBE
	vector<vector<Point2f>> imagePoints (RECORDED_FRAMES);
	for(int i = 0; i < RECORDED_FRAMES; i++)
		imagePoints[i].resize(NUMBER_OF_POINTS);
	
	//== Initialize Total Recorded Frame Count ==-- DBE
	int TotalFrames = 0;

	//== Poll for frame groups ==--
    while(!_kbhit() && TotalFrames < RECORDED_FRAMES)
    {
        FrameGroup *frameGroup = sync->GetFrameGroup();

        if(frameGroup)
        {
            //== Print (and record) something every 100 frame groups ==--
            //== Note: the recording code below sits inside this throttle, so
            //== calibration points are only sampled from every 100th group ==--

            static int frameThrottle = 0;
            static int frameCount    = 0;
            frameThrottle = (frameThrottle+1)%100;

            frameCount++;

            if(frameThrottle==0)
            {
                //== OK, let's print something about this frame group ==--

                printf("Received Group Frame #%d (Contains %d frames)   ", frameCount, frameGroup->Count());

                if(sync->LastFrameGroupMode()==FrameGroup::Hardware)
                    printf("Synchronized\n");
                else
                    printf("Unsynchronized\n");

                for(int i=0; i<frameGroup->Count(); i++)
                {
                    Frame * frame = frameGroup->GetFrame(i);

                    printf("  - Camera #%d is reporting %d 2D objects\n", i, frame->ObjectCount());
					
					// Draw the detected objects into a blank image and display them -NR
					cv::Mat M = cv::Mat::zeros(frame->GetCamera()->Height(), frame->GetCamera()->Width(), CV_32F);
					for(int j = 0; j < frame->ObjectCount(); j++) {
						cObject *object = frame->Object(j);
						printf("\tObject #%d: (%4.2f,%4.2f)\n", j, object->X(), object->Y());
						cv::circle(M, cv::Point2f(object->X(), object->Y()), object->Radius(), cv::Scalar(255, 0, 0));
						//== Record the point into imagePoints while we still need frames ==-- DBE
						if(frame->ObjectCount() == NUMBER_OF_POINTS && TotalFrames < RECORDED_FRAMES)
						{
							imagePoints[TotalFrames][j] = Point2f(object->X(), object->Y());
						}
					}

					//== Count this frame only if the full marker set was visible and we still need frames ==--
					if(frame->ObjectCount() == NUMBER_OF_POINTS && TotalFrames < RECORDED_FRAMES)
						TotalFrames++;

					if(frame->ObjectCount()) {
						cv::imshow("objects", M);
						cv::waitKey(2);
					}

                    frame->Release();
                }

                printf("\n");
            }

            frameGroup->Release();
        }

        Sleep(2);
    }
	return imagePoints;
}
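
// ---------------------------------------------------------------------------
// Not part of the original example: a minimal sketch of how the image points
// collected above could be fed to OpenCV's intrinsic calibration, assuming the
// physical layout of the NUMBER_OF_POINTS markers is known and the camera
// reports them in a consistent order. The helper name and the markerLayout
// parameter are illustrative only. cv::calibrateCamera is declared in
// <opencv2/calib3d.hpp>, which opencv.hpp above already pulls in.
// ---------------------------------------------------------------------------
static double CalibrateFromImagePoints(const vector<vector<Point2f>> &imagePoints,
                                       const vector<Point3f> &markerLayout,
                                       cv::Size imageSize)
{
	// One copy of the known 3D marker layout per recorded frame.
	vector<vector<Point3f>> objectPoints(imagePoints.size(), markerLayout);

	cv::Mat cameraMatrix, distCoeffs;
	vector<cv::Mat> rvecs, tvecs;

	// Returns the RMS reprojection error in pixels.
	return cv::calibrateCamera(objectPoints, imagePoints, imageSize,
	                           cameraMatrix, distCoeffs, rvecs, tvecs);
}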
void SynCaptureThread::run()
{
	//== Attach up to _numCameras connected cameras (slots are null-initialized) ==--
	Camera **cameras = new Camera*[_numCameras]();
	CameraList list;

	for(int i = 0; i < _numCameras && i < list.Count(); i++)
	{
		cameras[i] = CameraManager::X().GetCamera(list[i].UID());
	}

	//== Create and attach frame synchronizer ==--

	cModuleSync * sync = cModuleSync::Create();
	for(int i=0; i<_numCameras; i++)
	{
		if(cameras[i])
			sync->AddCamera(cameras[i]);
	}

	for(int i=0; i<_numCameras; i++)
	{
		if(cameras[i])
		{
			cameras[i]->Start();
			cameras[i]->SetVideoType(Core::SegmentMode);
		}
	}

	while(1)
	{
		if(_videoStatus == Para::PLAY_LIVE)
		{
			FrameGroup *frameGroup = sync->GetFrameGroup();
			if(frameGroup)
			{
				for(int i = 0; i < _POIsList.size(); i++)
					_POIsList[i].clear();
				for(int i = 0; i<frameGroup->Count(); i++)
				{
					Frame * frame = frameGroup->GetFrame(i);
					vector<POI> newPOIs;
					for(int j = 0; j < frame->ObjectCount(); j++)
					{				
						float area = frame->Object(j)->Area();
						if(area > Para::markerFilterThreshold)
						{
							float x = frame->Object(j)->X();
							float y = frame->Object(j)->Y();
							POI temp(x, y);
							if(!ContainNoise(_noisyList[i], temp))         // keep temp only if it is not in the noisy list (i.e. not noise)
								newPOIs.push_back(temp);
						}										
					}

					frame->Release();
					_POIsList[i] = newPOIs;
				}
				frameGroup->Release();
				emit signalPOIsListReady(_POIsList);
			}
		}
		else if( _videoStatus == Para::CALIBRATE)
		{
			FrameGroup *frameGroup = sync->GetFrameGroup();
			if(frameGroup)
			{
				for(int i = 0; i < _POIsList.size(); i++)
					_POIsList[i].clear();
				_frameCount++;
				if(_frameCount > Para::maxFrame * Para::frameInterval)             // switch back to PLAY_LIVE once maxFrame * frameInterval frames have been seen
				{
					WriteCalibratedPoints();
					_videoStatus = Para::PLAY_LIVE;
					emit signalCalibrationReady();
				}
				for(int i = 0; i<frameGroup->Count(); i++)
				{
					Frame * frame = frameGroup->GetFrame(i);
					vector<POI> newPOIs;
					for(int j = 0; j < frame->ObjectCount(); j++)
					{				
						float area = frame->Object(j)->Area();
						if(area > Para::markerFilterThreshold)
						{
							float x = frame->Object(j)->X();
							float y = frame->Object(j)->Y();
							POI temp(x, y);
							if(!ContainNoise(_noisyList[i], temp))         // keep temp only if it is not in the noisy list (i.e. not noise)
								newPOIs.push_back(temp);
						}										
					}

					frame->Release();
					_POIsList[i] = newPOIs;
				}

				if(_frameCount % Para::frameInterval == 0)             // sample one set of points every frameInterval frames
				{
					for(int i = 0; i < _POIsList.size(); i++)  
					{
						if(_POIsList[i].size() != 1)             // expect exactly one calibration point per camera; otherwise mark this sample invalid
						{
							_calibratedPoints[_frameCount / Para::frameInterval].push_back(Point2f(-1.0f, -1.0f));
							continue;
						}
						for(int j = 0; j < _POIsList[i].size(); j++)     // currently there is only one calibration point per camera
						{
							_calibratedPoints[_frameCount / Para::frameInterval].push_back(Point2f(_POIsList[i][j]._coordinates2d.x, _POIsList[i][j]._coordinates2d.y));
						}			
					}			
				}

				frameGroup->Release();
				emit signalPOIsListReady(_POIsList);
			}
		}
		else if(_videoStatus == Para::DENOISE)
		{
			FrameGroup *frameGroup = sync->GetFrameGroup();
			if(frameGroup)
			{
				for(int i = 0; i < _POIsList.size(); i++)
					_POIsList[i].clear();
				for(int i = 0; i < frameGroup->Count(); i++)
				{
					Frame * frame = frameGroup->GetFrame(i);
					for(int j = 0; j < frame->ObjectCount(); j++)
					{				
						float area = frame->Object(j)->Area();
						if(area > Para::markerFilterThreshold)
						{
							float x = frame->Object(j)->X();
							float y = frame->Object(j)->Y();
							POI temp(x, y);
							if(!ContainNoise(_noisyList[i], temp))         // record temp as noise if it is not already in the noisy list
								_noisyList[i].push_back(temp);
						}										
					}
					frame->Release();
				}

				frameGroup->Release();
			}
		}
		else if(_videoStatus == Para::STOP)
		{
			msleep(10);             // idle briefly instead of busy-spinning while capture is stopped
		}
		else if(_videoStatus == Para::BREAK)
			break;
	}
	//== Tear down SDK resources before leaving the thread ==--
	//== (cleanup mirrors the Create()/GetCamera() calls above; exact call names may vary by Camera SDK version) ==--
	cModuleSync::Destroy(sync);
	for(int i = 0; i < _numCameras; i++)
	{
		if(cameras[i])
			cameras[i]->Release();
	}
	delete[] cameras;
	exec();
}
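
// ---------------------------------------------------------------------------
// Not part of the original example: ContainNoise() is called above but its
// implementation is not shown. The sketch below is a plausible stand-in; the
// POI_Example struct and the kNoiseRadius threshold are assumptions made for
// illustration (the real POI type only needs the _coordinates2d member that
// the code above accesses).
// ---------------------------------------------------------------------------
struct POI_Example                                // hypothetical stand-in for the project's POI type
{
	cv::Point2f _coordinates2d;
	POI_Example(float x, float y) : _coordinates2d(x, y) {}
};

static bool ContainNoise(const vector<POI_Example> &noisyList, const POI_Example &temp)
{
	const float kNoiseRadius = 2.0f;              // assumed pixel threshold; the real project may use a Para:: constant

	// Treat "temp" as noise if it lies within kNoiseRadius pixels of any
	// point already recorded in the noisy list.
	for(size_t k = 0; k < noisyList.size(); k++)
	{
		float dx = noisyList[k]._coordinates2d.x - temp._coordinates2d.x;
		float dy = noisyList[k]._coordinates2d.y - temp._coordinates2d.y;
		if(dx*dx + dy*dy < kNoiseRadius * kNoiseRadius)
			return true;
	}
	return false;
}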