Пример #1
0
// output a packet carrying the next output sequence number
void ARec::OutPacket(PhraseType k, string wrd, string tag,
                     int pred, int alt, float ac, float lm, float score,
                     float confidence, float nact, HTime start, HTime end)
{
   // Flush any pending time markers due before this packet starts.
   OutMarkers(start);
   ++outseqnum;
   // new already yields an APhraseData* — the old C-style cast was
   // redundant. Ownership of pd passes to the APacket created below.
   APhraseData *pd = new APhraseData(k,outseqnum,pred);
   pd->alt = alt; pd->ac = ac; pd->lm = lm; pd->score = score;
   pd->confidence = confidence;
   pd->word = wrd;  pd->tag = tag; pd->nact = nact;
   APacket p(pd);
   p.SetStartTime(start); p.SetEndTime(end);
   out->PutPacket(p);
   if (showRD) DrawOutLine(k,start,wrd,tag);
   if (trace&T_OUT) p.Show();

#ifdef __APPLE__
   // Broadcast the result as a local CF notification so that native
   // observers can pick up the recognised phrase/word/tag.
   CFMutableDictionaryRef userInfo = CFDictionaryCreateMutable(NULL, 0,
      &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks);
   if (userInfo != NULL) {
      CFNumberRef cfPhraseType = CFNumberCreate(NULL, kCFNumberIntType, &k);
      if (cfPhraseType != NULL) {
         CFDictionaryAddValue(userInfo, CFSTR("PhraseType"), cfPhraseType);
         CFRelease(cfPhraseType);
      }

      // CFStringCreateWithCString returns NULL for bytes that are not
      // valid UTF-8; inserting NULL into a CFDictionary would crash,
      // so each created string is checked before use.
      CFStringRef cfWord = CFStringCreateWithCString(NULL, wrd.c_str(), kCFStringEncodingUTF8);
      if (cfWord != NULL) {
         CFDictionaryAddValue(userInfo, CFSTR("Word"), cfWord);
         CFRelease(cfWord);
      }

      CFStringRef cfTag = CFStringCreateWithCString(NULL, tag.c_str(), kCFStringEncodingUTF8);
      if (cfTag != NULL) {
         CFDictionaryAddValue(userInfo, CFSTR("Tag"), cfTag);
         CFRelease(cfTag);
      }

      CFNotificationCenterPostNotification(CFNotificationCenterGetLocalCenter(),
         CFSTR("ARec::OutPacket"), NULL, userInfo, false);

      CFRelease(userInfo);
   }
#endif
}
Пример #2
0
void main()
{
	windage::Logger* log = new windage::Logger(&std::cout);

	// connect camera
	CvCapture* capture = cvCaptureFromCAM(CV_CAP_ANY);

	// saving
	bool saving = false;
	CvVideoWriter* writer = NULL;

	char message[100];
	IplImage* inputImage = cvCreateImage(cvSize(WIDTH, HEIGHT), IPL_DEPTH_8U, 3);
	IplImage* undistImage = cvCreateImage(cvSize(WIDTH, HEIGHT), IPL_DEPTH_8U, 3);
	IplImage* grayImage = cvCreateImage(cvGetSize(inputImage), IPL_DEPTH_8U, 1);
	IplImage* resultImage = cvCreateImage(cvSize(WIDTH, HEIGHT), IPL_DEPTH_8U, 3);

	// Multipel tracker Initialize
	std::vector<IplImage*> trainingImage;
	for(int i=1; i<=OBJECT_COUNT; i++)
	{
		sprintf(message, "cube/reference%d.png", i);
		trainingImage.push_back(cvLoadImage(message, 0));
	}

	windage::MultipleSURFTracker* multipleTracker = new windage::MultipleSURFTracker();
	multipleTracker->Initialize(intrinsicValues[0], intrinsicValues[1], intrinsicValues[2], intrinsicValues[3], intrinsicValues[4], intrinsicValues[5], intrinsicValues[6], intrinsicValues[7]);
	multipleTracker->InitializeOpticalFlow(WIDTH, HEIGHT, cvSize(8, 8), 3);
	multipleTracker->SetDetectIntervalTime(1.0/1.0);
	multipleTracker->SetPoseEstimationMethod(windage::RANSAC);
	multipleTracker->SetOutlinerRemove(true);
	multipleTracker->SetRefinement(true);
	multipleTracker->SetPosePointCount(FIND_FEATURE_COUNT);
	multipleTracker->SetFeatureExtractThreshold(30);
	for(int i=0; i<trainingImage.size(); i++)
	{
		std::cout << "attatch reference image #" << i << std::endl;
		multipleTracker->AttatchReferenceImage(trainingImage[i], CUBE_SIZE, CUBE_SIZE, 4.0, 8);
	}

	// for undistortion
	windage::Calibration* calibration = new windage::Calibration();
	calibration->Initialize(intrinsicValues[0], intrinsicValues[1], intrinsicValues[2], intrinsicValues[3], intrinsicValues[4], intrinsicValues[5], intrinsicValues[6], intrinsicValues[7]);
	calibration->InitUndistortionMap(WIDTH, HEIGHT);

	// adaptive threshold
	int fastThreshold = 70;
	const int MAX_FAST_THRESHOLD = 80;
	const int MIN_FAST_THRESHOLD = 40;
	const int ADAPTIVE_THRESHOLD_VALUE = 1000;
	const int THRESHOLD_STEP = 1;

	IplImage* grabFrame = NULL;
	
	bool processing = true;
	cvNamedWindow("result");
	while(processing)
	{
		// camera frame grabbing and convert to gray color
		log->updateTickCount();
		grabFrame = cvQueryFrame(capture);
		cvFlip(grabFrame, undistImage);
		calibration->Undistortion( undistImage, inputImage);
		cvCvtColor(inputImage, grayImage, CV_BGRA2GRAY);
		log->log("capture", log->calculateProcessTime());

		// call tracking algorithm
		log->updateTickCount();
		multipleTracker->SetFeatureExtractThreshold(fastThreshold);
		multipleTracker->UpdateCameraPose(grayImage);

		double trackingTime = log->calculateProcessTime();
		log->log("tracking", trackingTime);
		log->logNewLine();

		// update fast threshold for Adaptive threshold
#ifdef ADAPTIVE_THRESHOLD
		int featureCount = multipleTracker->GetFeatureCount();
		if(featureCount > ADAPTIVE_THRESHOLD_VALUE )	fastThreshold = MIN(MAX_FAST_THRESHOLD, fastThreshold+THRESHOLD_STEP);
		else											fastThreshold = MAX(MIN_FAST_THRESHOLD, fastThreshold-THRESHOLD_STEP);
#endif
		// find max matched plane
		std::vector<int> matcingCountList; matcingCountList.resize(multipleTracker->GetTrackerCount());
		int maxScoreIndex = -1;
		double maxScore = 0.0;
		for(int i=0; i<multipleTracker->GetTrackerCount(); i++)
		{
			double area = CalcReprojectionArea(multipleTracker->GetCameraParameter(i));
			int matchedCount = multipleTracker->GetMatchedCount(i);
			matcingCountList[i] = matchedCount;

			double score = Scoring(area, matchedCount);
			if(score > maxScore)
			{
				maxScore = area;
				maxScoreIndex = i;
			}

			// delete tracking points when too small space
			if(area < 2.0)
			{
				multipleTracker->DeleteTrackingPoints(i);
			}

//			std::cout << area << " : " << matchedCount << " : " << score << std::endl;
		}

		// draw tracking result
		windage::Vector3 eulerRotation;
		if(maxScoreIndex >= 0)
		if(matcingCountList[maxScoreIndex] > FIND_FEATURE_COUNT)
		{
			int i = maxScoreIndex;
			windage::Matrix4 extrinsic = CalculateMarkerExtrinsicParameter(multipleTracker->GetCameraParameter(i), GetRotation(i+1), GetTranslation(i+1));
			calibration->SetExtrinsicMatrix(extrinsic.m1);

			DrawOutLine(calibration, inputImage, false);
			calibration->DrawInfomation(inputImage, CUBE_SIZE);

			CvPoint center = multipleTracker->GetCameraParameter(i)->ConvertWorld2Image(0.0, 0.0, 0.0);
			
			center.x += 10;
			center.y += 10;
			sprintf(message, "Reference #%d", i);
			windage::Utils::DrawTextToImage(inputImage, center, message);

			eulerRotation = GetMarkerRotation(calibration);

//			multipleTracker->DrawDebugInfo(inputImage, maxScoreIndex);
		}

		sprintf(message, "Tracking Time : %.2f(ms)", trackingTime);
		windage::Utils::DrawTextToImage(inputImage, cvPoint(20, 30), message);
		sprintf(message, "FAST feature count : %d, threashold : %d", featureCount, fastThreshold);
		windage::Utils::DrawTextToImage(inputImage, cvPoint(20, 50), message);
		sprintf(message, "Match count ");
		windage::Utils::DrawTextToImage(inputImage, cvPoint(20, 70), message);
		for(int i=0; i<OBJECT_COUNT; i++)
		{
			sprintf(message, "#%d:%d ", i, multipleTracker->GetMatchedCount(i));
			windage::Utils::DrawTextToImage(inputImage, cvPoint(160 + 65*i, 70), message);
		}

		eulerRotation *= 180.0/CV_PI;
		sprintf(message, "Rotation : %.2lf, %.2lf, %.2lf", eulerRotation.x, eulerRotation.y, eulerRotation.z);
		windage::Utils::DrawTextToImage(inputImage, cvPoint(20, 90), message); 


		if(saving)
		{
			if(writer) cvWriteFrame(writer, inputImage);
		}

		char ch = cvWaitKey(1);
		switch(ch)
		{
		case 's':
		case 'S':
			saving = true;
			if(writer) cvReleaseVideoWriter(&writer);
			writer = cvCreateVideoWriter("saveimage\\capture.avi", CV_FOURCC_DEFAULT, 30, cvSize(inputImage->width, inputImage->height), 1);
			break;
		case 'q':
		case 'Q':
			processing = false;
			break;
		}

		cvShowImage("result", inputImage);
	}

	if(writer) cvReleaseVideoWriter(&writer);
	cvReleaseCapture(&capture);
	cvDestroyAllWindows();
}
Пример #3
0
void Display::CSimpleFillSymbol::Draw(void* pObject)
{


	_ASSERT(pObject != NULL);

	DIS_OBJ_TYPE type =((DIS_POLYGON*)pObject)->type;

	if(type == Dis_Rect)
	{
		DIS_RECT  *pDIS_RECT = (DIS_RECT *)pObject;
		if( !CheckNoColor(m_lColor))
		{
			if( pDIS_RECT == NULL || pDIS_RECT->type != Dis_Rect )
				return;

			m_pDC->DrawAndFillRect( *pDIS_RECT );
		}

		if(m_bOutLine)
		{
			m_OutLineSymbol->SetReadyDraw();
			DrawOutLine( pDIS_RECT );
			SetReadyDraw();
		}
	}
	else if(type==Dis_Circle)
	{
		//»æÖÆÔ²
		DIS_CIRCLE* pCircle =(DIS_CIRCLE*)pObject;

		//Ìî³äÔ²
		if( !CheckNoColor(m_lColor))
		{

			m_pDC->FillCircle( *pCircle );
		}

		if(m_bOutLine)
		{
			m_OutLineSymbol->SetReadyDraw();
			DrawOutLine( pCircle );
			SetReadyDraw();
		}

	}
	else if(type==Dis_Ellipse)
	{
		//»æÖÆÍÖÔ²
		DIS_ELLIPSE* pEllipse =(DIS_ELLIPSE*)pObject;

		//Ìî³äÍÖÔ²
		if( !CheckNoColor(m_lColor))
		{

			DIS_RECT rect;
			rect.left =pEllipse->left;
			rect.right =pEllipse->right;
			rect.top =pEllipse->top;
			rect.bottom =pEllipse->bottom;

			m_pDC->FillEllipse( rect );
		}

		if(m_bOutLine)
		{
			m_OutLineSymbol->SetReadyDraw();
			DrawOutLine( pEllipse );
			SetReadyDraw();
		}


	}
	else if(type==Dis_ArcPolygon)
	{
		DIS_ARCPOLYGON *pArcPolygon =(DIS_ARCPOLYGON*)pObject;

		//Ìî³ä
		if( !CheckNoColor(m_lColor))
		{

			m_pDC->FillArcPolygon( pArcPolygon );
		}

		if(m_bOutLine)
		{
			m_OutLineSymbol->SetReadyDraw();
			DrawOutLine( pArcPolygon );
			SetReadyDraw();
		}
	}
	else
	{
		DIS_POLYGON  *pDIS_POLYGON = (DIS_POLYGON *)pObject;
		if( !CheckNoColor(m_lColor))
		{
			if( pDIS_POLYGON == NULL || pDIS_POLYGON->type != Dis_Polygon )
				return;

			m_pDC->DrawPolygon( *pDIS_POLYGON );
		}

		if(m_bOutLine)
		{
			m_OutLineSymbol->SetReadyDraw();
			DrawOutLine( pDIS_POLYGON );
			SetReadyDraw();
		}
	}


}