Example #1
File: widget.cpp  Project: youngjeff/qt
Widget::Widget(QWidget *parent) :
    QGraphicsView(parent),
    ui(new Ui::Widget)
{

    ui->setupUi(this);
    m_scene = new QGraphicsScene;
    setScene(m_scene);
    // scene rect centered on the view's origin, inset by a small margin
    m_scene->setSceneRect(3 - width()/2, 3 - height()/2, width() - 6, height() - 6);

    // the player-controlled bird, placed at the scene origin
    m_bird = new bird();
    m_scene->addItem(m_bird);
    m_bird->setPos(0, 0);

    // top and bottom boundaries of the play field, then start them moving
    m_edge_1 = new edge();
    m_scene->addItem(m_edge_1);
    m_edge_1->setPos(m_edge_1->boundingRect().width()/4, -height()/2 + 7);

    m_edge_2 = new edge();
    m_scene->addItem(m_edge_2);
    m_edge_2->setPos(m_edge_2->boundingRect().width()/4, height()/2 - 7);

    m_edge_1->BeginMove();
    m_edge_2->BeginMove();

    hideButton();

    // barrier timer
    m_timer = new QTimer(this);
    connect(m_timer, SIGNAL(timeout()), this, SLOT(beginBarrier()));

    connect(m_bird, SIGNAL(died()), this, SLOT(onBirdDied()));

    // create the pool of barriers and set their initial layout
    for (int i = 0; i < BARRIER_NUM; i++)
    {
        m_barr[i] = new barrier();
        m_scene->addItem(m_barr[i]);
    }
    initBarrier();

    // scoring state and timers
    num = 0;
    QTimer::singleShot(1000, this, SLOT(Scoring()));

    score = 0;
    ScoringTimer = new QTimer(this);
    connect(ScoringTimer, SIGNAL(timeout()), this, SLOT(Scoring()));

    // settings file used to store the score
    settings = new QSettings("score.ini", QSettings::IniFormat);

//    setWindowFlags(Qt::FramelessWindowHint);
}
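
For orientation, here is a minimal sketch of what the matching widget.h declaration might look like. Only the names that actually appear in the constructor above (Ui::Widget, the bird/edge/barrier item classes, the beginBarrier()/onBirdDied()/Scoring() slots, BARRIER_NUM, and the member pointers) are taken from the example; everything else, including the BARRIER_NUM value, is an assumption rather than the project's real header.

// Hypothetical header sketch reconstructed from the constructor above; not the project's actual widget.h.
#ifndef WIDGET_H
#define WIDGET_H

#include <QGraphicsView>
#include <QGraphicsScene>
#include <QTimer>
#include <QSettings>

#define BARRIER_NUM 3   // assumption: the example only shows that the constant exists

namespace Ui { class Widget; }
class bird;
class edge;
class barrier;

class Widget : public QGraphicsView
{
    Q_OBJECT
public:
    explicit Widget(QWidget *parent = 0);

private slots:
    void beginBarrier();   // driven by m_timer
    void onBirdDied();     // reacts to the bird's died() signal
    void Scoring();        // periodic score update

private:
    void hideButton();
    void initBarrier();

    Ui::Widget *ui;
    QGraphicsScene *m_scene;
    bird *m_bird;
    edge *m_edge_1;
    edge *m_edge_2;
    barrier *m_barr[BARRIER_NUM];
    QTimer *m_timer;
    QTimer *ScoringTimer;
    QSettings *settings;
    int num;
    int score;
};

#endif // WIDGET_H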
Example #2
File: main.cpp  Project: Barbakas/windage
int main()
{
	windage::Logger* log = new windage::Logger(&std::cout);

	// connect camera
	CvCapture* capture = cvCaptureFromCAM(CV_CAP_ANY);

	// saving
	bool saving = false;
	CvVideoWriter* writer = NULL;

	char message[100];
	IplImage* inputImage = cvCreateImage(cvSize(WIDTH, HEIGHT), IPL_DEPTH_8U, 3);
	IplImage* undistImage = cvCreateImage(cvSize(WIDTH, HEIGHT), IPL_DEPTH_8U, 3);
	IplImage* grayImage = cvCreateImage(cvGetSize(inputImage), IPL_DEPTH_8U, 1);
	IplImage* resultImage = cvCreateImage(cvSize(WIDTH, HEIGHT), IPL_DEPTH_8U, 3);

	// Multiple tracker initialization
	std::vector<IplImage*> trainingImage;
	for(int i=1; i<=OBJECT_COUNT; i++)
	{
		sprintf(message, "cube/reference%d.png", i);
		trainingImage.push_back(cvLoadImage(message, 0));
	}

	windage::MultipleSURFTracker* multipleTracker = new windage::MultipleSURFTracker();
	multipleTracker->Initialize(intrinsicValues[0], intrinsicValues[1], intrinsicValues[2], intrinsicValues[3], intrinsicValues[4], intrinsicValues[5], intrinsicValues[6], intrinsicValues[7]);
	multipleTracker->InitializeOpticalFlow(WIDTH, HEIGHT, cvSize(8, 8), 3);
	multipleTracker->SetDetectIntervalTime(1.0/1.0);
	multipleTracker->SetPoseEstimationMethod(windage::RANSAC);
	multipleTracker->SetOutlinerRemove(true);
	multipleTracker->SetRefinement(true);
	multipleTracker->SetPosePointCount(FIND_FEATURE_COUNT);
	multipleTracker->SetFeatureExtractThreshold(30);
	for(int i=0; i<trainingImage.size(); i++)
	{
		std::cout << "attatch reference image #" << i << std::endl;
		multipleTracker->AttatchReferenceImage(trainingImage[i], CUBE_SIZE, CUBE_SIZE, 4.0, 8);
	}

	// for undistortion
	windage::Calibration* calibration = new windage::Calibration();
	calibration->Initialize(intrinsicValues[0], intrinsicValues[1], intrinsicValues[2], intrinsicValues[3], intrinsicValues[4], intrinsicValues[5], intrinsicValues[6], intrinsicValues[7]);
	calibration->InitUndistortionMap(WIDTH, HEIGHT);

	// adaptive threshold
	int fastThreshold = 70;
	const int MAX_FAST_THRESHOLD = 80;
	const int MIN_FAST_THRESHOLD = 40;
	const int ADAPTIVE_THRESHOLD_VALUE = 1000;
	const int THRESHOLD_STEP = 1;

	IplImage* grabFrame = NULL;
	
	bool processing = true;
	cvNamedWindow("result");
	while(processing)
	{
		// camera frame grabbing and convert to gray color
		log->updateTickCount();
		grabFrame = cvQueryFrame(capture);
		cvFlip(grabFrame, undistImage);
		calibration->Undistortion( undistImage, inputImage);
		cvCvtColor(inputImage, grayImage, CV_BGR2GRAY);
		log->log("capture", log->calculateProcessTime());

		// call tracking algorithm
		log->updateTickCount();
		multipleTracker->SetFeatureExtractThreshold(fastThreshold);
		multipleTracker->UpdateCameraPose(grayImage);

		double trackingTime = log->calculateProcessTime();
		log->log("tracking", trackingTime);
		log->logNewLine();

		// update the FAST threshold for adaptive thresholding
		int featureCount = multipleTracker->GetFeatureCount();
#ifdef ADAPTIVE_THRESHOLD
		if(featureCount > ADAPTIVE_THRESHOLD_VALUE)	fastThreshold = MIN(MAX_FAST_THRESHOLD, fastThreshold + THRESHOLD_STEP);
		else										fastThreshold = MAX(MIN_FAST_THRESHOLD, fastThreshold - THRESHOLD_STEP);
#endif
		// find max matched plane
		std::vector<int> matchingCountList(multipleTracker->GetTrackerCount());
		int maxScoreIndex = -1;
		double maxScore = 0.0;
		for(int i=0; i<multipleTracker->GetTrackerCount(); i++)
		{
			double area = CalcReprojectionArea(multipleTracker->GetCameraParameter(i));
			int matchedCount = multipleTracker->GetMatchedCount(i);
			matchingCountList[i] = matchedCount;

			double score = Scoring(area, matchedCount);
			if(score > maxScore)
			{
				maxScore = score;
				maxScoreIndex = i;
			}

			// delete tracking points when too small space
			if(area < 2.0)
			{
				multipleTracker->DeleteTrackingPoints(i);
			}

//			std::cout << area << " : " << matchedCount << " : " << score << std::endl;
		}

		// draw tracking result
		windage::Vector3 eulerRotation;
		if(maxScoreIndex >= 0 && matchingCountList[maxScoreIndex] > FIND_FEATURE_COUNT)
		{
			int i = maxScoreIndex;
			windage::Matrix4 extrinsic = CalculateMarkerExtrinsicParameter(multipleTracker->GetCameraParameter(i), GetRotation(i+1), GetTranslation(i+1));
			calibration->SetExtrinsicMatrix(extrinsic.m1);

			DrawOutLine(calibration, inputImage, false);
			calibration->DrawInfomation(inputImage, CUBE_SIZE);

			CvPoint center = multipleTracker->GetCameraParameter(i)->ConvertWorld2Image(0.0, 0.0, 0.0);
			
			center.x += 10;
			center.y += 10;
			sprintf(message, "Reference #%d", i);
			windage::Utils::DrawTextToImage(inputImage, center, message);

			eulerRotation = GetMarkerRotation(calibration);

//			multipleTracker->DrawDebugInfo(inputImage, maxScoreIndex);
		}

		sprintf(message, "Tracking Time : %.2f(ms)", trackingTime);
		windage::Utils::DrawTextToImage(inputImage, cvPoint(20, 30), message);
		sprintf(message, "FAST feature count : %d, threashold : %d", featureCount, fastThreshold);
		windage::Utils::DrawTextToImage(inputImage, cvPoint(20, 50), message);
		sprintf(message, "Match count ");
		windage::Utils::DrawTextToImage(inputImage, cvPoint(20, 70), message);
		for(int i=0; i<OBJECT_COUNT; i++)
		{
			sprintf(message, "#%d:%d ", i, multipleTracker->GetMatchedCount(i));
			windage::Utils::DrawTextToImage(inputImage, cvPoint(160 + 65*i, 70), message);
		}

		eulerRotation *= 180.0/CV_PI;
		sprintf(message, "Rotation : %.2lf, %.2lf, %.2lf", eulerRotation.x, eulerRotation.y, eulerRotation.z);
		windage::Utils::DrawTextToImage(inputImage, cvPoint(20, 90), message); 


		if(saving)
		{
			if(writer) cvWriteFrame(writer, inputImage);
		}

		char ch = cvWaitKey(1);
		switch(ch)
		{
		case 's':
		case 'S':
			saving = true;
			if(writer) cvReleaseVideoWriter(&writer);
			writer = cvCreateVideoWriter("saveimage\\capture.avi", CV_FOURCC_DEFAULT, 30, cvSize(inputImage->width, inputImage->height), 1);
			break;
		case 'q':
		case 'Q':
			processing = false;
			break;
		}

		cvShowImage("result", inputImage);
	}

	if(writer) cvReleaseVideoWriter(&writer);
	cvReleaseCapture(&capture);
	cvDestroyAllWindows();

	return 0;
}
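
The example is written against OpenCV's legacy C API (CvCapture, IplImage, cv* functions). As a point of comparison, below is a hedged sketch of just the capture/record/display skeleton of the main loop using the current C++ API; the windage tracking and drawing calls are omitted, and the camera index, codec, and frame rate are assumptions rather than values taken from the original project.

// Hypothetical sketch: capture/record/display skeleton only, with the windage calls left out.
#include <opencv2/opencv.hpp>
#include <iostream>

int main()
{
	cv::VideoCapture capture(0);   // assumption: default camera, like CV_CAP_ANY
	if(!capture.isOpened())
	{
		std::cerr << "failed to open camera" << std::endl;
		return 1;
	}

	cv::VideoWriter writer;
	bool saving = false;
	bool processing = true;

	cv::Mat grabFrame, inputImage, grayImage;
	while(processing)
	{
		capture >> grabFrame;
		if(grabFrame.empty()) break;

		cv::flip(grabFrame, inputImage, 0);                      // same vertical flip as cvFlip above
		cv::cvtColor(inputImage, grayImage, cv::COLOR_BGR2GRAY); // gray frame for the tracker

		// ... tracking, pose estimation, and overlay drawing would go here ...

		if(saving && writer.isOpened())
			writer.write(inputImage);

		cv::imshow("result", inputImage);
		char ch = (char)cv::waitKey(1);
		switch(ch)
		{
		case 's':
		case 'S':
			// assumption: MJPG at 30 fps; the original uses CV_FOURCC_DEFAULT
			writer.open("saveimage/capture.avi", cv::VideoWriter::fourcc('M','J','P','G'), 30, inputImage.size());
			saving = true;
			break;
		case 'q':
		case 'Q':
			processing = false;
			break;
		}
	}
	return 0;
}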