vector<pair<VectorXd, VectorXd>> RectificationController::pointsToLinesPairs(ClickableLabel* label)
	{
		CircularList<SelectedPixel*>* points = label->getSelectedPixels();
		
		vector<pair<VectorXd, VectorXd>> linePairs;
		VectorXd linePair[2];
			
		for (int j = 0; j < points->size() / 4; ++j) // Parallel line pair creation: each pair consumes four selected points.
		{
			for (int i = 0; i < 2; ++i) // Line creation.
			{
				QPoint qP0 = (*points)[j*4 + i*2]->getPos();
				QPoint qP1 = (*points)[j*4 + i*2 + 1]->getPos();
				Vector3d p0(qP0.x(), qP0.y(), 1.);
				Vector3d p1(qP1.x(), qP1.y(), 1.);
				Vector3d line = p0.cross(p1);
				line /= line[2];
				
				cout << "Image size: " << endl << label->size().width() << endl
				<< label->size().height() << endl << endl << "P0: " << endl << p0 << endl << endl
				<< "P1: " << endl << p1 << endl << endl << "Line: " << endl << line << endl << endl;
				
				linePair[i] = line;
			}
			linePairs.push_back( pair<VectorXd, VectorXd>(linePair[0], linePair[1]) );
		}
		
		return linePairs;
	}
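	// A minimal standalone sketch of the line-from-points step above (the helper name
	// below is only illustrative), assuming Eigen is available: in homogeneous
	// coordinates the line through two points is their cross product, so it satisfies
	// line.dot(p0) == line.dot(p1) == 0, and dividing by the last coefficient matches
	// the normalization used in pointsToLinesPairs().
	static Eigen::Vector3d lineThroughPointsSketch()
	{
		Eigen::Vector3d p0(100., 50., 1.);   // image point (100, 50) in homogeneous coordinates
		Eigen::Vector3d p1(200., 80., 1.);   // image point (200, 80) in homogeneous coordinates
		Eigen::Vector3d line = p0.cross(p1); // line through p0 and p1
		return line / line[2];               // normalize so the last coefficient is 1
	}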
	QPixmap RectificationController::assistedFromProjectionToSimilarity(ClickableLabel* projectedImageLabel, const QSize& POISize,
											 bool pointOfInterestFlag)
	{	
		// First, create pairs of point correspondences between projection and world space.
		CircularList<SelectedPixel*>* selectedPixelsProj = projectedImageLabel->getSelectedPixels();
		int numSelectedPixels = selectedPixelsProj->size();
		if (numSelectedPixels != 4)
		{
			throw logic_error("4 points that define the region of interest should be "
				"selected in the label."
			);
		}
		
		vector<QPoint> selectedPixelsWorld(numSelectedPixels);
		selectedPixelsWorld[0] = QPoint(0, 0);
		selectedPixelsWorld[1] = selectedPixelsWorld[0] + QPoint(0, POISize.height());
		selectedPixelsWorld[2] = selectedPixelsWorld[0] + QPoint(POISize.width(), POISize.height());
		selectedPixelsWorld[3] = selectedPixelsWorld[0] + QPoint(POISize.width(), 0);
		
		vector<pair<VectorXd, VectorXd>> correlationPoints(numSelectedPixels);
		
		for (int i = 0; i < numSelectedPixels; ++i)
		{
			QPoint qProjectedPoint = (*selectedPixelsProj)[i]->getPos();
			QPoint qWorldPoint = selectedPixelsWorld[i];
			
			VectorXd projectedPoint(3);
			projectedPoint << qProjectedPoint.x(), qProjectedPoint.y(), 1.;
			VectorXd worldPoint(3);
			worldPoint << qWorldPoint.x(), qWorldPoint.y(), 1.;
			
			pair<VectorXd, VectorXd> correlationPair(projectedPoint, worldPoint);
			correlationPoints[i] = correlationPair;
		}
		
		// Second, define the projection to world transformation.
		AssistedSimilarityFromProjRectificator rectificator(correlationPoints);
		MatrixXd projToWorld = *rectificator.getTransformation();
		
		// Qt uses the transpose of the usual transformation representation.
		QTransform qProjToWorld(
			projToWorld(0, 0), projToWorld(1, 0), projToWorld(2, 0),
			projToWorld(0, 1), projToWorld(1, 1), projToWorld(2, 1),
			projToWorld(0, 2), projToWorld(1, 2), projToWorld(2, 2)
		);
		
		QPixmap rectifiedPixmap = projectedImageLabel->pixmap()->transformed(qProjToWorld, Qt::SmoothTransformation);
		
		if (pointOfInterestFlag)
		{
			return selectPointOfInterest(rectifiedPixmap, qProjToWorld, projectedImageLabel->pixmap()->size(), (*selectedPixelsProj)[0]->getPos(), POISize);
		}
		else
		{
			return rectifiedPixmap;
		}
	}
int main()
{
	CircularList<int> A;
	A.InsertFront(1);
	A.InsertFront(2);
	A.InsertFront(3);
	A.InsertFront(4);
	ChainIterator<int> B(A);
	for (; B != A.header; ++B)
	{
		cout << *B << " ";
	}
}
	QPixmap RectificationController::toAffineFromProjection(ClickableLabel* projectedImageLabel)
	{
		CircularList<SelectedPixel*>* points = projectedImageLabel->getSelectedPixels();
		if (points->size() != 8)
		{
			throw logic_error("8 points that define two pairs of parallel lines in affine space should be"
				"selected in the label"
			);
		}
		
		vector<pair<VectorXd, VectorXd>> parallelPairs = pointsToLinesPairs(projectedImageLabel);
		
		AffineFromProjRectificator rectificator(parallelPairs);
		MatrixXd projToAffine = *rectificator.getTransformation();
		
		// Qt uses the transpose of the usual transformation representation.
		QTransform qProjToAffine(
			projToAffine(0, 0), projToAffine(1, 0), projToAffine(2, 0),
			projToAffine(0, 1), projToAffine(1, 1), projToAffine(2, 1),
			projToAffine(0, 2), projToAffine(1, 2), projToAffine(2, 2)
		);
		
		return projectedImageLabel->pixmap()->transformed(qProjToAffine, Qt::SmoothTransformation);
	}
	QPixmap RectificationController::toSimilarityFromProjection(ClickableLabel* projectionImageLabel)
	{
		CircularList<SelectedPixel*>* points = projectionImageLabel->getSelectedPixels();
		if (points->size() != 20)
		{
			throw logic_error("20 points that define five pairs of orthogonal lines in similarity space should be"
				"selected in the label."
			);
		}
		
		vector<pair<VectorXd, VectorXd>> orthoPairs = pointsToLinesPairs(projectionImageLabel);
		SimilarityFromProjRectificator rectificator(orthoPairs);
		
		MatrixXd projToSimilarity = *rectificator.getTransformation();
		
		// Qt uses the transpose of the usual transformation representation.
		QTransform qProjToSimilarity(
			projToSimilarity(0, 0), projToSimilarity(1, 0), projToSimilarity(2, 0),
			projToSimilarity(0, 1), projToSimilarity(1, 1), projToSimilarity(2, 1),
			projToSimilarity(0, 2), projToSimilarity(1, 2), projToSimilarity(2, 2)
		);
		
		return projectionImageLabel->pixmap()->transformed(qProjToSimilarity, Qt::SmoothTransformation);
	}
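	// The same Eigen-to-Qt conversion is repeated in the three rectification
	// functions above, so here is a hedged sketch of a shared helper (the name
	// eigenToQTransform is only illustrative, not part of the original class):
	// QTransform expects the transpose of the usual column-vector convention,
	// hence the matrix entries are passed column by column.
	static QTransform eigenToQTransform(const Eigen::MatrixXd& m)
	{
		return QTransform(
			m(0, 0), m(1, 0), m(2, 0),
			m(0, 1), m(1, 1), m(2, 1),
			m(0, 2), m(1, 2), m(2, 2)
		);
	}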
int main()
{
	srand((unsigned)time(NULL));
	string playerName;
	int nrOfCardDecks = 1;
	int cardSum = 0;
	int cardPos = 0;
	Card lastCard;

	Card cardDeck[SIZE];

	makeCardDeck(cardDeck);
	shuffleCardDeck(cardDeck);
	
	CircularList<Player> players;

	addPlayers(players);
	cout<<endl;
	cardPos = dealCards(cardDeck, players);
	
	while (players.size() > 1)
	{
		cout<<"Ny runda"<<endl;

		cardSum = 0;
		while (handleCurrentPlayer(cardDeck, cardPos, players, cardSum));
		
		cout<<players.currentItem().getName()<<" förlorade dennna runda"<<endl;
		players.remove(players.currentItem());
		// new deal
		shuffleCardDeck(cardDeck);
		cardPos = dealCards(cardDeck, players);
	}

	cout<<"Segrare :"<<players.currentItem().getName()<<endl;

	return 0;	
}
Example #7
void OBS::EncodeLoop()
{
    QWORD streamTimeStart = GetQPCTimeNS();
    QWORD frameTimeNS = 1000000000/fps;
    bool bufferedFrames = true; //to avoid constantly polling number of frames
    int numTotalDuplicatedFrames = 0, numTotalFrames = 0, numFramesSkipped = 0;

    CircularList<QWORD> bufferedTimes; // declared here so it exists before its first use
    bufferedTimes.Clear();

    bool bUsingQSV = videoEncoder->isQSV();//GlobalConfig->GetInt(TEXT("Video Encoding"), TEXT("UseQSV")) != 0;

    QWORD sleepTargetTime = streamTimeStart+frameTimeNS;
    latestVideoTime = firstSceneTimestamp = streamTimeStart/1000000;
    latestVideoTimeNS = streamTimeStart;

    firstFrameTimestamp = 0;

    UINT encoderInfo = 0;
    QWORD messageTime = 0;

    EncoderPicture *lastPic = NULL;

    UINT skipThreshold = encoderSkipThreshold*2;
    UINT no_sleep_counter = 0;

    while(!bShutdownEncodeThread || (bufferedFrames && !bTestStream)) {
        if (!SleepToNS(sleepTargetTime += (frameTimeNS/2)))
            no_sleep_counter++;
        else
            no_sleep_counter = 0;

        latestVideoTime = sleepTargetTime/1000000;
        latestVideoTimeNS = sleepTargetTime;

        if (no_sleep_counter < skipThreshold) {
            SetEvent(hVideoEvent);
            if (encoderInfo) {
                if (messageTime == 0) {
                    messageTime = latestVideoTime+3000;
                } else if (latestVideoTime >= messageTime) {
                    RemoveStreamInfo(encoderInfo);
                    encoderInfo = 0;
                    messageTime = 0;
                }
            }
        } else {
            numFramesSkipped++;
            if (!encoderInfo)
                encoderInfo = AddStreamInfo(Str("EncoderLag"), StreamInfoPriority_Critical);
            messageTime = 0;
        }

        if (!SleepToNS(sleepTargetTime += (frameTimeNS/2)))
            no_sleep_counter++;
        else
            no_sleep_counter = 0;
        bufferedTimes << latestVideoTime;

        if (curFramePic && firstFrameTimestamp) {
            while (bufferedTimes[0] < firstFrameTimestamp)
                bufferedTimes.Remove(0);

            DWORD curFrameTimestamp = DWORD(bufferedTimes[0] - firstFrameTimestamp);
            bufferedTimes.Remove(0);

            profileIn("encoder thread frame");

            FrameProcessInfo frameInfo;
            frameInfo.firstFrameTime = firstFrameTimestamp;
            frameInfo.frameTimestamp = curFrameTimestamp;
            frameInfo.pic = curFramePic;

            if (lastPic == frameInfo.pic)
                numTotalDuplicatedFrames++;

            if(bUsingQSV)
                curFramePic->mfxOut->Data.TimeStamp = curFrameTimestamp;
            else
                curFramePic->picOut->i_pts = curFrameTimestamp;

            ProcessFrame(frameInfo);

            if (bShutdownEncodeThread)
                bufferedFrames = videoEncoder->HasBufferedFrames();

            lastPic = frameInfo.pic;

            profileOut;

            numTotalFrames++;
        }
    }

    //flush all video frames in the "scene buffering time" buffer
    if (firstFrameTimestamp && bufferedVideo.Num())
    {
        QWORD startTime = GetQPCTimeMS();
        DWORD baseTimestamp = bufferedVideo[0].timestamp;

        for(UINT i=0; i<bufferedVideo.Num(); i++)
        {
            //we measure our own time rather than sleep between frames due to potential sleep drift
            QWORD curTime;
            do
            {
                curTime = GetQPCTimeMS();
                OSSleep (1);
            } while (curTime - startTime < bufferedVideo[i].timestamp - baseTimestamp);

            SendFrame(bufferedVideo[i], firstFrameTimestamp);
            bufferedVideo[i].Clear();

            numTotalFrames++;
        }

        bufferedVideo.Clear();
    }

    Log(TEXT("Total frames encoded: %d, total frames duplicated: %d (%0.2f%%)"), numTotalFrames, numTotalDuplicatedFrames, (numTotalFrames > 0) ? (double(numTotalDuplicatedFrames)/double(numTotalFrames))*100.0 : 0.0f);
    if (numFramesSkipped)
        Log(TEXT("Number of frames skipped due to encoder lag: %d (%0.2f%%)"), numFramesSkipped, (numTotalFrames > 0) ? (double(numFramesSkipped)/double(numTotalFrames))*100.0 : 0.0f);

    SetEvent(hVideoEvent);
    bShutdownVideoThread = true;
}
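// A minimal sketch of the frame-pacing pattern used in EncodeLoop above, with
// std::chrono/std::thread standing in for OBS's GetQPCTimeNS/SleepToNS (the
// function name and parameters here are only illustrative): sleep toward an
// absolute target each iteration so timing errors do not accumulate, and count
// consecutive missed deadlines to detect encoder lag.
#include <atomic>
#include <chrono>
#include <thread>

void framePacingSketch(int fps, int skipThreshold, const std::atomic<bool>& stop)
{
    using clock = std::chrono::steady_clock;
    const auto frameTime = std::chrono::nanoseconds(1000000000LL / fps);
    auto target = clock::now() + frameTime;
    int missedDeadlines = 0;

    while (!stop) {
        if (clock::now() > target)
            ++missedDeadlines;  // already past the target: this sleep was "missed"
        else
            missedDeadlines = 0;
        std::this_thread::sleep_until(target);

        if (missedDeadlines >= skipThreshold) {
            // Encoder is lagging: the real loop raises an "EncoderLag" stream
            // warning and skips signalling the video thread for this frame.
        }

        target += frameTime;    // advance by a fixed step; no drift accumulates
    }
}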
Example #8
void task3() {
  CircularList<int> clist;  
  cout << clist << endl;
  clist.insert(0,8);
  cout << clist << endl;
  clist.insert(1,5);
  cout << clist << endl;
  clist.insert(2,4);
  cout << clist << endl;
  clist.insert(0,0);
  cout << clist << endl;
  clist.insert(3,7);
  cout << clist << endl;
  clist.insert(5,9);
  cout << clist << endl;

  try { clist.insert(10,99); } catch (const char * e) { cout << e << endl; }

  cout << endl;
  
  CircularList<int> clist2(clist); 

  cout << clist.remove(5) << ": " << clist << endl;
  cout << clist.remove(1) << ": " << clist << endl;
  cout << clist.remove(0) << ": " << clist << endl;

  cout << endl;

  cout << clist2 << endl;
  clist = clist2;
  clist2.clear();
  if(clist2.isEmpty()) cout << "Clear works correctly" << endl;
  cout << clist2 << endl;
  cout << clist << endl;
  cout << endl;

  CircularList<int> * cPtr = clist.clone();
  cout << *cPtr << endl;
  cout << cPtr->get(3) << " " << cPtr->get(0) << " " << cPtr->get(5) << endl;
  cout << *cPtr << endl;
  delete cPtr;
}
Example #9
void OBS::EncodeLoop()
{
    QWORD streamTimeStart = GetQPCTimeNS();
    QWORD frameTimeNS = 1000000000/fps;
    bool bufferedFrames = true; //to avoid constantly polling number of frames
    int numTotalDuplicatedFrames = 0, numTotalFrames = 0;

    CircularList<QWORD> bufferedTimes; // declared here so it exists before its first use
    bufferedTimes.Clear();

    bool bUsingQSV = videoEncoder->isQSV();//GlobalConfig->GetInt(TEXT("Video Encoding"), TEXT("UseQSV")) != 0;

    QWORD sleepTargetTime = streamTimeStart+frameTimeNS;
    latestVideoTime = firstSceneTimestamp = streamTimeStart/1000000;
    latestVideoTimeNS = streamTimeStart;

    firstFrameTimestamp = 0;

    EncoderPicture *lastPic = NULL;

    while(!bShutdownEncodeThread || (bufferedFrames && !bTestStream)) {
        SetEvent(hVideoEvent);
        SleepToNS(sleepTargetTime);
        latestVideoTime = sleepTargetTime/1000000;
        latestVideoTimeNS = sleepTargetTime;

        bufferedTimes << latestVideoTime;

        if (curFramePic && firstFrameTimestamp) {
            while (bufferedTimes[0] < firstFrameTimestamp)
                bufferedTimes.Remove(0);

            DWORD curFrameTimestamp = DWORD(bufferedTimes[0] - firstFrameTimestamp);
            bufferedTimes.Remove(0);

            profileIn("encoder thread frame");

            FrameProcessInfo frameInfo;
            frameInfo.firstFrameTime = firstFrameTimestamp;
            frameInfo.frameTimestamp = curFrameTimestamp;
            frameInfo.pic = curFramePic;

            if (lastPic == frameInfo.pic)
                numTotalDuplicatedFrames++;

            if(bUsingQSV)
                curFramePic->mfxOut->Data.TimeStamp = curFrameTimestamp;
            else
                curFramePic->picOut->i_pts = curFrameTimestamp;

            ProcessFrame(frameInfo);

            if (bShutdownEncodeThread)
                bufferedFrames = videoEncoder->HasBufferedFrames();

            lastPic = frameInfo.pic;

            profileOut;

            numTotalFrames++;
        }

        sleepTargetTime += frameTimeNS;
    }

    //flush all video frames in the "scene buffering time" buffer
    if (firstFrameTimestamp && bufferedVideo.Num())
    {
        QWORD startTime = GetQPCTimeMS();
        DWORD baseTimestamp = bufferedVideo[0].timestamp;

        for(UINT i=0; i<bufferedVideo.Num(); i++)
        {
            //we measure our own time rather than sleep between frames due to potential sleep drift
            QWORD curTime;
            do
            {
                curTime = GetQPCTimeMS();
                OSSleep (1);
            } while (curTime - startTime < bufferedVideo[i].timestamp - baseTimestamp);

            SendFrame(bufferedVideo[i], firstFrameTimestamp);
            bufferedVideo[i].Clear();

            numTotalFrames++;
        }

        bufferedVideo.Clear();
    }

    Log(TEXT("Total frames encoded: %d, total frames duplicated %d (%0.2f%%)"), numTotalFrames, numTotalDuplicatedFrames, (double(numTotalDuplicatedFrames)/double(numTotalFrames))*100.0);

    SetEvent(hVideoEvent);
    bShutdownVideoThread = true;
}
Example #10
void GfxCanvas::dumpTiming()
{
    for(int ii = 0; ii < gfxCanvasTiming.size(); ++ii)
        qWarning() << gfxCanvasTiming[ii].first << gfxCanvasTiming[ii].second;
    gfxCanvasTiming.clear();
}
Example #11
void GfxCanvas::addFrameTime(int time, const QRegion &region)
{
    gfxCanvasTiming.append(qMakePair(time, region));
}
Example #12
int main()
{
	CircularList<int> list;
	for(int i=0;i<20;i++){
		list.Insert(i*3,i);
	}
	cout<<"the Length of the list is "<<list.Length()<<endl;
	list.Print();
	for(int j=0;j<5;j++){
		list.Insert(3,j*3);
	}
	cout<<"the Length of the list is "<<list.Length()<<endl;
	list.Print();

	list.Remove(5);
	cout<<"the Length of the list is "<<list.Length()<<endl;
	list.Print();

	list.RemoveAll(3);
	cout<<"the Length of the list is "<<list.Length()<<endl;
	list.Print();

	cout<<"The third element is "<<list.Get(3)<<endl;

	list.MakeEmpty();
	cout<<"the Length of the list is "<<list.Length()<<endl;
	list.Print();


	return 0;
}