void CorrectionDialog::updatePreview()
{
    PNM tImage(image());

    if (ui->previewBox->isChecked())
    {
        Correction bc(&tImage);
        bc.setParameter("shift", shift);
        bc.setParameter("factor", multiplier);
        bc.setParameter("gamma", gamma);
        bc.setParameter("silent", true);
        ui->preview->resize(image().size());
        PNM* ptr_tImage = bc.transform();
        ui->preview->setPixmap(QPixmap::fromImage(ptr_tImage->copy()));
        delete ptr_tImage;
    }
    else
        ui->preview->setPixmap(QPixmap::fromImage(image()));
}
// ---- Code example #2 ----
// Acquire one stereo image pair for a processing cycle, update the frame
// timestamps, and push the timestamped pair onto the shared storage deque.
//
// Two modes:
//  * Live cameras (cameras.useCameras): grab a pair from the hardware and,
//    unless previously captured (already rectified) frames are being reused
//    (synchCheckFlag != 0), run stereo rectification on them.
//  * File playback: read the side-by-side "LeftRight<N>.bmp" file from
//    camInputImgDir, convert to grayscale, split it into halves, and advance
//    the frame counter.
//
// Playback shutdown/loop policy (camInputFinalImg):
//   > 0 : request shutdown once the counter passes this value
//   -1  : request shutdown at the first missing image file
//   -10 : loop back to camInputStartImg; shut down only if the first
//         frame is missing too
//
// @param leftImg  [out] left image; in playback mode its current size also
//                 defines how the combined file image is split in half
// @param rightImg [out] right image
void opticsmountGSP::captureAndRectifyImages(cv::Mat& leftImg, cv::Mat& rightImg)
{
	cout << "captureAndRectifyImages entered" << endl;

	if (cameras.useCameras)
	{
		// Grab a (possibly synchronized) pair directly from the hardware.
		GSPretVal = cameras.captureTwoImages(leftImg, rightImg, &cameras.leftImgNum, &cameras.rightImgNum, this->synchCheckFlag);
		if (GSPretVal != 0)
		{
			printf("Capture Two Images Failed\n");
		}

		this->updateTime();

		// Roll the timestamps forward and measure the inter-frame gap.
		this->prevImageTimeStamp = this->imageTimeStamp;
		this->imageTimeStamp = this->testtime;
		double deltaTime = imageTimeStamp - prevImageTimeStamp;

		// Large frame-gap diagnostic (output currently disabled).
		if (deltaTime > 2*(1000.0 / this->cameras.getFrameRate())) {
///			std::cout << "Lrg DeltaTime: " << deltaTime << "at: " << imageTimeStamp << std::endl;
		}

/*
		if(this->datastorage.unrectifiedImageStorage) {
			pthread_mutex_lock(&this->datastorage.storage_mutex);
			leftImage.copyTo(this->datastorage.leftStorageImgUnrect);
			rightImage.copyTo(this->datastorage.rightStorageImgUnrect);
			pthread_mutex_unlock(&this->datastorage.storage_mutex);
		}
*/
		// rectify images, but only if this->synchCheckFlag == 0, because in this
		// case previous image frames are used, which are already rectified
		if (this->synchCheckFlag == 0 && rectifier.rectifierOn)
		{
			GSPretVal = rectifier.rectifyImages(leftImg, rightImg);
			if (GSPretVal != 0)
			{
				printf("Image rectification failed \n");
			}
		}

		// NOTE(review): a disabled video-streaming preview block (pyrDown +
		// side-by-side compose) used to sit here; its body was entirely
		// commented out, so the dead branch was removed. Recover it from
		// version history if streaming previews are reinstated.
	}
	else
	{
		stringstream currFileName;
		cv::Mat currCombinedImg;

		// Pace playback at the nominal camera frame rate (microseconds).
		double imgTimestep = 1.0E6 / this->cameras.getFrameRate();
		usleep(static_cast<useconds_t>(imgTimestep));

		// Never start before the configured first frame.
		if(this->camInputImgCounter < this->camInputStartImg)
		{
			this->camInputImgCounter = this->camInputStartImg;
		}

		currFileName.str("");
		currFileName << this->camInputImgDir << "/LeftRight" << this->camInputImgCounter << ".bmp";

		// End if greater than the specified final frame number (when positive).
		if(this->camInputFinalImg > 0 && this->camInputImgCounter > this->camInputFinalImg)
		{
			std::cout << "Last Image read: " << this->camInputImgCounter << std::endl;
			this->shutdownCriterion = true;
		}

		currCombinedImg = cv::imread(currFileName.str());

		if (currCombinedImg.empty())
		{
			std::cout << "Empty Image: " << currFileName.str() << std::endl;

			if (this->camInputFinalImg == -1)
			{
				// Stop at the first missing frame.
				this->shutdownCriterion = true;
			}
			else if (this->camInputFinalImg == -10)
			{
				// Loop mode: restart playback from the first frame.
				this->camInputImgCounter = this->camInputStartImg;
				currFileName.str("");
				currFileName << this->camInputImgDir << "/LeftRight" << this->camInputImgCounter << ".bmp";

				std::cout << "Re-Reading Image: " << currFileName.str() << std::endl;
				currCombinedImg = cv::imread(currFileName.str());

				if (currCombinedImg.empty())
				{
					std::cout << "Two Empty Images, shutting down: " << currFileName.str();
					this->shutdownCriterion = true;
					return;
				}
			}
		}

		// Split the side-by-side grayscale frame into its two halves,
		// using the caller-provided image geometry.
		cvtColor(currCombinedImg, currCombinedImg, CV_BGR2GRAY);
		cv::Mat imgLeft = currCombinedImg(cv::Rect(0, 0, leftImg.cols, leftImg.rows));
		cv::Mat imgRight = currCombinedImg(cv::Rect(leftImg.cols, 0, rightImg.cols, rightImg.rows));
		imgLeft.copyTo(leftImg);
		imgRight.copyTo(rightImg);

		currFileName.str("");
		this->camInputImgCounter++;

		this->updateTime();

		// Same timestamp bookkeeping as the live-camera path.
		this->prevImageTimeStamp = this->imageTimeStamp;
		this->imageTimeStamp = this->testtime;
		double deltaTime = imageTimeStamp - prevImageTimeStamp;

		if (deltaTime > 2*(1000.0 / this->cameras.getFrameRate()))
		{
//			std::cout << "Lrg DeltaTime: " << deltaTime << "at: " << imageTimeStamp << std::endl;
		}
	}

	struct timespec timeA, timeB;
	clock_gettime(CLOCK_REALTIME, &timeA);

	// BUG FIX: this previously read `leftImage`/`rightImage`, names that do
	// not exist in this function (the parameters are leftImg/rightImg, and
	// the single-camera variant stores its parameter here). Store the pair
	// actually captured/loaded above. TODO(review): confirm no class members
	// named leftImage/rightImage were intended instead.
	timestampedImage tImage(this->imageTimeStamp, leftImg, rightImg);

	// Publish the frame pair for consumers under the deque lock.
	pthread_mutex_lock(&storageDeque_mutex);
	storageImg_deque.push_front(tImage);
	pthread_mutex_unlock(&storageDeque_mutex);

	clock_gettime(CLOCK_REALTIME, &timeB);
	double save_time_diff = timeDiff(&timeA, &timeB);
	//std::cout << "Time to add to deque: " << save_time_diff << std::endl;
}
// ---- Code example #3 ----
// Acquire a single image for one processing cycle, update the frame
// timestamps, and push the timestamped image onto the shared storage deque.
//
// Two modes:
//  * Live camera (camera.useCamera): grab one frame from the hardware.
//  * File playback: read "SingleImage<N>.bmp" from camInputImgDir and
//    advance the frame counter.
//
// Playback shutdown/loop policy (camInputFinalImg):
//   > 0 : request shutdown once the counter passes this value
//   -1  : request shutdown at the first missing image file
//   -10 : loop back to camInputStartImg; shut down only if the first
//         frame is missing too
//
// @param singleImg [out] the captured or loaded image
void dockingportGSP::captureAndRectifySingleImage(cv::Mat& singleImg) //main function for image capture
{
	if (camera.useCamera)
	{
		GSPretVal = camera.captureOneImage(singleImg);
		if (GSPretVal != 0)
		{
			printf("Capture One Image Failed\n");
		}

		this->updateTime();

		// Roll the timestamps forward and measure the inter-frame gap.
		this->prevImageTimeStamp = this->imageTimeStamp;
		this->imageTimeStamp = this->testtime;
		double deltaTime = imageTimeStamp - prevImageTimeStamp;

		// Large frame-gap diagnostic (output currently disabled).
		if (deltaTime > 2*(1000.0 / this->camera.getFrameRate()))
		{
	         ///std::cout << "Lrg DeltaTime: " << deltaTime << "at: " << imageTimeStamp << std::endl;
		}
	}
	else
	{
		stringstream currFileName;

		// Pace playback at the nominal camera frame rate (microseconds).
		// (A no-op `this->currentTime;` statement was removed here.)
		double imgTimestep = 1.0E6 / this->camera.getFrameRate();
		usleep(static_cast<useconds_t>(imgTimestep));

		// Never start before the configured first frame.
		if(this->camInputImgCounter < this->camInputStartImg)
		{
			this->camInputImgCounter = this->camInputStartImg;
		}

		currFileName.str("");
		currFileName << this->camInputImgDir << "/SingleImage" << this->camInputImgCounter << ".bmp";

		// End if greater than the specified final frame number (when positive).
		if(this->camInputFinalImg > 0 && this->camInputImgCounter > this->camInputFinalImg)
		{
			std::cout << "Last Image read: " << this->camInputImgCounter << std::endl;
			this->shutdownCriterion = true;
		}

		singleImg = cv::imread(currFileName.str());

		if (singleImg.empty())
		{
			std::cout << "Empty Image: " << currFileName.str() << std::endl;

			if (this->camInputFinalImg == -1)
			{
				// Stop at the first missing frame.
				this->shutdownCriterion = true;
			}
			else if (this->camInputFinalImg == -10)
			{
				// Loop mode: restart playback from the first frame.
				this->camInputImgCounter = this->camInputStartImg;
				currFileName.str("");
				currFileName << this->camInputImgDir << "/SingleImage" << this->camInputImgCounter << ".bmp";

				std::cout << "Re-Reading Image: " << currFileName.str() << std::endl;
				singleImg = cv::imread(currFileName.str());

				if (singleImg.empty())
				{
					std::cout << "Two Empty Images, shutting down: " << currFileName.str();
					this->shutdownCriterion = true;
					return;
				}
			}
		}

	    //cvtColor( singleImg, singleImg, CV_BGR2GRAY );

		currFileName.str("");
		this->camInputImgCounter++;

		this->updateTime();

		// Same timestamp bookkeeping as the live-camera path.
		this->prevImageTimeStamp = this->imageTimeStamp;
		this->imageTimeStamp = this->testtime;
		double deltaTime = imageTimeStamp - prevImageTimeStamp;

		if (deltaTime > 2*(1000.0 / this->camera.getFrameRate()))
		{
			//std::cout << "Lrg DeltaTime: " << deltaTime << "at: " << imageTimeStamp << std::endl;
		}
	}

	struct timespec timeA, timeB;
	clock_gettime(CLOCK_REALTIME, &timeA);

	timestampedsingleImage tImage(this->imageTimeStamp, singleImg);

	// Publish the frame for consumers under the deque lock.
	pthread_mutex_lock(&storageDeque_mutex);
	storageImg_deque.push_front(tImage);
	pthread_mutex_unlock(&storageDeque_mutex);

	clock_gettime(CLOCK_REALTIME, &timeB);
	double save_time_diff = timeDiff(&timeA, &timeB);
	//std::cout << "Time to add to deque: " << save_time_diff << std::endl;
}