Exemplo n.º 1
0
	bitmap_ptr finalize(bool showProgress = true) {
		// Stitch all accepted chunks vertically into one final bitmap.
		// @param showProgress  when true, print a progress message to stdout.
		// @return the assembled image, or nullptr when no chunks were accepted.
		if (chunks.empty()) {
			return nullptr;
		}
		if (showProgress) {
			std::cout << "Adding all accepted chunks to the final image\n";
		}

		// Load the first chunk only to discover the pixel type and bit depth
		// the final image must be allocated with.
		const auto it = chunks.begin();
		bitmap_ptr firstChunk = GenericLoader(*it);

		auto currentHeight = 0;

		const auto type = FreeImage_GetImageType(firstChunk.get());

		const auto bpp = FreeImage_GetBPP(firstChunk.get());

		bitmap_ptr finalImage(FreeImage_AllocateT(type, width, height, bpp));

		auto RGBChunkWorker = [=, &finalImage, &currentHeight](const std::string& el)
		{
			bitmap_ptr chunk = GenericLoader(el);
			auto chunkHeight = FreeImage_GetHeight(chunk.get());
			// Own the cropped copy with bitmap_ptr so it is unloaded on every
			// path (the original stored a raw FIBITMAP* and leaked it).
			bitmap_ptr chunkCopy(FreeImage_Copy(chunk.get(), 0, 0, this->width, chunkHeight));
			if (chunkCopy) {
				// alpha = 256 means "copy source over destination, no blending".
				FreeImage_Paste(finalImage.get(), chunkCopy.get(), 0, currentHeight, 256);
			}
			currentHeight += chunkHeight;
		};

		std::for_each(chunks.begin(), chunks.end(), RGBChunkWorker);
		return finalImage;
	}
Exemplo n.º 2
0
QString MomentumImageHelper::fromText(QString /*text*/)
{
    // Renders a 320x100 white image (text drawing is currently commented
    // out), saves it as image.png in the user's home directory and returns
    // the saved file's absolute path.
    qDebug()<<qApp->font();

    bb::device::DisplayInfo displayInfo;
    //int w=displayInfo.pixelSize().width(), h=displayInfo.pixelSize().height();
    int w = 320, h = 100;

    QImage finalImage(w, h, QImage::Format_ARGB32_Premultiplied);

    // Fill before attaching a painter: a QImage must not be modified
    // directly while a QPainter is active on it.
    finalImage.fill(Qt::white);

    QPainter textPainter;
    textPainter.begin(&finalImage);
    qDebug()<<"1";
    textPainter.setPen(QPen(Qt::red, 12, Qt::SolidLine, Qt::RoundCap, Qt::RoundJoin));
    qDebug()<<"Pen set";
    qDebug()<<textPainter.font();
    //qDebug()<<textPainter.fontInfo().family();
    //textPainter.drawLine(QPoint(0, 0), QPoint(320, 100));
    QFont arial("Arial", 10, 100, true);  // use C++ 'true', not the TRUE macro
    textPainter.setFont(arial);
    qDebug()<<textPainter.font();

    //textPainter.drawText(QRectF(0, 0, 320, 100), Qt::AlignCenter, "I");
    qDebug()<<"2";

    // End painting before saving: saving while a painter is still active
    // on the device is not supported by Qt.
    textPainter.end();

    finalImage.save(QDir::home().absoluteFilePath("image.png"), "png");

    return QDir::home().absoluteFilePath("image.png");
    //return bb::ImageData::fromPixels(finalImage.bits(), bb::PixelFormat::RGBA_Premultiplied, w, h, finalImage.bytesPerLine());
}
void BachelorThesis::loadImage() 
{
	// Grabs the next frame(s) from the video source, runs the processing
	// pipeline on the user-selected ROI and displays both the processed and
	// the unprocessed frame. Does nothing while the video is paused/closed.
	if( videoReader.isOpen() && !isVideoPaused )
	{
		timer.start();
		
		cv::gpu::GpuMat processedImage;
		cv::Mat unProcessedImage;

		// loading new frames. the amount of skipped frames is indicated by playbackSpeed
		for( int i = 0; i < playbackSpeed; i++ )
		{
			if( videoReader.selectedType == VideoReader::Type::CPU || videoReader.selectedType == VideoReader::Type::LIVE )
			{
				cv::Mat  temp = videoReader.getNextImage();
				
				cv::cvtColor( temp, temp, CV_BGR2RGBA );
				unProcessedImage = temp;
				if( originalImage.cols != temp.cols || originalImage.rows != temp.rows )
				{
					// NOTE(review): allocates a fixed 640x480 buffer regardless of
					// the frame's actual size; upload() below reallocates to temp's
					// real dimensions anyway — confirm whether this is intended.
					originalImage = cv::gpu::GpuMat( 640, 480, CV_8UC4 );
				}

				originalImage.upload( temp );
			} else if ( videoReader.selectedType == VideoReader::Type::GPU )
			{
				originalImage = videoReader.getNextImage_GPU();
			}
			
		}

		// get the selected area
		QPoint maxSize( originalImage.cols, originalImage.rows );
		QRect roi = roiSelector->geometry();

		// adjust the roisize according to the maximum dimensions
		adjustRoiSize( roiSelector->geometry(), roi, maxSize );

		// initializes a new section gpumat with the size of the roi and the imagetype
		// of the incoming image. GpuMat's constructor takes (rows, cols, type); the
		// original passed width as rows and height as cols (swapped).
		cv::gpu::GpuMat section( roi.height(), roi.width(), originalImage.type() );
		// init a cv::Rect with the properties of the ROI
		cv::Rect cvSelectedRect = cv::Rect( roi.x(), roi.y(), roi.width(), roi.height() );
		// a new local copy of the current image

		cv::gpu::GpuMat tempMat(originalImage );
		// select a part of this new image ( position and size is stored in the passed cv::Rect) and copy this to the new image
		tempMat(cvSelectedRect).copyTo( section );
		// add the cropped image to the processing pipeline
		pipeline->addImage( &section );
		// start the pipeline ( do the processing )
		pipeline->start();
		// get the processed image
		processedImage = pipeline->getFinishedImage();
		// make a local copy of the entire unprocessed original image
		cv::gpu::GpuMat finalImage( originalImage );
		// copy the processed image into the original image, exactly at the location of the ROI
		processedImage.copyTo( finalImage( cvSelectedRect ) );
		// convert the cv::gpu::GpuMat into a QPixmap
		
		QPixmap imagePixmap = QPixmap::fromImage( this->mat2QImage( cv::Mat( finalImage ) ) );
		// display the QPixmap onto the label; constrain the label to the
		// pixmap's size (the original swapped width and height here)
		ui.videoLabel->setPixmap( imagePixmap );
		ui.videoLabel->setMaximumHeight( imagePixmap.height() );
		ui.videoLabel->setMaximumWidth( imagePixmap.width() );
		ui.videoLabel->adjustSize();

		QPixmap originalImagePixmap = QPixmap::fromImage( this->mat2QImage( cv::Mat( unProcessedImage ) ) );
		ui.originalVideoLabel->setPixmap( originalImagePixmap );
		ui.originalVideoLabel->setMaximumHeight( originalImagePixmap.height() );
		ui.originalVideoLabel->setMaximumWidth( originalImagePixmap.width() );
		ui.originalVideoLabel->adjustSize();
		timer.stop();
		timer.store();
		//std::cout << "it took by average:" << timer.getAverageTimeStdString() << "ms." << std::endl;
		//std::cout << "lates was: " << timer.getLatestStdString() << "ms." << std::endl;
		QString elapsed;
		elapsed.append( QString( "%1" ).arg( videoReader.getNormalizedProgress() ) );
		ui.progressBar->setValue( videoReader.getCurrentFrameNr() );
	}
	else
	{
		// no new frame. do nothing
	}
}
Exemplo n.º 4
0
	bitmap_ptr finalize(bool showProgress = false) {
		// Accumulates (sums) all accepted HDR chunks pixel-by-pixel on top of
		// the first chunk, then folds the alpha-chunk list into the alpha
		// channel. Returns nullptr when no chunks were accepted.
		if (chunks.empty()) {
			return nullptr;
		}
		if (showProgress) {
			std::cout << "Adding all accepted chunks to the final image\n";
		}

		const auto it = chunks.begin();
		
		// The first chunk seeds the final image and determines its pixel type.
		bitmap_ptr firstChunk = GenericLoader(*it);
		
		const auto type = FreeImage_GetImageType(firstChunk.get());
		
		// NOTE(review): FreeImage_Copy takes (left, top, right, bottom); passing
		// (0, height, width, 0) puts top/bottom in an unusual order — confirm
		// against the FreeImage docs that this yields the intended full copy.
		bitmap_ptr finalImage(FreeImage_Copy(firstChunk.get(), 0, height, width, 0));

		// Sums RGB channels of one chunk into finalImage (FIT_RGBF layout).
		auto RGBChunkWorker = [=, &finalImage](const std::string& el)
		{
			bitmap_ptr chunk = GenericLoader(el);
			auto chunkHeight = FreeImage_GetHeight(chunk.get());
			for (unsigned int y = 0; y < chunkHeight; ++y) {
				auto srcbits = reinterpret_cast<FIRGBF *>(FreeImage_GetScanLine(chunk.get(), y));
				auto dstbits = reinterpret_cast<FIRGBF *>(FreeImage_GetScanLine(finalImage.get(), y));

				for (unsigned int x = 0; x < this->width; ++x) {
					dstbits[x].red += srcbits[x].red;
					dstbits[x].blue += srcbits[x].blue;
					dstbits[x].green += srcbits[x].green;
				}
			}
		};

		// Same as RGBChunkWorker but also accumulates alpha (FIT_RGBAF layout).
		auto RGBAChunkWorker = [=, &finalImage](const std::string& el)
		{
			bitmap_ptr chunk = GenericLoader(el);
			auto chunkHeight = FreeImage_GetHeight(chunk.get());
			for (unsigned int y = 0; y < chunkHeight; ++y) {
				const auto srcbits = reinterpret_cast<FIRGBAF *>(FreeImage_GetScanLine(chunk.get(), y));
				auto dstbits = reinterpret_cast<FIRGBAF *>(FreeImage_GetScanLine(finalImage.get(), y));

				for (unsigned int x = 0; x < this->width; ++x) {
					dstbits[x].red += srcbits[x].red;
					dstbits[x].blue += srcbits[x].blue;
					dstbits[x].green += srcbits[x].green;
					dstbits[x].alpha += srcbits[x].alpha;
				}
			}
		};

		// Folds an alpha chunk's RGB sum into finalImage's alpha channel.
		// NOTE(review): treats finalImage as FIRGBAF — only valid when the
		// image type is FIT_RGBAF; presumably alphaChunks is empty otherwise.
		auto alphaChunksWorker = [this, &finalImage](const std::string& el)
		{
			bitmap_ptr chunk = GenericLoader(el);
			auto chunkHeight = FreeImage_GetHeight(chunk.get());
			for (unsigned int y = 0; y < chunkHeight; ++y) {
				const auto srcbits = reinterpret_cast<FIRGBAF *>(FreeImage_GetScanLine(chunk.get(), y));
				auto dstbits = reinterpret_cast<FIRGBAF *>(FreeImage_GetScanLine(finalImage.get(), y));

				for (unsigned int x = 0; x < this->width; ++x) {
					dstbits[x].alpha += srcbits[x].red + srcbits[x].blue + srcbits[x].green;
				}
			}
		};

		// Skip the first chunk: it already seeded finalImage above.
		if (type == FIT_RGBF)
			std::for_each(std::next(chunks.begin()), chunks.end(), RGBChunkWorker);
		else if (type == FIT_RGBAF)
			std::for_each(std::next(chunks.begin()), chunks.end(), RGBAChunkWorker);
		std::for_each(alphaChunks.begin(), alphaChunks.end(), alphaChunksWorker);

		return finalImage;
	}
Exemplo n.º 5
0
void VideoGeneration::on_generateVideoPushButton_clicked()
{
	//string camPath = "C:\\Users\\dehandecroos\\Desktop\\Videos\\PRG28.avi";
	QString profileId = ui.profileName_lineEdit->text();


	
	string cameraIds[] = { 
		"camera_node_6_log", 
		"camera_node_1_log", 
		"camera_node_28_log",
		"camera_node_23_log"
	};
	int cameraIdsSize = sizeof(cameraIds) / sizeof(*cameraIds);
	string finalJoinQuery = "";
	int i = 1;
	for (string cameraId : cameraIds)
	{
		finalJoinQuery += "select * from " + cameraId + " where profile_id='" + profileId.toStdString() + "'";
		if (i++ != cameraIdsSize) {
			finalJoinQuery += "union ";
		}
	}
	finalJoinQuery += "order by TimeStamp";
	
	struct CameraTimeStamp{
		string cameraId;
		double timestamp;
	};
	
	
	stmt->execute("USE camera");
	ResultSet *timeStampsForProfile = stmt->executeQuery(finalJoinQuery);
	vector<CameraTimeStamp> timeStamps;
	
	while (timeStampsForProfile->next())
	{
		CameraTimeStamp timeStamp;
		timeStamp.cameraId = timeStampsForProfile->getString("Video_ID");
		timeStamp.timestamp = timeStampsForProfile->getDouble("TimeStamp");
		timeStamps.push_back(timeStamp);
	}

	
	vector<Mat> video;
	for (CameraTimeStamp ts : timeStamps)
	{
		string camPath = "C:\\AdaptiveCameraNetworkPack\\Videos\\";
		string camId = ts.cameraId;
		camPath += "PRG" + camId + ".avi";
		VideoCapture cam;
		cam.open(camPath);
		int frameRate = cam.get(CV_CAP_PROP_FPS);

		int minutes = (int)ts.timestamp;
		int seconds = (int)((ts.timestamp-(double)minutes)*100.0);
		int milis = ((ts.timestamp - (double)minutes)*100.0-seconds)*1000;
		
		int milliseconds = (minutes * 60 + seconds) * 1000 + milis;
		qDebug() << "Extracted Frames for time " + QString::number(ts.timestamp) + ", in camera " + QString::fromStdString(camId);
		cam.set(CV_CAP_PROP_POS_MSEC, milliseconds);
		
		
		for (int frameCount = 0; frameCount < frameRate; frameCount++)
		{
			Mat frame;
			cam >> frame;
			if (frame.empty())
			{
				break;
			}
			int fontFace = FONT_HERSHEY_SIMPLEX;
			double fontScale = 1;
			int thickness = 3;
			cv::Point textOrg1(10, 50);
			putText(frame, "CAM:" + ts.cameraId, textOrg1, fontFace, fontScale, Scalar::all(0),2);
			cv::Point textOrg2(500, 50);
			video.push_back(frame);
		}
		
		

		//VideoCapture

	}

	if (video.size() == 0){
		QImage finalImage(ui.lblOutput->width(), ui.lblOutput->width(), QImage::Format_RGB888);
		QPainter qp(&finalImage);
		qp.setBrush(Qt::black);
		qp.setPen(Qt::red);
		qp.setFont(QFont("Times", 12, QFont::Bold));
		qp.drawText(finalImage.rect(), Qt::AlignCenter, "NO VIDEO FOR "+ profileId);
		ui.lblOutput->setPixmap(QPixmap::fromImage(finalImage));
	}
	else
	{
		for (Mat frameZ : video)
		{
			Mat frameForQimage;
			cvtColor(frameZ, frameForQimage, CV_BGR2RGB);
			QImage outImage((uchar*)frameForQimage.data, frameForQimage.cols, frameForQimage.rows, frameZ.step, QImage::Format_RGB888);
			ui.lblOutput->setPixmap(QPixmap::fromImage(outImage));
			imshow("Output", frameZ);
			cvWaitKey(1);

		}
	}
}
Exemplo n.º 6
0
void ScanDialog::slotFinalImage(QImage *image, ImgScanInfo *)
{
    // Forward the completed scan to listeners, tagged with a fresh id.
    const int imageId = nextId();
    emit finalImage(*image, imageId);
}