Ejemplo n.º 1
0
void VideoDecoder::convertAndPushPicture(const AVFrame* frame) {
	// Allocate a picture to hold the YUV data
	AVFrame* yuvFrame = av_frame_alloc();
	av_frame_copy(yuvFrame, frame);
	yuvFrame->format = DESTINATION_FORMAT;

	av_image_alloc(yuvFrame->data,
				   yuvFrame->linesize,
				   m_status->videoCodecPars.width,
				   m_status->videoCodecPars.height,
				   DESTINATION_FORMAT,
				   1);

	std::unique_ptr<FFMPEGVideoFrame> videoFramePtr(new FFMPEGVideoFrame());

	if (m_status->videoCodecPars.pixel_format == DESTINATION_FORMAT) {
		av_image_copy(yuvFrame->data,
					  yuvFrame->linesize,
					  (const uint8_t**) (frame->data),
					  frame->linesize,
					  DESTINATION_FORMAT,
					  m_status->videoCodecPars.width,
					  m_status->videoCodecPars.height);
	} else {
		// Convert frame to YUV
		sws_scale(
			m_swsCtx,
			(uint8_t const* const*) frame->data,
			frame->linesize,
			0,
			m_status->videoCodecPars.height,
			yuvFrame->data,
			yuvFrame->linesize
		);
	}

	videoFramePtr->id = ++m_frameId;
#if LIBAVCODEC_VERSION_INT > AV_VERSION_INT(58, 3, 102)
	videoFramePtr->frameTime = getFrameTime(frame->best_effort_timestamp, m_status->videoStream->time_base);
#else
	videoFramePtr->frameTime = getFrameTime(av_frame_get_best_effort_timestamp(frame), m_status->videoStream->time_base);
#endif
	videoFramePtr->frame = yuvFrame;

	videoFramePtr->ySize.height = static_cast<size_t>(m_status->videoCodecPars.height);
	videoFramePtr->ySize.width = static_cast<size_t>(m_status->videoCodecPars.width);
	videoFramePtr->ySize.stride = static_cast<size_t>(yuvFrame->linesize[0]);

	// 420P means that the UV channels have half the width and height
	videoFramePtr->uvSize.height = static_cast<size_t>(m_status->videoCodecPars.height / 2);
	videoFramePtr->uvSize.width = static_cast<size_t>(m_status->videoCodecPars.width / 2);
	videoFramePtr->uvSize.stride = static_cast<size_t>(yuvFrame->linesize[1]);

	pushFrame(VideoFramePtr(videoFramePtr.release()));
}
Ejemplo n.º 2
0
void GLImageDrawable::hqXfadeStop()
{
	// The high-quality crossfade has finished: reset all fade state,
	// release the retained previous image, and display the final image.
	m_hqXfadeActive = false;
	m_fadeValue = 0.0;
	m_fadeTick.stop();

	// Drop the old image now that the blend is complete
	m_oldImage = QImage();

	// Rebuild the current frame from the final image and re-upload it
	m_frame = VideoFramePtr(new VideoFrame(m_image, 1000/30));
	updateTexture();

	updateGL();
}
Ejemplo n.º 3
0
void GLImageDrawable::releaseImage()
{
	// Nothing to release unless the image can be re-loaded later
	// (e.g. it is backed by a file).
	if(!canReleaseImage())
		return;

	m_releasedImage = true;

	if(!m_frame)
		return;

	// Give the frame's memory back to the shared allocation budget and
	// swap in an empty placeholder frame.
	m_allocatedMemory -= m_frame->pointerLength();
	m_frame = VideoFramePtr(new VideoFrame());

	#ifdef DEBUG_MEMORY_USAGE
	qDebug() << "GLImageDrawable::releaseImage(): "<<(QObject*)this<<" Released memory, allocated down to:"<<(m_allocatedMemory/1024/1024)<<"MB";
	#endif
}
Ejemplo n.º 4
0
/// Replace the drawable's current image.
///
/// @param image       The new image to display.
/// @param insidePaint True when called from within a paint pass; suppresses
///                    crossfading and the extra updateGL() call.
///
/// Side effects: adjusts the shared m_allocatedMemory accounting, may start
/// a (hq) crossfade from the previous frame, rebuilds m_frame, and
/// re-uploads the texture (unless an active hq crossfade will do so later
/// with a blended image).
void GLImageDrawable::setImage(const QImage& image, bool insidePaint)
{
	
	//qDebug() << "GLImageDrawable::setImage(): "<<(QObject*)this<<" mark1: insidePaint:"<<insidePaint;
	// Memory cap: when not live, defer loading rather than exceed the
	// global image-allocation budget; load happens at go-live instead.
	if(m_allocatedMemory > IMAGE_ALLOCATION_CAP_MB*1024*1024 &&
		!liveStatus() &&
		canReleaseImage())
	{
		m_releasedImage = true;
		//qDebug() << "GLImageDrawable::setImage(): "<<(QObject*)this<<" NOT LOADING";

		#ifdef DEBUG_MEMORY_USAGE
		qDebug() << "GLImageDrawable::setImage(): "<<(QObject*)this<<" Allocated memory ("<<(m_allocatedMemory/1024/1024)<<"MB ) exceedes" << IMAGE_ALLOCATION_CAP_MB << "MB cap - delaying load until go-live";
		#endif
		return;
	}

	m_releasedImage = false;
	//qDebug() << "GLImageDrawable::setImage(): "<<(QObject*)this<<" mark2";
	//image.save("whitedebug.png");


	// If we already show a valid frame and crossfading is enabled (and we
	// are not inside a paint pass), start a fade from the old content:
	// either the high-quality CPU blend (hqXfade) or the GPU two-texture
	// fade via m_frame2.
	if(m_frame &&
	   m_frame->isValid() &&
	   xfadeEnabled() &&
	   !insidePaint)
	{
		if(m_hqXfadeEnabled)
		{
			// Keep the previous image for CPU blending in hqXfadeTick()
			m_oldImage = m_image;
			hqXfadeStart();
		}
		else
		{
			m_frame2 = m_frame;
			//sqDebug() << "GLImageDrawable::setImage(): "<<(QObject*)this<<" Starting crossfade with m_frame2";
			//m_frame2 = VideoFramePtr(new VideoFrame(m_image,1000/30));
			updateTexture(true); // true = read from m_frame2
			xfadeStart();
		}
	}


	// Take the memory off the list because when crossfade is done, the frame should get freed
	if(m_frame)
	{
		m_allocatedMemory -= m_frame->pointerLength();
		#ifdef DEBUG_MEMORY_USAGE
		qDebug() << "GLImageDrawable::setImage(): "<<(QObject*)this<<" Allocated memory down to:"<<(m_allocatedMemory/1024/1024)<<"MB";
		#endif
		//qDebug() << "GLImageDrawable::setImage(): "<<(QObject*)this<<" mark4";
	}

	// Bake the border/shadow into the image when needed; otherwise use the
	// caller's image directly (QImage is copy-on-write, so this is cheap).
	QImage localImage;
	if(m_shadowEnabled || m_borderWidth > 0.001)
		localImage = applyBorder(image);
	else
		localImage = image;
		
// 	QImage bgImage(QSize(1000,750), QImage::Format_ARGB32_Premultiplied);
// 	QBrush bgTexture(QPixmap("ColorTile2.png"));
// 	QPainter bgPainter(&bgImage);
// 	bgPainter.fillRect(bgImage.rect(), bgTexture);
// 	bgPainter.end();
// 	//bgImage = bgImage.convertToFormat(QImage::Format_RGB32);
// 	
// 	localImage = bgImage;
		
	//qDebug() << "GLImageDrawable::setImage(): "<<(QObject*)this<<" mark5";

	// NOTE: the else-branch below is intentionally disabled (if(1)) but kept
	// as a reference implementation of the manual BUFFER_POINTER path.
	if(1)
	{
		//qDebug() << "GLImageDrawable::setImage(): "<<(QObject*)this<<" Setting new m_frame";
		m_frame = VideoFramePtr(new VideoFrame(localImage, 1000/30));
	}
	else
	{
		// Dead code path: builds the frame by hand, picking a QVideoFrame
		// pixel format matching the QImage format and memcpy'ing the bits.
		m_frame = VideoFramePtr(new VideoFrame());
		//m_frame->setPixelFormat(QVideoFrame::Format_RGB32);
		//m_frame->setCaptureTime(QTime::currentTime());
		m_frame->setBufferType(VideoFrame::BUFFER_POINTER);
		m_frame->setHoldTime(1000/30);
		m_frame->setSize(localImage.size());
		//m_frame->setDebugPtr(true);

		QImage::Format format = localImage.format();
		m_frame->setPixelFormat(
			format == QImage::Format_ARGB32 ? QVideoFrame::Format_ARGB32 :
			format == QImage::Format_RGB32  ? QVideoFrame::Format_RGB32  :
			format == QImage::Format_RGB888 ? QVideoFrame::Format_RGB24  :
			format == QImage::Format_RGB16  ? QVideoFrame::Format_RGB565 :
			format == QImage::Format_RGB555 ? QVideoFrame::Format_RGB555 :
			//format == QImage::Format_ARGB32_Premultiplied ? QVideoFrame::Format_ARGB32_Premultiplied :
			// GLVideoDrawable doesn't support premultiplied - so the format conversion below will convert it to ARGB32 automatically
			QVideoFrame::Format_Invalid);

		if(m_frame->pixelFormat() == QVideoFrame::Format_Invalid)
		{
			qDebug() << "GLImageDrawable::setImage(): "<<(QObject*)this<<": image was not in an acceptable format, converting to ARGB32 automatically.";
			localImage = localImage.convertToFormat(QImage::Format_ARGB32);
			m_frame->setPixelFormat(QVideoFrame::Format_ARGB32);
		}

		memcpy(m_frame->allocPointer(localImage.byteCount()), (const uchar*)localImage.bits(), localImage.byteCount());
	}

	// Account for the new frame's memory and remember the original
	// (border-less) image for later crossfades.
	m_allocatedMemory += localImage.byteCount();
	m_image = image;

	// explicitly release the original image to see if that helps with memory...
	//image = QImage();

 	#ifdef DEBUG_MEMORY_USAGE
 	qDebug() << "GLImageDrawable::setImage(): "<<(QObject*)this<<" Allocated memory up to:"<<(m_allocatedMemory/1024/1024)<<"MB";
 	#endif
	//qDebug() << "GLImageDrawable::setImage(): "<<(QObject*)this<<" mark7";
	
	// Don't updateTexture if m_hqXfadeActive because
	// we'll updateTexture inside hqXfadeTick with a blended image
	if(!m_hqXfadeActive)
		updateTexture();

// 	QString file = QString("debug-%1-%2.png").arg(metaObject()->className()).arg(QString().sprintf("%p",((void*)this)));
// 	m_image.save(file);
// 	qDebug() << "QImageDrawable::setImage: "<<(QObject*)this<<": Wrote: "<<file;

	// Without an FPS limit, repaint immediately (unless we are already
	// inside a paint pass); flag a pending update when not live.
	if(fpsLimit() <= 0.0 &&
	   !insidePaint)
	{
		//qDebug() << "GLImageDrawable::setImage(): "<<(QObject*)this<<" mark8";
		updateGL();
		if(!liveStatus())
			m_needUpdate = true;
	}

	updateShadow();
		

	//qDebug() << "GLImageDrawable::setImage(): "<<(QObject*)this<<" Set image size:"<<m_frame->image().size();

	// TODO reimp so this code works
// 	if(m_visiblePendingFrame)
// 	{
// 		//qDebug() << "GLVideoDrawable::frameReady: "<<this<<", pending visible set, calling setVisible("<<m_tempVisibleValue<<")";
// 		m_visiblePendingFrame = false;
// 		GLDrawable::setVisible(m_tempVisibleValue);
// 	}
}
Ejemplo n.º 5
0
/// One timer tick of the high-quality (CPU-blended) crossfade.
///
/// @param callUpdate When true, triggers a repaint (updateGL) after the tick.
///
/// Advances the linear fade progress; when the fade time has elapsed, stops
/// the fade (hqXfadeStop shows the final image), otherwise blends the old
/// and new images on the CPU and uploads the blended frame.
void GLImageDrawable::hqXfadeTick(bool callUpdate)
{
	// Only start the counter once we actually get the first 'tick' of the timer
	// because there may be a significant delay between the time the timer is started
	// and the first 'tick' is received, which (if we started the counter above), would
	// cause a noticable and undesirable jump in the opacity
	if(!m_fadeTimeStarted)
	{
		m_fadeTime.start();
		m_fadeTimeStarted = true;
	}

	// m_startOpacity offsets the clock so a fade can resume mid-way.
	double elapsed = m_fadeTime.elapsed() + (m_startOpacity * xfadeLength());
	double progress = ((double)elapsed) / ((double)xfadeLength());
	// Linear ramp; the easing curve (m_fadeCurve) is deliberately unused here.
	m_fadeValue = progress; //m_fadeCurve.valueForProgress(progress);
	//qDebug() << "GLVideoDrawable::xfadeTick(): elapsed:"<<elapsed<<", start:"<<m_startOpacity<<", progress:"<<progress<<", fadeValue:"<<m_fadeValue;

	if(elapsed >= xfadeLength())
		hqXfadeStop();
	else
	{
		// CLEANUP: removed an unused local that computed the united size of
		// m_image/m_oldImage -- it was only referenced by dead commented-out
		// code (an earlier QPainter::setOpacity-based blend implementation).

		double opac = m_fadeValue;
		QImage result = m_image;    // COW copies; originals stay untouched
		QImage over   = m_oldImage;
		
		// A uniform fill whose only meaningful channel is its alpha (= opac)
		QColor alpha = Qt::black;
		alpha.setAlphaF(opac);
		
		QPainter p;
		
		// Alpha-dissolve technique pulled from https://projects.kde.org/projects/playground/artwork/smaragd/repository/revisions/c71af7bc12611e462426e75c3d5793a87b67f57e/diff
		// per Christoph Feck <*****@*****.**>
		// over *= opac; result *= (1 - opac); result += over
		// NOTE(review): this weights the OLD image by opac and the NEW image
		// by (1 - opac) -- looks inverted relative to a fade-in of the new
		// image; confirm the intended fade direction before "fixing".
		p.begin(&over);
		p.setCompositionMode(QPainter::CompositionMode_DestinationIn);
		p.fillRect(result.rect(), alpha);
		p.end();
		
		p.begin(&result);
		p.setCompositionMode(QPainter::CompositionMode_DestinationOut);
		p.fillRect(result.rect(), alpha);
		
		p.setCompositionMode(QPainter::CompositionMode_Plus);
		p.drawImage(0, 0, over);
		p.end();
		
		// CLEANUP: dropped the redundant 'QImage intermImage = result;' copy
		// and use 'result' directly (identical pixels either way).
		//result.save(tr("hqx-interm-%1.jpg").arg((int)(m_fadeValue*10.)));
		
		m_frame = VideoFramePtr(new VideoFrame(result.convertToFormat(QImage::Format_ARGB32), 1000/30));
		updateTexture();
	}

	if(callUpdate)
		updateGL();
}