Example #1
bool QVideoDecoder::load(const QString & filename)
{
	//if(!QFile::exists(filename))
	//	return false;

	//QMutexLocker locker(&mutex);
    static int debugCounter = 0;

	 AVInputFormat *inFmt = NULL;
	 AVFormatParameters formatParams;
	memset(&formatParams, 0, sizeof(AVFormatParameters));

	 QString fileTmp = filename;
	 //if(debugCounter ++ <= 0)
		//fileTmp = "vfwcap://0";
	if(fileTmp == "C:/dummy.txt")
		fileTmp = "vfwcap://0";
	qDebug() << "[DEBUG] QVideoDecoder::load(): starting with fileTmp:"<<fileTmp;
	 bool customInputFormat = false;
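	 // A source of the form "format://device" (e.g. "vfwcap://0") selects a
	 // libavdevice input format instead of a plain file.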
	 if(fileTmp.indexOf("://") > -1)
	 {
		 QStringList list = fileTmp.split("://");
		 if(list.size() == 2)
		 {
			 qDebug() << "[DEBUG] QVideoDecoder::load(): input format args:"<<list;
			 fileTmp = list[1];
			 if(fileTmp.isEmpty())
				fileTmp = "0";
			 avdevice_register_all();
			 QString fmt = list[0];
			 if(fmt == "cap")
				fmt = "vfwcap";
			 inFmt = av_find_input_format(qPrintable(fmt));
			 if( !inFmt )
			 {
				   qDebug() << "[ERROR] QVideoDecoder::load(): Unable to find input format:"<<list[0];
				   return false;
			 }


			 // Request 25 fps from the capture device (time_base = 1/25).
			 formatParams.time_base.num = 1;
			 formatParams.time_base.den = 25;

			 customInputFormat = true;
		}

	}

	// Open video file
	if(av_open_input_file(&m_av_format_context, qPrintable(fileTmp), inFmt, 0, &formatParams) != 0)
	//if(av_open_input_file(&m_av_format_context, "1", inFmt, 0, NULL) != 0)
	{
		qDebug() << "[WARN] QVideoDecoder::load(): av_open_input_file() failed, fileTmp:"<<fileTmp;
		return false;
	}

	// Retrieve stream information
	if(!customInputFormat)
	{
		if(av_find_stream_info(m_av_format_context) < 0)
		{
			qDebug() << "[WARN] QVideoDecoder::load(): av_find_stream_info() failed.";
			return false;
		}
	}


	unsigned int i;

	// Find the first video stream
	m_video_stream = -1;
	m_audio_stream = -1;
	for(i = 0; i < m_av_format_context->nb_streams; i++)
	{
		if(m_av_format_context->streams[i]->codec->codec_type == CODEC_TYPE_VIDEO)
		{
			m_video_stream = i;
		}
		if(m_av_format_context->streams[i]->codec->codec_type == CODEC_TYPE_AUDIO)
		{
			m_audio_stream = i;
		}
	}
	if(m_video_stream == -1)
	{
		qDebug() << "[WARN] QVideoDecoder::load(): Cannot find video stream.";
		return false;
	}

	// Get a pointer to the codec context for the video and audio streams
	m_video_codec_context = m_av_format_context->streams[m_video_stream]->codec;
// 	m_video_codec_context->get_buffer = our_get_buffer;
// 	m_video_codec_context->release_buffer = our_release_buffer;

	// Find the decoder for the video stream
	m_video_codec = avcodec_find_decoder(m_video_codec_context->codec_id);
	if(m_video_codec == NULL)
	{
		qDebug() << "[WARN] QVideoDecoder::load(): avcodec_find_decoder() failed.";
		return false;
	}

	// Open codec
	if(avcodec_open(m_video_codec_context, m_video_codec) < 0)
	{
		qDebug() << "[WARN] QVideoDecoder::load(): avcodec_open() failed.";
		return false;
	}

	// Allocate video frame
	m_av_frame = avcodec_alloc_frame();

	// Allocate an AVFrame structure
	m_av_rgb_frame = avcodec_alloc_frame();
	if(m_av_rgb_frame == NULL)
	{
		qDebug() << "[WARN] QVideoDecoder::load(): avcodec_alloc_frame() failed.";
		return false;
	}

	// Determine required buffer size and allocate buffer
	//int num_bytes = avpicture_get_size(PIX_FMT_RGB32, m_video_codec_context->width, m_video_codec_context->height);
	int num_bytes = avpicture_get_size(RAW_PIX_FMT, m_video_codec_context->width, m_video_codec_context->height);


	m_buffer = (uint8_t *)av_malloc(num_bytes * sizeof(uint8_t));

	// Assign appropriate parts of buffer to image planes in pFrameRGB
	// Note that pFrameRGB is an AVFrame, but AVFrame is a superset of AVPicture
	//avpicture_fill((AVPicture *)m_av_rgb_frame, m_buffer, PIX_FMT_RGB32, m_video_codec_context->width, m_video_codec_context->height);
	avpicture_fill((AVPicture *)m_av_rgb_frame, m_buffer, RAW_PIX_FMT, m_video_codec_context->width, m_video_codec_context->height);

	if(m_audio_stream != -1)
	{
		m_audio_codec_context = m_av_format_context->streams[m_audio_stream]->codec;

// 		// Set audio settings from codec info
// 		wanted_spec.freq = m_audio_codec_context->sample_rate;
// 		wanted_spec.format = AUDIO_S16SYS;
// 		wanted_spec.channels = m_audio_codec_context->channels;
// 		wanted_spec.silence = 0;
// 		wanted_spec.samples = SDL_AUDIO_BUFFER_SIZE;
// 		//wanted_spec.callback = audio_callback;
// 		wanted_spec.userdata = m_audio_codec_context;
//
// 		if(SDL_OpenAudio(&wanted_spec, &spec) < 0)
// 		{
// 			//error
// 			return false;
// 		}
//
		m_audio_codec = avcodec_find_decoder(m_audio_codec_context->codec_id);
		if(!m_audio_codec)
		{
			//unsupported codec
			return false;
		}
		avcodec_open(m_audio_codec_context, m_audio_codec);
	}

	m_timebase = m_av_format_context->streams[m_video_stream]->time_base;

	calculateVideoProperties();

	m_initial_decode = true;

	decode();

	m_video->m_video_loaded = true;

	return true;
}
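A possible companion piece (not part of the original source): a minimal sketch of how the decoded m_av_frame could be converted into the m_av_rgb_frame buffer that load() prepares above, using libswscale's legacy API to match the old FFmpeg calls in this example. The helper name, the scaling flag, and the reuse of RAW_PIX_FMT are assumptions.

// Hypothetical helper, not in the original source. Assumes <libswscale/swscale.h>
// is included and that RAW_PIX_FMT is the same macro passed to avpicture_fill() in load().
static void convertFrameToRgb(AVCodecContext *ctx, AVFrame *src, AVFrame *dstRgb)
{
	// One-shot conversion context; a real decoder would cache this between frames.
	SwsContext *sws = sws_getContext(
		ctx->width, ctx->height, ctx->pix_fmt,
		ctx->width, ctx->height, RAW_PIX_FMT,
		SWS_FAST_BILINEAR, NULL, NULL, NULL);
	if(!sws)
		return;

	// Convert/scale the full frame into the RGB destination planes.
	sws_scale(sws, src->data, src->linesize, 0, ctx->height,
	          dstRgb->data, dstRgb->linesize);

	sws_freeContext(sws);
}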
Example #2
int VideoThread::initVideo()
{
	//qDebug() << "VideoThread::initVideo()";
// 	avcodec_init();
// 	avcodec_register_all();
// 	avdevice_register_all();
// 	av_register_all();

	QString fileTmp = m_videoFile;
////	qDebug() << "[DEBUG] VideoThread::load(): input file:"<<fileTmp;

	
	AVInputFormat *inFmt = NULL;
	AVFormatParameters formatParams;
	memset(&formatParams, 0, sizeof(AVFormatParameters));

	//qDebug() << "[DEBUG] VideoThread::load(): starting with fileTmp:"<<fileTmp;
	
	// Open video file
	int res = av_open_input_file(&m_av_format_context, qPrintable(fileTmp), inFmt, 0, &formatParams);
	if(res != 0)
	//if(av_open_input_file(&m_av_format_context, "1", inFmt, 0, NULL) != 0)
	{
		qDebug() << "[ERROR] VideoThread::load(): av_open_input_file() failed, fileTmp:"<<fileTmp<<", res:"<<res;
		return false;
	}

	if(av_find_stream_info(m_av_format_context) < 0)
	{
		qDebug() << "[ERROR] VideoThread::load(): av_find_stream_info() failed.";
		return false;
	}
	
	//dump_format(m_av_format_context, 0, qPrintable(m_cameraFile), 0);
	qDebug() << "[DEBUG] dump_format():";
	dump_format(m_av_format_context, 0, qPrintable(fileTmp), false);

	uint i;

	// Find the first video stream
	m_video_stream = -1;
	m_audio_stream = -1;
	for(i = 0; i < m_av_format_context->nb_streams; i++)
	{
		if(m_av_format_context->streams[i]->codec->codec_type == CODEC_TYPE_VIDEO)
		{
			m_video_stream = i;
		}
		if(m_av_format_context->streams[i]->codec->codec_type == CODEC_TYPE_AUDIO)
		{
			m_audio_stream = i;
		}
	}
	if(m_video_stream == -1)
	{
		qDebug() << "[ERROR] VideoThread::load(): Cannot find video stream.";
		return false;
	}

	// Get a pointer to the codec context for the video and audio streams
	m_video_codec_context = m_av_format_context->streams[m_video_stream]->codec;
// 	m_video_codec_context->get_buffer = our_get_buffer;
// 	m_video_codec_context->release_buffer = our_release_buffer;

	// Find the decoder for the video stream
	m_video_codec = avcodec_find_decoder(m_video_codec_context->codec_id);
	if(m_video_codec == NULL)
	{
		qDebug() << "[ERROR] VideoThread::load(): avcodec_find_decoder() failed for codec_id:" << m_video_codec_context->codec_id;
		return false;
	}

	// Open codec
	if(avcodec_open(m_video_codec_context, m_video_codec) < 0)
	{
		qDebug() << "[ERROR] VideoThread::load(): avcodec_open() failed.";
		return false;
	}

	// Allocate video frame
	m_av_frame = avcodec_alloc_frame();

	// Allocate an AVFrame structure
	m_av_rgb_frame = avcodec_alloc_frame();
	if(m_av_rgb_frame == NULL)
	{
		qDebug() << "[ERROR] VideoThread::load(): avcodec_alloc_frame() failed.";
		return false;
	}

	// Determine required buffer size and allocate buffer
	//int num_bytes = avpicture_get_size(PIX_FMT_RGB565, m_video_codec_context->width, m_video_codec_context->height);
	int num_bytes = avpicture_get_size(PIX_FMT_RGB32, m_video_codec_context->width, m_video_codec_context->height);

	m_buffer = (uint8_t *)av_malloc(num_bytes * sizeof(uint8_t));

	// Assign appropriate parts of buffer to image planes in pFrameRGB
	// Note that pFrameRGB is an AVFrame, but AVFrame is a superset of AVPicture
	//avpicture_fill((AVPicture *)m_av_rgb_frame, m_buffer, PIX_FMT_RGB565,
	avpicture_fill((AVPicture *)m_av_rgb_frame, m_buffer, PIX_FMT_RGB32,
					m_video_codec_context->width, m_video_codec_context->height);

	if(m_audio_stream != -1)
	{
		m_audio_codec_context = m_av_format_context->streams[m_audio_stream]->codec;

		m_audio_codec = avcodec_find_decoder(m_audio_codec_context->codec_id);
		if(!m_audio_codec)
		{
			//unsupported codec
			return -1;
		}
		avcodec_open(m_audio_codec_context, m_audio_codec);
	}

	m_timebase = m_av_format_context->streams[m_video_stream]->time_base;

	// Timer meant to drive frame reads at ~30 fps (the connect() to readFrame() below is commented out).
	m_readTimer = new QTimer();
	//connect(m_readTimer, SIGNAL(timeout()), this, SLOT(readFrame()));
	int ts = 1000/30; //int(m_timebase.den);
	//qDebug() << "VideoThread::initVideo: setting interval to "<<ts<<", den:"<<m_timebase.den<<", num:"<<m_timebase.num;
	m_readTimer->setInterval(ts);
	
	calculateVideoProperties();

	m_inited = true;
	return 0;
}
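One way the timer set up in initVideo() might be used (a sketch, not from the original source): a readFrame() slot that pulls packets until one video frame is decoded, using the same legacy FFmpeg API as the code above. Only the slot name is hinted at by the commented-out connect(); the body and the handling of the decoded frame are assumptions.

// Hypothetical slot, assumed from the commented-out connect() in initVideo().
void VideoThread::readFrame()
{
	AVPacket packet;
	int frameFinished = 0;

	// Read packets until a complete video frame has been decoded for this tick.
	while(av_read_frame(m_av_format_context, &packet) >= 0)
	{
		if(packet.stream_index == m_video_stream)
		{
			// Legacy decode call matching the avcodec_open()-era API used above.
			avcodec_decode_video(m_video_codec_context, m_av_frame,
			                     &frameFinished, packet.data, packet.size);
		}

		av_free_packet(&packet);

		if(frameFinished)
		{
			// A real implementation would convert m_av_frame (e.g. with sws_scale
			// into m_av_rgb_frame) and hand it off to consumers here.
			break;
		}
	}
}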