Example #1
std::shared_ptr<Frame> CFFmpegImage::ReadFrame()
{
  AVFrame* avframe = ExtractFrame();
  if (avframe == nullptr)
    return nullptr;
  std::shared_ptr<Frame> frame(new Frame());
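  // The packet duration becomes the frame's display delay (e.g. for animated images).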
  frame->m_delay = (unsigned int)av_frame_get_pkt_duration(avframe);
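  // Output pixels are 4 bytes each, so the pitch is width * 4.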
  frame->m_pitch = avframe->width * 4;
  frame->m_pImage = new unsigned char[avframe->height * frame->m_pitch];
  DecodeFrame(avframe, avframe->width, avframe->height, frame->m_pitch, frame->m_pImage);
  av_frame_free(&avframe);
  return frame;
}
Example #2
bool CFFmpegImage::LoadImageFromMemory(unsigned char* buffer, unsigned int bufSize,
                                       unsigned int width, unsigned int height)
{
  if (!Initialize(buffer, bufSize))
  {
    // Initialization failed; log the error here if needed.
    return false;
  }

  // Release any previously decoded frame before extracting a new one.
  av_frame_free(&m_pFrame);
  m_pFrame = ExtractFrame();

  return m_pFrame != nullptr;
}
Example #3
HRESULT CFLICStream::FillBuffer(IMediaSample* pSample)
{
	HRESULT hr;

	{
		CAutoLock cAutoLockShared(&m_cSharedState);

		if(m_rtPosition >= m_rtStop)
			return S_FALSE;

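		// Need both a decoded source frame and a destination buffer for the sample.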
		BYTE* pDataIn = m_pFrameBuffer;
		BYTE* pDataOut = NULL;
		if(!pDataIn || FAILED(hr = pSample->GetPointer(&pDataOut)) || !pDataOut)
			return S_FALSE;

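		// Pick up a dynamic format change if a new media type was attached to the sample.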
		AM_MEDIA_TYPE* pmt;
		if(SUCCEEDED(pSample->GetMediaType(&pmt)) && pmt)
		{
			CMediaType mt(*pmt);
			SetMediaType(&mt);

			DeleteMediaType(pmt);
		}

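		// Read the output dimensions and bit depth from the negotiated media type.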
		int w, h, bpp;
		if(m_mt.formattype == FORMAT_VideoInfo)
		{
			w = ((VIDEOINFOHEADER*)m_mt.pbFormat)->bmiHeader.biWidth;
			h = abs(((VIDEOINFOHEADER*)m_mt.pbFormat)->bmiHeader.biHeight);
			bpp = ((VIDEOINFOHEADER*)m_mt.pbFormat)->bmiHeader.biBitCount;
		}
		else if(m_mt.formattype == FORMAT_VideoInfo2)
		{
			w = ((VIDEOINFOHEADER2*)m_mt.pbFormat)->bmiHeader.biWidth;
			h = abs(((VIDEOINFOHEADER2*)m_mt.pbFormat)->bmiHeader.biHeight);
			bpp = ((VIDEOINFOHEADER2*)m_mt.pbFormat)->bmiHeader.biBitCount;
		}
		else
		{
			return S_FALSE;
		}

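		// Source pitch is the FLIC frame width (m_hdr.x); output pitch follows the negotiated bit depth.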
		int pitchIn = m_hdr.x;
		int pitchOut = w*bpp>>3;

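		// Frame index corresponding to the current stream position.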
		int nFrame = m_rtPosition / m_AvgTimePerFrame; // (int)(1.0 * m_rtPosition / m_AvgTimePerFrame + 0.5);

		{
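			// Decode forward from the nearest key frame up to the requested frame.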
			SeekToNearestKeyFrame(nFrame);

			while(m_nLastFrameNum < nFrame && !m_bFlushing)
				ExtractFrame(++m_nLastFrameNum);

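			// Expand the 8-bit palettized source into 32-bit output pixels via the palette lookup.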
			for(int y = 0, p = min(pitchIn, pitchOut); 
				y < h; 
				y++, pDataIn += pitchIn, pDataOut += pitchOut)
			{
				BYTE* src = pDataIn;
				BYTE* end = src + p;
				DWORD* dst = (DWORD*)pDataOut;
				while(src < end) *dst++ = m_pPalette[*src++];
			}
		}

		pSample->SetActualDataLength(pitchOut*h);

		REFERENCE_TIME rtStart, rtStop;
		// The sample times are modified by the current rate.
		rtStart = static_cast<REFERENCE_TIME>(m_rtSampleTime / m_dRateSeeking);
		rtStop  = rtStart + static_cast<int>(m_AvgTimePerFrame / m_dRateSeeking);
		pSample->SetTime(&rtStart, &rtStop);

		m_rtSampleTime += m_AvgTimePerFrame;
		m_rtPosition += m_AvgTimePerFrame;
	}

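	// Every delivered sample is a fully reconstructed frame, so mark each one as a sync point.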
	pSample->SetSyncPoint(TRUE);

	if(m_bDiscontinuity)
	{
		pSample->SetDiscontinuity(TRUE);
		m_bDiscontinuity = FALSE;
	}

	return S_OK;
}