Example #1
// Signals the mixer or presenter to release the interface pointers obtained from the EVR.
HRESULT STDMETHODCALLTYPE EVRCustomPresenter::ReleaseServicePointers()
{
    Log("EVRCustomPresenter::ReleaseServicePointers");

    HRESULT hr = S_OK;

    // Enter shut-down state
    {
        CAutoLock lock(this);
        m_RenderState = RENDER_STATE_SHUTDOWN;
    }

    // Flush any samples that were scheduled.
    Flush();

    // Clear the media type and release related resources (surfaces, etc).
    SetMediaType(NULL);

    // Release all services that were acquired from InitServicePointers.
    SAFE_RELEASE(m_pClock);
    SAFE_RELEASE(m_pMixer);
    SAFE_RELEASE(m_pMediaEventSink);

    return hr;
}
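
Example #1 relies on a SAFE_RELEASE helper for the COM pointers. A minimal sketch, assuming the usual DirectX-sample definition of that macro:

// Release a COM interface pointer and null it out (common DirectX-sample helper).
#ifndef SAFE_RELEASE
#define SAFE_RELEASE(p) { if (p) { (p)->Release(); (p) = NULL; } }
#endif
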
Example #2
STDMETHODIMP_(ULONG) CMediaSample::Release()
{
	/* Decrement our own private reference count */
	LONG lRef;
	if (m_cRef == 1) {
		lRef = 0;
		m_cRef = 0;
	} else {
		lRef = InterlockedDecrement(&m_cRef);
	}
	ASSERT(lRef >= 0);

	DbgLog((LOG_MEMORY,3,TEXT("    Unknown %X ref-- = %d"),
		this, m_cRef));

	/* Did we release our final reference count */
	if (lRef == 0) {
		/* Free all resources */
		if (m_dwFlags & Sample_TypeChanged) {
			SetMediaType(NULL);
		}
		ASSERT(m_pMediaType == NULL);
		m_dwFlags = 0;
		m_dwTypeSpecificFlags = 0;
		m_dwStreamId = AM_STREAM_MEDIA;

		/* This may cause us to be deleted */
		// Our refcount is reliably 0 thus no-one will mess with us
		m_pAllocator->ReleaseBuffer(this);
	}
	return (ULONG)lRef;
}
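
For symmetry with Release(), the matching AddRef() in the DirectShow base classes is a plain interlocked increment; a sketch of that counterpart:

STDMETHODIMP_(ULONG) CMediaSample::AddRef()
{
	/* Increment our own private reference count */
	return InterlockedIncrement(&m_cRef);
}
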
Example #3
    STDMETHODIMP MyPin::Receive(IMediaSample* pSample)
    {
        CAutoLock receiveLock(&m_receiveMutex);

        {
            CAutoLock objectLock(this);

            if (m_state == State_Stopped)
                return VFW_E_WRONG_STATE;

            ReturnIfNotEquals(S_OK, CBaseInputPin::Receive(pSample));

            if (m_SampleProps.dwSampleFlags & AM_SAMPLE_TYPECHANGED)
            {
                // TODO: don't recreate the device when possible
                m_renderer.Finish(false, &m_bufferFilled);
                ReturnIfFailed(SetMediaType(static_cast<CMediaType*>(m_SampleProps.pMediaType)));
            }

            if (m_eosUp)
                return S_FALSE;
        }

        // Raise Receive() thread priority, once.
        if (m_hReceiveThread != GetCurrentThread())
        {
            m_hReceiveThread = GetCurrentThread();
            if (GetThreadPriority(m_hReceiveThread) < THREAD_PRIORITY_ABOVE_NORMAL)
                SetThreadPriority(m_hReceiveThread, THREAD_PRIORITY_ABOVE_NORMAL);
        }

        // Push() returns 'false' in case of interruption.
        return m_renderer.Push(pSample, m_SampleProps, &m_bufferFilled) ? S_OK : S_FALSE;
    }
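
MyPin::Receive() uses two error-propagation helpers that are not shown. A plausible sketch of what they expand to (hypothetical; the real project may define them differently):

// Bail out of the calling function if the call does not return the expected value.
#define ReturnIfNotEquals(expected, call) \
    { HRESULT hr_ = (call); if (hr_ != (expected)) return hr_; }

// Bail out of the calling function if the call fails.
#define ReturnIfFailed(call) \
    { HRESULT hr_ = (call); if (FAILED(hr_)) return hr_; }
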
Example #4
//---------------------------------------------------------------------------
// Code
//---------------------------------------------------------------------------
XnVideoStream::XnVideoStream(HRESULT *phr, XnVideoSource *pParent, xn::ImageGenerator& imageGen, LPCWSTR pPinName) :
	CSourceStream(NAME("Video Stream"), phr, pParent, pPinName),
	m_imageGen(imageGen),
	m_bFlipVertically(FALSE),
	m_nPreferredMode(-1),
	m_Dump(pParent->m_Dump)
{
	ASSERT(phr);

	xnFPSInit(&m_FPS, 90);

	XnUInt32 nSupportedModes = m_imageGen.GetSupportedMapOutputModesCount();
	XnMapOutputMode* aOutputModes = new XnMapOutputMode[nSupportedModes];

	XnStatus nRetVal = m_imageGen.GetSupportedMapOutputModes(aOutputModes, nSupportedModes);
	if (nRetVal != XN_STATUS_OK)
	{
		*phr = E_UNEXPECTED;
		delete[] aOutputModes;
		return;
	}

	nRetVal = m_aSupportedModes.Reserve(nSupportedModes);
	if (nRetVal != XN_STATUS_OK)
	{
		*phr = E_UNEXPECTED;
		delete[] aOutputModes;
		return;
	}

	XnBool bRGB = m_imageGen.IsPixelFormatSupported(XN_PIXEL_FORMAT_RGB24);
	XnBool bMJPEG = m_imageGen.IsPixelFormatSupported(XN_PIXEL_FORMAT_MJPEG);
	Mode mode;

	for (XnUInt32 i = 0; i < nSupportedModes; ++i)
	{
		mode.OutputMode = aOutputModes[i];
		if (bRGB)
		{
			mode.Format = XN_PIXEL_FORMAT_RGB24;
			m_aSupportedModes.AddLast(mode);
		}

		if (bMJPEG)
		{
			mode.Format = XN_PIXEL_FORMAT_MJPEG;
			m_aSupportedModes.AddLast(mode);
		}
	}

	delete[] aOutputModes;

	CMediaType mediaType;
	GetMediaType(0, &mediaType);
	SetMediaType(&mediaType);
}
Example #5
STDMETHODIMP TffdshowVideoInputPin::Receive(IMediaSample* pSample)
{
    AM_MEDIA_TYPE *pmt = NULL;
    if (SUCCEEDED(pSample->GetMediaType(&pmt)) && pmt) {
        CAutoLock lock2(&m_csCodecs_and_imgFilters);
        CMediaType mt(*pmt);
        SetMediaType(&mt);
        allocator.mtChanged = false;
        DeleteMediaType(pmt);
    }
    return TinputPin::Receive(pSample);
}
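
Examples #5, #10, #13 and #14 all use the same dynamic-format-change idiom, worth calling out once: IMediaSample::GetMediaType allocates the returned AM_MEDIA_TYPE, so the caller copies it and must free it with DeleteMediaType.

AM_MEDIA_TYPE* pmt = NULL;
if (SUCCEEDED(pSample->GetMediaType(&pmt)) && pmt) {
    CMediaType mt(*pmt);   // take a copy the pin owns
    SetMediaType(&mt);     // update the pin's current media type
    DeleteMediaType(pmt);  // free the allocation made by GetMediaType
}
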
Example #6
STDMETHODIMP
BasePin::ReceiveConnection(IPin * aPin,
                             const AM_MEDIA_TYPE *aMediaType)
{
  if (!aPin)
    return E_POINTER;

  if (!aMediaType)
    return E_POINTER;

  CriticalSectionAutoEnter monitor(*mLock);

  if (IsConnected())
    return VFW_E_ALREADY_CONNECTED;

  if (!IsStopped())
    return VFW_E_NOT_STOPPED;

  HRESULT hr = CheckConnect(aPin);
  if (FAILED(hr)) {
    BreakConnect();
    return hr;
  }

  // See if subclass supports the specified media type.
  const MediaType* mediaType = reinterpret_cast<const MediaType*>(aMediaType);
  hr = CheckMediaType(mediaType);
  if (FAILED(hr)) {
    BreakConnect();
    return hr;
  }

  // Supported, set it.
  hr = SetMediaType(mediaType);
  if (FAILED(hr))
    return hr;

  // Complete the connection.
  mConnectedPin = aPin;
  // Give the subclass one last chance to refuse the connection.
  hr = CompleteConnect(aPin);
  if (FAILED(hr)) {
    // Subclass refused connection, fail...
    mConnectedPin = NULL;
    BreakConnect();
    return hr;
  }

  // It's all good, we're connected.
  return S_OK;
}
Example #7
// Attempt to connect this pin to |aPin| using given media type.
HRESULT
BasePin::AttemptConnection(IPin* aPin,
                             const MediaType* aMediaType)
{
  CriticalSectionAutoEnter monitor(*mLock);

  // Ensure we can connect to the other pin. Gives subclasses a chance
  // to prevent connection.
  HRESULT hr = CheckConnect(aPin);
  if (FAILED(hr)) {
    BreakConnect();
    return hr;
  }

  // Ensure we can connect with this media type. This gives subclasses a
  // chance to abort the connection.
  hr = CheckMediaType(aMediaType);
  if (FAILED(hr))
    return hr;

  hr = SetMediaType(aMediaType);
  if (FAILED(hr))
    return hr;

  // Ask the other pin if it will accept a connection with our media type.
  hr = aPin->ReceiveConnection(static_cast<IPin*>(this), aMediaType);
  if (FAILED(hr))
    return hr;

  // Looks good so far, give subclass one final chance to refuse connection...
  mConnectedPin = aPin;
  hr = CompleteConnect(aPin);

  if (FAILED(hr)) {
    // Subclass refused the connection, inform the other pin that we're
    // disconnecting, and break the connection.
    aPin->Disconnect();
    BreakConnect();
    mConnectedPin = NULL;
    mMediaType.Clear();
    return VFW_E_TYPE_NOT_ACCEPTED;
  }

  // Otherwise, we're all good!
  return S_OK;
}
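
Examples #6 and #7 are the two halves of the same handshake: the connecting (output) pin runs AttemptConnection, which calls the receiving (input) pin's ReceiveConnection. Each side validates with CheckConnect and CheckMediaType, commits the type with SetMediaType, and gets a final veto in CompleteConnect.
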
Example #8
// dynamic switch with Video Renderer (to change allocated buffer size as well
// as format type)
HRESULT 
BridgeSourceOutput::SwitchTo(const CMediaType* pmt)
{
    // must wait until queue is empty
    if (m_pQueue != NULL)
    {
        while (!m_pQueue->IsIdle())
        {
            m_evQueue.Wait();
        }
    }

    // now perform request
    HRESULT hr = GetConnected()->ReceiveConnection(this, pmt);
    LOG((TEXT("ReceiveConnection 0x%x"), hr));

    if (SUCCEEDED(hr))
    {
        SetMediaType(pmt);

        // for VMR, that's enough, but for old VR we need to re-commit the allocator
        m_pAllocator->Decommit();

        m_bUpstreamTypeChanged = true;
        ALLOCATOR_PROPERTIES prop;
        hr = m_pAllocator->GetProperties(&prop);
        if (SUCCEEDED(hr))
        {
            hr = DecideBufferSize(m_pAllocator, &prop);
            if (FAILED(hr))
            {
                LOG((TEXT("Allocator failure on ReceiveConnection 0x%x"), hr));
            }
        }

        if (SUCCEEDED(hr))
        {
            m_pInputPin->NotifyAllocator(m_pAllocator, false);
        }
        m_pAllocator->Commit();
    }
    return hr;
}
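
Example #8 performs a dynamic format change by calling ReceiveConnection on an input pin that is already connected. This only works with renderers that tolerate it (the VMR family does), and, as the comment notes, the legacy video renderer additionally needs the allocator to be decommitted, renegotiated via DecideBufferSize, and recommitted.
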
Example #9
ErrorCode BasePinImpl::ReceiveConnection(IPin *pConnector, MediaType* pMediaType)
{
	if (pConnector == NULL)
	{
		ASSERT(0);
		//return E_INVALIDARG;
		throw -1;
	}

	if (m_dir != PINDIR_INPUT)
	{
		ASSERT(0);
		return Error;
	}

	ErrorCode hr;

	hr = CheckConnect(pConnector);
	if (hr < 0)
	{
		BreakConnect();
		return hr;
	}

	hr = CheckMediaType(pMediaType);
	if (hr < 0)
	{
		BreakConnect();
		return hr;
	}

	// Is the order of these last two correct?

	SetMediaType(pMediaType);

	hr = CompleteConnect(pConnector);
	if (hr < 0)
	{
		// BreakConnect(); ??
		return hr;
	}

	return 0;
}
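
On the "Is the order of these last two correct?" question: the stock CBasePin::ReceiveConnection (Example #21 below) uses the same order, SetMediaType before CompleteConnect, and on a CompleteConnect failure it does roll back with BreakConnect, so the commented-out BreakConnect() here should indeed be called.
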
Example #10
HRESULT TtextInputPin::Receive(IMediaSample *pSample)
{
    HRESULT hr;
    ASSERT(pSample);

    hr=CDeCSSInputPin::Receive(pSample);
    if (FAILED(hr)) {
        return hr;
    }

    AM_MEDIA_TYPE* pmt=NULL;
    if (SUCCEEDED(pSample->GetMediaType(&pmt)) && pmt) {
        CMediaType mt(*pmt);
        bool oldfirsttime=firsttime;
        SetMediaType(&mt);
        DeleteMediaType(pmt);
        firsttime=oldfirsttime;
    }

    if (firsttime) {
        DPRINTF(_l("TtextInputPin::Receive initSubtitles"));
        firsttime=false;
        found=filter->initSubtitles(id,type,extradata,extradatasize);
    }

    REFERENCE_TIME t1=-1,t2=-1;
    pSample->GetTime(&t1,&t2);

    BYTE *data;
    pSample->GetPointer(&data);
    long datalen=pSample->GetActualDataLength();
    if (Tsubreader::isDVDsub(type)) {
        StripPacket(data,datalen);
    }
    //int sStart=float(t1+segmentStart)/REF_SECOND_MULT,sStop=float(t2+segmentStart)/REF_SECOND_MULT;
    //data[datalen]='\0';
    //DPRINTF(_l("%02i:%02i:%02i-%02i:%02i:%02i %s"),sStart/3600,(sStart%3600)/60,sStart%60,sStop/3600,(sStop%3600)/60,sStop%60,(const char_t*)text<char_t>((const char*)data));
    if (data && datalen>0) {
        filter->addSubtitle(id,t1+segmentStart,t2+segmentStart,data,datalen,utf8);
    }
    return S_OK;
}
Example #11
int SetCciFlags(u8 *flags, cci_settings *set)
{
	// Backup Write Wait Time
	if(SetBackupWriteWaitTime(&flags[cciflag_BACKUP_WRITE_WAIT_TIME], set->rsf))
		return INVALID_RSF_OPT;
	// Platform
	flags[cciflag_MEDIA_PLATFORM] = cciplatform_CTR;
	// Card Type
	if(SetMediaType(&flags[cciflag_MEDIA_TYPE], set))
		return INVALID_RSF_OPT;
	// Media Unit
	flags[cciflag_MEDIA_BLOCK_SIZE] = GetCtrBlockSizeFlag(set->romInfo.blockSize);
	// Card Device
	if(SetCardDevice(flags, set->romInfo.saveSize, set->rsf))
		return INVALID_RSF_OPT;
	
	set->romInfo.mediaType = flags[cciflag_MEDIA_TYPE];
	set->romInfo.cardDevice = flags[cciflag_CARD_DEVICE] | flags[cciflag_CARD_DEVICE_OLD];
	
	return 0;
}
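
Note that despite the name collision, the SetMediaType called in Example #11 is unrelated to DirectShow pins: this appears to be ROM-image tooling that writes the CCI media-type flag byte for a CTR (3DS) cartridge header, included here only because the function name matches.
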
Example #12
ErrorCode BasePinImpl::AttemptConnection(IPin *pReceivePin, MediaType* mt)
{
	ErrorCode hr;

	// CheckConnect: why is this called here? It will be called multiple times if it's here.

	hr = CheckConnect(pReceivePin);
	if (hr < 0)
	{
		BreakConnect();
		return hr;
	}

	hr = CheckMediaType(mt);
	if (hr < 0)
	{
		BreakConnect();
		return hr;
	}

	hr = pReceivePin->ReceiveConnection(this, mt);
	if (hr < 0) return hr;

	hr = SetMediaType(mt);	// ?? Have this here??
	if (hr < 0)
	{
		BreakConnect();
		return hr;
	}

	hr = CompleteConnect(pReceivePin);
	if (hr < 0)
	{
		Disconnect();
		return hr;
	}

	return 0;
}
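
On the "Have this here??" question: the stock base classes answer it the other way around. CBasePin::AttemptConnection marks itself connected and calls SetMediaType before invoking the peer's ReceiveConnection, because the peer may interact with this pin during the handshake; Example #7 follows that order, whereas this variant defers SetMediaType until after the peer accepts.
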
Example #13
HRESULT CFLICStream::FillBuffer(IMediaSample* pSample)
{
	HRESULT hr;

	{
		CAutoLock cAutoLockShared(&m_cSharedState);

        if(m_rtPosition >= m_rtStop)
			return S_FALSE;

		BYTE* pDataIn = m_pFrameBuffer;
		BYTE* pDataOut = NULL;
		if(!pDataIn || FAILED(hr = pSample->GetPointer(&pDataOut)) || !pDataOut)
			return S_FALSE;

		AM_MEDIA_TYPE* pmt;
		if(SUCCEEDED(pSample->GetMediaType(&pmt)) && pmt)
		{
			CMediaType mt(*pmt);
			SetMediaType(&mt);

			DeleteMediaType(pmt);
		}

		int w, h, bpp;
		if(m_mt.formattype == FORMAT_VideoInfo)
		{
			w = ((VIDEOINFOHEADER*)m_mt.pbFormat)->bmiHeader.biWidth;
			h = abs(((VIDEOINFOHEADER*)m_mt.pbFormat)->bmiHeader.biHeight);
			bpp = ((VIDEOINFOHEADER*)m_mt.pbFormat)->bmiHeader.biBitCount;
		}
		else if(m_mt.formattype == FORMAT_VideoInfo2)
		{
			w = ((VIDEOINFOHEADER2*)m_mt.pbFormat)->bmiHeader.biWidth;
			h = abs(((VIDEOINFOHEADER2*)m_mt.pbFormat)->bmiHeader.biHeight);
			bpp = ((VIDEOINFOHEADER2*)m_mt.pbFormat)->bmiHeader.biBitCount;
		}
		else
		{
			return S_FALSE;
		}

		int pitchIn = m_hdr.x;
		int pitchOut = w*bpp>>3;

		int nFrame = m_rtPosition / m_AvgTimePerFrame; // (int)(1.0 * m_rtPosition / m_AvgTimePerFrame + 0.5);

		{
			SeekToNearestKeyFrame(nFrame);

			while(m_nLastFrameNum < nFrame && !m_bFlushing)
				ExtractFrame(++m_nLastFrameNum);

			for(int y = 0, p = min(pitchIn, pitchOut); 
				y < h; 
				y++, pDataIn += pitchIn, pDataOut += pitchOut)
			{
				BYTE* src = pDataIn;
				BYTE* end = src + p;
				DWORD* dst = (DWORD*)pDataOut;
				while(src < end) *dst++ = m_pPalette[*src++];
			}
		}

		pSample->SetActualDataLength(pitchOut*h);

		REFERENCE_TIME rtStart, rtStop;
        // The sample times are modified by the current rate.
        rtStart = static_cast<REFERENCE_TIME>(m_rtSampleTime / m_dRateSeeking);
        rtStop  = rtStart + static_cast<int>(m_AvgTimePerFrame / m_dRateSeeking);
        pSample->SetTime(&rtStart, &rtStop);

        m_rtSampleTime += m_AvgTimePerFrame;
        m_rtPosition += m_AvgTimePerFrame;
	}

	pSample->SetSyncPoint(TRUE);

	if(m_bDiscontinuity) 
    {
		pSample->SetDiscontinuity(TRUE);
		m_bDiscontinuity = FALSE;
	}

	return S_OK;
}
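
The inner loop of Example #13 expands 8-bit palettized FLIC pixels to 32-bit output: each source byte indexes m_pPalette and the resulting DWORD is written out, row by row, using the smaller of the input and output strides.
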
Example #14
HRESULT CSubtitleStream::FillBuffer(IMediaSample* pSample)
{
	HRESULT hr;

	{
		CAutoLock cAutoLockShared(&m_cSharedState);

		BYTE* pData = NULL;
		if (FAILED(hr = pSample->GetPointer(&pData)) || !pData) {
			return S_FALSE;
		}

		AM_MEDIA_TYPE* pmt;
		if (SUCCEEDED(pSample->GetMediaType(&pmt)) && pmt) {
			CMediaType mt(*pmt);
			SetMediaType(&mt);
			DeleteMediaType(pmt);
		}

		int len = 0;
		REFERENCE_TIME rtStart, rtStop;

		if (m_mt.majortype == MEDIATYPE_Video && m_mt.subtype == MEDIASUBTYPE_ARGB32) {
			rtStart = (REFERENCE_TIME)((m_nPosition*_ATPF - m_rtStart) / m_dRateSeeking);
			rtStop = (REFERENCE_TIME)(((m_nPosition+1)*_ATPF - m_rtStart) / m_dRateSeeking);
			if (m_rtStart+rtStart >= m_rtDuration) {
				return S_FALSE;
			}

			BITMAPINFOHEADER& bmi = ((VIDEOINFOHEADER*)m_mt.pbFormat)->bmiHeader;

			SubPicDesc spd;
			spd.w = _WIDTH;
			spd.h = _HEIGHT;
			spd.bpp = 32;
			spd.pitch = bmi.biWidth*4;
			spd.bits = pData;

			len = spd.h*spd.pitch;

			for (int y = 0; y < spd.h; y++) {
				memsetd((DWORD*)(pData + spd.pitch*y), 0xff000000, spd.w*4);
			}

			RECT bbox;
			m_rts.Render(spd, m_nPosition*_ATPF, 10000000.0/_ATPF, bbox);

			for (int y = 0; y < spd.h; y++) {
				DWORD* p = (DWORD*)(pData + spd.pitch*y);
				for (int x = 0; x < spd.w; x++, p++) {
					*p = (0xff000000-(*p&0xff000000))|(*p&0xffffff);
				}
			}
		} else if (m_mt.majortype == MEDIATYPE_Video && m_mt.subtype == MEDIASUBTYPE_RGB32) {
			const STSSegment* stss = m_rts.GetSegment(m_nPosition);
			if (!stss) {
				return S_FALSE;
			}

			BITMAPINFOHEADER& bmi = ((VIDEOINFOHEADER*)m_mt.pbFormat)->bmiHeader;

			SubPicDesc spd;
			spd.w = _WIDTH;
			spd.h = _HEIGHT;
			spd.bpp = 32;
			spd.pitch = bmi.biWidth*4;
			spd.bits = pData;

			len = spd.h*spd.pitch;

			for (int y = 0; y < spd.h; y++) {
				DWORD c1 = 0xff606060, c2 = 0xffa0a0a0;
				if (y&32) {
					c1 ^= c2, c2 ^= c1, c1 ^= c2;
				}
				DWORD* p = (DWORD*)(pData + spd.pitch*y);
				for (int x = 0; x < spd.w; x+=32, p+=32) {
					memsetd(p, (x&32) ? c1 : c2, min(spd.w-x,32)*4);
				}
			}

			RECT bbox;
			m_rts.Render(spd, 10000i64*(stss->start+stss->end)/2, 10000000.0/_ATPF, bbox);

			rtStart = (REFERENCE_TIME)((10000i64*stss->start - m_rtStart) / m_dRateSeeking);
			rtStop = (REFERENCE_TIME)((10000i64*stss->end - m_rtStart) / m_dRateSeeking);
		} else {
			if ((size_t)m_nPosition >= m_rts.GetCount()) {
				return S_FALSE;
			}

			STSEntry& stse = m_rts[m_nPosition];

			if (stse.start >= m_rtStop/10000) {
				return S_FALSE;
			}

			if (m_mt.majortype == MEDIATYPE_Subtitle && m_mt.subtype == MEDIASUBTYPE_UTF8) {
				CStringA str = UTF16To8(m_rts.GetStrW(m_nPosition, false));
				memcpy((char*)pData, str, len = str.GetLength());
			} else if (m_mt.majortype == MEDIATYPE_Subtitle && (m_mt.subtype == MEDIASUBTYPE_SSA || m_mt.subtype == MEDIASUBTYPE_ASS)) {
				CStringW line;
				line.Format(L"%d,%d,%s,%s,%d,%d,%d,%s,%s",
							stse.readorder, stse.layer, CStringW(stse.style), CStringW(stse.actor),
							stse.marginRect.left, stse.marginRect.right, (stse.marginRect.top+stse.marginRect.bottom)/2,
							CStringW(stse.effect), m_rts.GetStrW(m_nPosition, true));

				CStringA str = UTF16To8(line);
				memcpy((char*)pData, str, len = str.GetLength());
			} else if (m_mt.majortype == MEDIATYPE_Text && m_mt.subtype == MEDIASUBTYPE_NULL) {
				CStringA str = m_rts.GetStrA(m_nPosition, false);
				memcpy((char*)pData, str, len = str.GetLength());
			} else {
				return S_FALSE;
			}

			rtStart = (REFERENCE_TIME)((10000i64*stse.start - m_rtStart) / m_dRateSeeking);
			rtStop = (REFERENCE_TIME)((10000i64*stse.end - m_rtStart) / m_dRateSeeking);
		}

		pSample->SetTime(&rtStart, &rtStop);
		pSample->SetActualDataLength(len);

		m_nPosition++;
	}

	pSample->SetSyncPoint(TRUE);

	if (m_bDiscontinuity) {
		pSample->SetDiscontinuity(TRUE);
		m_bDiscontinuity = FALSE;
	}

	return S_OK;
}
Example #15
HRESULT CAudioPin::FillBuffer(IMediaSample *pSample)
{
  try
  {
    CDeMultiplexer& demux=m_pTsReaderFilter->GetDemultiplexer();
    CBuffer* buffer=NULL;
    bool earlyStall = false;
    
    //get file-duration and set m_rtDuration
    GetDuration(NULL);
    
    do
    {
      //Check if we need to wait for a while
      DWORD timeNow = GET_TIME_NOW();
      while (timeNow < (m_LastFillBuffTime + m_FillBuffSleepTime))
      {      
        Sleep(1);
        timeNow = GET_TIME_NOW();
      }
      m_LastFillBuffTime = timeNow;

      //did we reach the end of the file
      if (demux.EndOfFile())
      {
        int ACnt, VCnt;
        demux.GetBufferCounts(&ACnt, &VCnt);
        if (ACnt <= 0 && VCnt <= 0) //have we used all the data ?
        {
          LogDebug("audPin:set eof");
          m_FillBuffSleepTime = 5;
          CreateEmptySample(pSample);
          m_bInFillBuffer = false;
          return S_FALSE; //S_FALSE will notify the graph that end of file has been reached
        }
      }

      //if the filter is currently seeking to a new position
      //or this pin is currently seeking to a new position then
      //we don't try to read any packets, but simply return...
      if (m_pTsReaderFilter->IsSeeking() || m_pTsReaderFilter->IsStopping() || demux.m_bFlushRunning || !m_pTsReaderFilter->m_bStreamCompensated)
      {
        m_FillBuffSleepTime = 5;
        CreateEmptySample(pSample);
        m_bInFillBuffer = false;
        if (demux.m_bFlushRunning || !m_pTsReaderFilter->m_bStreamCompensated)
        {
          //Force discon on next good sample
          m_sampleCount = 0;
          m_bDiscontinuity=true;
        }
        if (!m_pTsReaderFilter->m_bStreamCompensated && (m_nNextAFT != 0))
        {
          ClearAverageFtime();
        }
        return NOERROR;
      }
      else
      {
        m_FillBuffSleepTime = 1;
        m_bInFillBuffer = true;
      }     
                  
      // Get next audio buffer from demultiplexer
      buffer=demux.GetAudio(earlyStall, m_rtStart);

      if (buffer==NULL)
      {
        m_FillBuffSleepTime = 5;
      }
      else
      {
        m_bPresentSample = true ;
        
        if (buffer->GetForcePMT())
        {
          m_bAddPMT = true;
        }
        if (buffer->GetDiscontinuity())
        {
          m_bDiscontinuity = true;
        }
        
        CRefTime RefTime,cRefTime ;
        bool HasTimestamp ;
        double fTime = 0.0;
        double clock = 0.0;
        double stallPoint = AUDIO_STALL_POINT;
        //check if it has a timestamp
        if ((HasTimestamp=buffer->MediaTime(RefTime)))
        {
          cRefTime = RefTime ;
          cRefTime -= m_rtStart ;
          //adjust the timestamp with the compensation
          cRefTime -= m_pTsReaderFilter->GetCompensation() ;
          
          //Check if total compensation offset is more than +/-10ms
          if (abs(m_pTsReaderFilter->GetTotalDeltaComp()) > 100000)
          {
            if (!m_bDisableSlowPlayDiscontinuity)
            {
              //Force downstream filters to resync by setting discontinuity flag
              pSample->SetDiscontinuity(TRUE);
            }
            m_pTsReaderFilter->ClearTotalDeltaComp();
          }

          REFERENCE_TIME RefClock = 0;
          m_pTsReaderFilter->GetMediaPosition(&RefClock) ;
          clock = (double)(RefClock-m_rtStart.m_time)/10000000.0 ;
          fTime = ((double)cRefTime.m_time/10000000.0) - clock ;
          
          //Calculate a mean 'fTime' value using 'raw' fTime data
          CalcAverageFtime(fTime);
          if (timeNow < (m_pTsReaderFilter->m_lastPauseRun + (30*1000)))
          {
            //do this for 30s after start of play, a flush or pause
            m_fAFTMeanRef = m_fAFTMean;
          }
          
          //Add compensation time for external downstream audio delay
          //to stop samples becoming 'late' (note: this does NOT change the actual sample timestamps)
          fTime -= m_fAFTMeanRef;  //remove the 'mean' offset
          fTime += ((AUDIO_STALL_POINT/2.0) + 0.2); //re-centre the timing                 

          //Discard late samples at start of play,
          //and samples outside a sensible timing window during play 
          //(helps with signal corruption recovery)
          cRefTime -= m_pTsReaderFilter->m_ClockOnStart.m_time;

          if (fTime < -2.0)
          {                          
            if ((m_dRateSeeking == 1.0) && (m_pTsReaderFilter->State() == State_Running) && (clock > 8.0) && !demux.m_bFlushDelegated)
            { 
              //Very late - request internal flush and re-sync to stream
              demux.DelegatedFlush(false, false);
              LogDebug("audPin : Audio to render very late, flushing") ;
            }
          }
          
          if ((cRefTime.m_time >= PRESENT_DELAY) && 
              (fTime > ((cRefTime.m_time >= FS_TIM_LIM) ? -0.3 : -0.5)) && (fTime < 2.5))
          {
            if ((fTime > stallPoint) && (m_sampleCount > 2))
            {
              //Too early - stall to avoid over-filling of audio decode/renderer buffers,
              //but don't enable at start of play to make sure graph starts properly
              m_FillBuffSleepTime = 10;
              buffer = NULL;
              earlyStall = true;
              continue;
            }           
          }
          else //Don't drop samples normally - it upsets the rate matching in the audio renderer
          {
            // Sample is too late.
            m_bPresentSample = false ;
          }
          cRefTime += m_pTsReaderFilter->m_ClockOnStart.m_time;         
        }

        if (m_bPresentSample && (m_dRateSeeking == 1.0) && (buffer->Length() > 0))
        {
          //do we need to set the discontinuity flag?
          if (m_bDiscontinuity)
          {
            //if so, set it
            pSample->SetDiscontinuity(TRUE);
            
            LogDebug("audPin: Set discontinuity L:%d B:%d fTime:%03.3f SampCnt:%d", m_bDiscontinuity, buffer->GetDiscontinuity(), (float)fTime, m_sampleCount);
            m_bDiscontinuity=FALSE;
          }

          if (m_bAddPMT && !m_pTsReaderFilter->m_bDisableAddPMT && !m_bPinNoAddPMT)
          {
            //Add MediaType info to sample
            CMediaType mt; 
            int audioIndex = 0;
            demux.GetAudioStream(audioIndex);
            demux.GetAudioStreamType(audioIndex, mt, m_iPosition);
            pSample->SetMediaType(&mt); 
            SetMediaType(&mt);               
            WAVEFORMATEX* wfe = (WAVEFORMATEX*)mt.Format();         
            LogDebug("audPin: Add pmt, fTime:%03.3f SampCnt:%d, Ch:%d, Sr:%d", (float)fTime, m_sampleCount, wfe->nChannels, wfe->nSamplesPerSec);
            m_bAddPMT = false; //Only add once
          }   

          if (HasTimestamp)
          {
            //now we have the final timestamp, set timestamp in sample
            REFERENCE_TIME refTime=(REFERENCE_TIME)cRefTime;
            refTime = (REFERENCE_TIME)((double)refTime/m_dRateSeeking);
            refTime += m_pTsReaderFilter->m_regAudioDelay; //add offset (to produce delay relative to video)

            pSample->SetSyncPoint(TRUE);
            pSample->SetTime(&refTime,&refTime);
            
            if (m_pTsReaderFilter->m_ShowBufferAudio || fTime < 0.02 || (m_sampleCount < 3))
            {
              int cntA, cntV;
              CRefTime firstAudio, lastAudio;
              CRefTime firstVideo, lastVideo, zeroVideo;
              cntA = demux.GetAudioBufferPts(firstAudio, lastAudio); 
              cntV = demux.GetVideoBufferPts(firstVideo, lastVideo, zeroVideo);
              
              LogDebug("Aud/Ref : %03.3f, Compensated = %03.3f ( %0.3f A/V buffers=%02d/%02d), Clk : %f, SampCnt %d, Sleep %d ms, stallPt %03.3f", (float)RefTime.Millisecs()/1000.0f, (float)cRefTime.Millisecs()/1000.0f, fTime,cntA,cntV, clock, m_sampleCount, m_FillBuffSleepTime, (float)stallPoint);
            }
            if (m_pTsReaderFilter->m_ShowBufferAudio) m_pTsReaderFilter->m_ShowBufferAudio--;
            // CalcAverageFtime(fTime);
              
            if (((float)cRefTime.Millisecs()/1000.0f) > AUDIO_READY_POINT)
            {
              m_pTsReaderFilter->m_audioReady = true;
            }
          }
          else
          {
            //buffer has no timestamp
            pSample->SetTime(NULL,NULL);
            pSample->SetSyncPoint(FALSE);
          }

          //copy buffer in sample
          BYTE* pSampleBuffer;
          pSample->SetActualDataLength(buffer->Length());
          pSample->GetPointer(&pSampleBuffer);
          memcpy(pSampleBuffer,buffer->Data(),buffer->Length());
          //delete the buffer and return
          delete buffer;
          demux.EraseAudioBuff();
        }
        else
        { // Buffer was not displayed because it was out of date, search for next.
          delete buffer;
          demux.EraseAudioBuff();
          buffer=NULL ;
          m_FillBuffSleepTime = (m_dRateSeeking == 1.0) ? 1 : 2;
          m_bDiscontinuity = TRUE; //Next good sample will be discontinuous
        }
      }      
      earlyStall = false;
    } while (buffer==NULL);

    m_bInFillBuffer = false;
    return NOERROR;
  }

  // Should we return something other than NOERROR when hitting an exception?
  catch(int e)
  {
    LogDebug("audPin:fillbuffer exception %d", e);
  }
  catch(...)
  {
    LogDebug("audPin:fillbuffer exception ...");
  }
  m_FillBuffSleepTime = 5;
  CreateEmptySample(pSample);
  m_bDiscontinuity = TRUE; //Next good sample will be discontinuous  
  m_bInFillBuffer = false; 
  return NOERROR;
}
Example #16
STDMETHODIMP CStreamSwitcherInputPin::Receive(IMediaSample* pSample)
{
    AM_MEDIA_TYPE* pmt = nullptr;
    if (SUCCEEDED(pSample->GetMediaType(&pmt)) && pmt) {
        const CMediaType mt(*pmt);
        DeleteMediaType(pmt), pmt = nullptr;
        SetMediaType(&mt);
    }

    // DAMN!!!!!! this doesn't work if the stream we are blocking
    // shares the same thread with another stream, mpeg splitters
    // are usually like that. Our nicely built up multithreaded
    // strategy is useless because of this, ARRRRRRGHHHHHH.

#ifdef BLOCKSTREAM
    if (m_fCanBlock) {
        m_evBlock.Wait();
    }
#endif

    if (!IsActive()) {
#ifdef BLOCKSTREAM
        if (m_fCanBlock) {
            return S_FALSE;
        }
#endif

        TRACE(_T("&^$#@ : a stupid fix for this stupid problem\n"));
        //Sleep(32);
        return E_FAIL; // a stupid fix for this stupid problem
    }

    CAutoLock cAutoLock(&m_csReceive);

    CStreamSwitcherOutputPin* pOut = (static_cast<CStreamSwitcherFilter*>(m_pFilter))->GetOutputPin();
    ASSERT(pOut->GetConnected());

    HRESULT hr = __super::Receive(pSample);
    if (S_OK != hr) {
        return hr;
    }

    if (m_SampleProps.dwStreamId != AM_STREAM_MEDIA) {
        return pOut->Deliver(pSample);
    }

    //

    ALLOCATOR_PROPERTIES props, actual;
    hr = m_pAllocator->GetProperties(&props);
    hr = pOut->CurrentAllocator()->GetProperties(&actual);

    REFERENCE_TIME rtStart = 0, rtStop = 0;
    if (S_OK == pSample->GetTime(&rtStart, &rtStop)) {
        //
    }

    long cbBuffer = pSample->GetActualDataLength();

    CMediaType mtOut = m_mt;
    mtOut = (static_cast<CStreamSwitcherFilter*>(m_pFilter))->CreateNewOutputMediaType(mtOut, cbBuffer);

    bool fTypeChanged = false;

    if (mtOut != pOut->CurrentMediaType() || cbBuffer > actual.cbBuffer) {
        fTypeChanged = true;

        m_SampleProps.dwSampleFlags |= AM_SAMPLE_TYPECHANGED/*|AM_SAMPLE_DATADISCONTINUITY|AM_SAMPLE_TIMEDISCONTINUITY*/;

        /*
                if (CComQIPtr<IPinConnection> pPC = pOut->CurrentPinConnection())
                {
                    HANDLE hEOS = CreateEvent(nullptr, FALSE, FALSE, nullptr);
                    hr = pPC->NotifyEndOfStream(hEOS);
                    hr = pOut->DeliverEndOfStream();
                    WaitForSingleObject(hEOS, 3000);
                    CloseHandle(hEOS);
                    hr = pOut->DeliverBeginFlush();
                    hr = pOut->DeliverEndFlush();
                }
        */

        if (props.cBuffers < 8 && mtOut.majortype == MEDIATYPE_Audio) {
            props.cBuffers = 8;
        }

        props.cbBuffer = cbBuffer;

        if (actual.cbAlign != props.cbAlign
                || actual.cbPrefix != props.cbPrefix
                || actual.cBuffers < props.cBuffers
                || actual.cbBuffer < props.cbBuffer) {
            hr = pOut->DeliverBeginFlush();
            hr = pOut->DeliverEndFlush();
            hr = pOut->CurrentAllocator()->Decommit();
            hr = pOut->CurrentAllocator()->SetProperties(&props, &actual);
            hr = pOut->CurrentAllocator()->Commit();
        }
    }

    CComPtr<IMediaSample> pOutSample;
    if (FAILED(InitializeOutputSample(pSample, &pOutSample))) {
        return E_FAIL;
    }

    pmt = nullptr;
    if (SUCCEEDED(pOutSample->GetMediaType(&pmt)) && pmt) {
        const CMediaType mt(*pmt);
        DeleteMediaType(pmt), pmt = nullptr;
        // TODO
        ASSERT(0);
    }

    if (fTypeChanged) {
        pOut->SetMediaType(&mtOut);
        (static_cast<CStreamSwitcherFilter*>(m_pFilter))->OnNewOutputMediaType(m_mt, mtOut);
        pOutSample->SetMediaType(&mtOut);
    }

    // Transform

    hr = (static_cast<CStreamSwitcherFilter*>(m_pFilter))->Transform(pSample, pOutSample);

    //

    if (S_OK == hr) {
        hr = pOut->Deliver(pOutSample);
        m_bSampleSkipped = FALSE;
        /*
                if (FAILED(hr))
                {
                    ASSERT(0);
                }
        */
    } else if (S_FALSE == hr) {
        hr = S_OK;
        pOutSample = nullptr;
        m_bSampleSkipped = TRUE;

        if (!m_bQualityChanged) {
            m_pFilter->NotifyEvent(EC_QUALITY_CHANGE, 0, 0);
            m_bQualityChanged = TRUE;
        }
    }

    return hr;
}
Example #17
// Attempts to set an output type on the mixer.
HRESULT EVRCustomPresenter::RenegotiateMediaType()
{
  Log("EVRCustomPresenter::RenegotiateMediaType");

  HRESULT hr = S_OK;
  BOOL bFoundMediaType = FALSE;

  IMFMediaType *pMixerType = NULL;
  IMFMediaType *pOptimalType = NULL;
  IMFVideoMediaType *pVideoType = NULL;

  CheckPointer(m_pMixer, MF_E_INVALIDREQUEST);

  // Loop through all of the mixer's proposed output types.
  DWORD iTypeIndex = 0;
  while (!bFoundMediaType && (hr != MF_E_NO_MORE_TYPES))
  {
    SAFE_RELEASE(pMixerType);
    SAFE_RELEASE(pOptimalType);

    // Step 1. Get the next media type supported by mixer.
    hr = m_pMixer->GetOutputAvailableType(0, iTypeIndex++, &pMixerType);
    if (FAILED(hr))
    {
      Log("EVRCustomPresenter::RenegotiateMediaType no usable media type found");
      break;
    }

    // From now on, if anything in this loop fails, try the next type, until we succeed or the mixer runs out of types.

    // Step 2. Check if we support this media type. 
    hr = IsMediaTypeSupported(pMixerType);
    if (FAILED(hr))
    {
      Log("EVRCustomPresenter::RenegotiateMediaType EVRCustomPresenter::IsMediaTypeSupported failed");
      continue;
    }

    // Step 3. Adjust the mixer's type to match our requirements.
    hr = CreateOptimalVideoType(pMixerType, &pOptimalType);
    if (FAILED(hr))
    {
      Log("EVRCustomPresenter::RenegotiateMediaType EVRCustomPresenter::CreateOptimalVideoType failed");
      continue;
    }

    // Step 4. Check if the mixer will accept this media type.
    hr = m_pMixer->SetOutputType(0, pOptimalType, MFT_SET_TYPE_TEST_ONLY);
    if (FAILED(hr))
    {
      Log("EVRCustomPresenter::RenegotiateMediaType IMFTransform::SetOutputType");
      continue;
    }

    // Step 5. Try to set the media type on ourselves.
    hr = SetMediaType(pOptimalType);
    if (FAILED(hr))
    {
      Log("EVRCustomPresenter::RenegotiateMediaType EVRCustomPresenter::SetMediaType failed");
      continue;
    }

    // Step 6. Set output media type on mixer.
    hr = m_pMixer->SetOutputType(0, pOptimalType, 0);
    assert(SUCCEEDED(hr)); // This should succeed unless the MFT lied in the previous call.
    if (FAILED(hr))
    {
      Log("EVRCustomPresenter::RenegotiateMediaType IMFTransform::SetOutputType failed");
      SetMediaType(NULL);
      continue;
    }

    // valid media type found and output set, exit loop
    bFoundMediaType = TRUE;
  }

  SAFE_RELEASE(pMixerType);
  SAFE_RELEASE(pOptimalType);
  SAFE_RELEASE(pVideoType);

  return hr;
}
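
CheckPointer at the top of RenegotiateMediaType is the standard DirectShow base-class macro; for reference:

// From the DirectShow base classes: return `ret` when the pointer is NULL.
#define CheckPointer(p, ret) { if ((p) == NULL) return (ret); }
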
Example #18
HRESULT CAudioPin::FillBuffer(IMediaSample *pSample)
{
  try
  {
    Packet* buffer = NULL;

    do
    {
      if (m_demux.m_bAudioWaitForSeek)
      {
        m_demux.m_bAudioWaitForSeek = false;
        m_bSeekDone = false;
      }

      if (!m_bSeekDone || m_pFilter->IsStopping() || m_bFlushing || m_demux.IsMediaChanging() || m_demux.m_bRebuildOngoing || 
        m_demux.m_eAudioClipSeen->Check())
      {
        Sleep(1);
        return ERROR_NO_DATA;
      }

      if (m_pCachedBuffer)
      {
        LogDebug("aud: cached fetch %6.3f clip: %d playlist: %d", m_pCachedBuffer->rtStart / 10000000.0, m_pCachedBuffer->nClipNumber, m_pCachedBuffer->nPlaylist);
        buffer = m_pCachedBuffer;
        m_pCachedBuffer = NULL;
      }
      else
        buffer = m_demux.GetAudio();

      if (m_demux.EndOfFile())
      {
        LogDebug("aud: set EOF");
        m_demux.m_eAudioClipSeen->Set();
        return S_FALSE;
      }

      if (!buffer)
      {
        if (m_bFirstSample)
          Sleep(10);
        else 
        {
          if (!m_bClipEndingNotified)
          {
            // Deliver end of stream notification to allow audio renderer to stop buffering.
            // This should only happen when the stream enters into paused state
            //LogDebug("aud: FillBuffer - DeliverEndOfStream");
            //DeliverEndOfStream();
            m_bClipEndingNotified = true;
          }
          else
            Sleep(10);

          return ERROR_NO_DATA;
        }
      }
      else
      {
        bool checkPlaybackState = false;
        REFERENCE_TIME rtStart = m_rtStart;

        //JoinAudioBuffers(buffer, &demux);
        
        {
          CAutoLock lock(m_section);

          if (m_demux.m_bAudioResetStreamPosition)
          {
            m_demux.m_bAudioResetStreamPosition = false;
            m_bZeroTimeStream = true;
          }

          if ((buffer->nNewSegment & NS_NEW_CLIP) == NS_NEW_CLIP)
          {
            LogDebug("aud: Playlist changed to %d - nNewSegment: %d offset: %6.3f rtStart: %6.3f rtPlaylistTime: %6.3f", 
              buffer->nPlaylist, buffer->nNewSegment, buffer->rtOffset / 10000000.0, buffer->rtStart / 10000000.0, buffer->rtPlaylistTime / 10000000.0);

            checkPlaybackState = true;
            m_bClipEndingNotified = false;

            m_demux.m_eAudioClipSeen->Set();
          }

          // Do not convert LPCM to PCM if audio decoder supports LPCM (LAV audio decoder style)
          if (!m_bUsePCM && buffer->pmt && buffer->pmt->subtype == MEDIASUBTYPE_PCM)
            buffer->pmt->subtype = MEDIASUBTYPE_BD_LPCM_AUDIO;

          if (buffer->pmt && m_mt != *buffer->pmt && !((buffer->nNewSegment & NS_NEW_CLIP)==NS_NEW_CLIP))
          {
            HRESULT hrAccept = S_FALSE;
            LogMediaType(buffer->pmt);

            if (m_pPinConnection && false) // TODO - DS audio renderer seems to be only one that supports this
              hrAccept = m_pPinConnection->DynamicQueryAccept(buffer->pmt);
            else if (m_pReceiver)
            {
              //LogDebug("aud: DynamicQueryAccept - not avail");
              GUID guid = buffer->pmt->subtype;
              if (buffer->pmt->subtype == MEDIASUBTYPE_PCM)
              {
                buffer->pmt->subtype = MEDIASUBTYPE_BD_LPCM_AUDIO;
                hrAccept = m_pReceiver->QueryAccept(buffer->pmt);
              }
              
              if (hrAccept != S_OK)
              {
                buffer->pmt->subtype = guid;
                hrAccept = m_pReceiver->QueryAccept(buffer->pmt);
                m_bUsePCM = true;
              }
              else
                m_bUsePCM = false;
            }

            if (hrAccept != S_OK)
            {
              CMediaType mt(*buffer->pmt);
              SetMediaType(&mt);

              LogDebug("aud: graph rebuilding required");

              m_demux.m_bAudioRequiresRebuild = true;
              checkPlaybackState = true;

              DeliverEndOfStream();
            }
            else
            {
              LogDebug("aud: format change accepted");
              CMediaType mt(*buffer->pmt);
              SetMediaType(&mt);
              pSample->SetMediaType(&mt);
              m_pCachedBuffer = buffer;

              return ERROR_NO_DATA;
            }
          }
        } // lock ends

        if (checkPlaybackState)
        {
          m_pCachedBuffer = buffer;
          LogDebug("aud: cached push  %6.3f clip: %d playlist: %d", m_pCachedBuffer->rtStart / 10000000.0, m_pCachedBuffer->nClipNumber, m_pCachedBuffer->nPlaylist);
          
          // checkPlaybackState is already true here; just refresh the media type if needed
          if (buffer->pmt && m_mt != *buffer->pmt && !((buffer->nNewSegment & NS_NEW_CLIP)==NS_NEW_CLIP))
          {
            CMediaType mt(*buffer->pmt);
            SetMediaType(&mt);
          }
          m_pCachedBuffer->nNewSegment = 0;

          return ERROR_NO_DATA;
        }
  
        bool hasTimestamp = buffer->rtStart != Packet::INVALID_TIME;

        REFERENCE_TIME rtCorrectedStartTime = 0;
        REFERENCE_TIME rtCorrectedStopTime = 0;

        if (hasTimestamp && m_dRateSeeking == 1.0)
        {
          bool setPMT = false;

          if (m_bDiscontinuity || buffer->bDiscontinuity)
          {
            LogDebug("aud: set discontinuity");
            pSample->SetDiscontinuity(true);
            setPMT = true;
            m_bDiscontinuity = false;
          }

          if (buffer->pmt || setPMT)
          {
            LogDebug("aud: set PMT");
            pSample->SetMediaType(buffer->pmt);
            m_bDiscontinuity = false;          
          }

          if (hasTimestamp)
          {
            if (m_bZeroTimeStream)
            {
              m_rtStreamTimeOffset = buffer->rtStart - buffer->rtClipStartTime;
              m_bZeroTimeStream=false;
            }
            // Now we have the final timestamp, set timestamp in sample
            //REFERENCE_TIME refTime=(REFERENCE_TIME)cRefTimeStart;
            //refTime /= m_dRateSeeking; //the if rate===1.0 makes this redundant

            pSample->SetSyncPoint(true); // allow all packets to be seeking targets
            rtCorrectedStartTime = buffer->rtStart - m_rtStreamTimeOffset;//- m_rtStart;
            rtCorrectedStopTime = buffer->rtStop - m_rtStreamTimeOffset;// - m_rtStart;
            pSample->SetTime(&rtCorrectedStartTime, &rtCorrectedStopTime);
          }
          else
          {
            // Buffer has no timestamp
            pSample->SetTime(NULL, NULL);
            pSample->SetSyncPoint(false);
          }

          {
            CAutoLock lock(&m_csDeliver);

            if (!m_bFlushing)
            {
              ProcessAudioSample(buffer, pSample);
#ifdef LOG_AUDIO_PIN_SAMPLES
             LogDebug("aud: %6.3f corr %6.3f Playlist time %6.3f clip: %d playlist: %d", buffer->rtStart / 10000000.0, rtCorrectedStartTime / 10000000.0,
                buffer->rtPlaylistTime / 10000000.0, buffer->nClipNumber, buffer->nPlaylist);
#endif
            }
            else
            {
              LogDebug("aud: dropped sample as flush is active!");
              delete buffer;
              return ERROR_NO_DATA;
            }
          }

          m_bFirstSample = false;
          delete buffer;
        }
        else
        { // Buffer was not displayed because it was out of date, search for next.
          delete buffer;
          buffer = NULL;
        }
      }
    } while (!buffer);
    return NOERROR;
  }

  // Should we return something other than NOERROR when hitting an exception?
  catch (int e)
  {
    LogDebug("aud: FillBuffer exception %d", e);
  }
  catch (...)
  {
    LogDebug("aud: FillBuffer exception ...");
  }

  return NOERROR;
}
Example #19
GstFlowReturn VideoFakeSrcPin::PushBuffer(GstBuffer *buffer)
{
    IMediaSample *pSample = NULL;
    byte *data = GST_BUFFER_DATA (buffer);
    int attempts = 0;
    HRESULT hres;
    BYTE *sample_buffer;
    AM_MEDIA_TYPE *mediatype;

    StartUsingOutputPin();

    while (attempts < MAX_ATTEMPTS)
    {
        hres = GetDeliveryBuffer(&pSample, NULL, NULL, 0);
        if (SUCCEEDED (hres))
            break;
        attempts++;
        Sleep(100);
    }

    if (FAILED (hres))
    {
        StopUsingOutputPin();
        GST_WARNING ("Could not get sample for delivery to sink: %x", hres);
        return GST_FLOW_ERROR;
    }

    pSample->GetPointer(&sample_buffer);
    pSample->GetMediaType(&mediatype);
    if (mediatype) {
        SetMediaType (mediatype);
        DeleteMediaType (mediatype); /* GetMediaType allocates a copy we must free */
    }

    if(sample_buffer)
    {
        /* Copy to the destination stride.
         * This is not just a simple memcpy because of the different strides.
         * TODO: optimise for the same-stride case and avoid the copy entirely.
         */
        CopyToDestinationBuffer (data, sample_buffer);
    }

    pSample->SetDiscontinuity(FALSE); /* Decoded frame; unimportant */
    pSample->SetSyncPoint(TRUE); /* Decoded frame; always a valid syncpoint */
    pSample->SetPreroll(FALSE); /* For non-displayed frames.
                                 Not used in GStreamer */

    /* Disable synchronising on this sample. We instead let GStreamer handle
     * this at a higher level, inside BaseSink. */
    pSample->SetTime(NULL, NULL);

    while (attempts < MAX_ATTEMPTS)
    {
        hres = Deliver(pSample);
        if (SUCCEEDED (hres))
            break;
        attempts++;
        Sleep(100);
    }

    pSample->Release();

    StopUsingOutputPin();

    if (SUCCEEDED (hres))
        return GST_FLOW_OK;
    else {
        GST_WARNING_OBJECT (this, "Failed to deliver sample: %x", hres);
        if (hres == VFW_E_NOT_CONNECTED)
            return GST_FLOW_NOT_LINKED;
        else
            return GST_FLOW_ERROR;
    }
}
Example #20
STDMETHODIMP TffdshowDecAudioInputPin::Receive(IMediaSample* pIn)
{
    if (this != filter->inpin) {
        //DPRINTF(_l("TffdshowDecAudioInputPin::Receive Not right pin : this = %u, filter inpin = %u"), this, filter->inpin);
        if (m_useBlock) {
            m_evBlock.Wait();
        } else {
            return S_FALSE;
        }
    }

    if (!isActive()) {
        //DPRINTF(_l("TffdshowDecAudioInputPin::Receive Pin unlocked : this = %u, filter inpin = %u"), this, filter->inpin);
        if (this != filter->inpin) {
            return S_FALSE;
        }
        return E_FAIL;
    }

    //CAutoLock cAutoLock(&m_csReceive);

    if (filter->IsStopped()) {
        return S_FALSE;
    }

    HRESULT hr = TinputPin::Receive(pIn);
    if (hr != S_OK) {
        return hr;
    }

    AM_SAMPLE2_PROPERTIES* const pProps = SampleProps();
    if (pProps->dwStreamId != AM_STREAM_MEDIA) {
        return filter->m_pOutput->Deliver(pIn);
    }

    AM_MEDIA_TYPE *pmt = NULL;
    if (SUCCEEDED(pIn->GetMediaType(&pmt)) && pmt) {
        CMediaType mt(*pmt);
        SetMediaType(&mt);
        DeleteMediaType(pmt);
        pmt = NULL;
    }

    BYTE *src = NULL;
    if (FAILED(hr = pIn->GetPointer(&src))) {
        return hr;
    }
    long srclen = pIn->GetActualDataLength();
    if (strippacket) {
        StripPacket(src, srclen);
    }

    REFERENCE_TIME rtStart = _I64_MIN, rtStop = _I64_MIN;
    hr = pIn->GetTime(&rtStart, &rtStop);

    if (hr == S_OK) {
        insample_rtStart = rtStart;
        insample_rtStop = rtStop;
        //DPRINTF(_l("TffdshowDecAudioInputPin::Receive audio sample start duration %I64i %I64i"),rtStart,rtStop-rtStart);
    }

    if (pIn->IsDiscontinuity() == S_OK) {
        filter->discontinuity = true;
        buf.clear();
        if (FAILED(hr)) {
            return S_OK;
        }
        filter->m_rtStartDec = filter->m_rtStartProc = rtStart;
    }

    if (SUCCEEDED(hr)) {
        REFERENCE_TIME j = filter->m_rtStartDec - rtStart;
        jitter = int(j);
        if ((uint64_t)ff_abs(j) > 100 * (REF_SECOND_MULT / 1000) // +-100ms jitter is allowed for now
                && codecId != AV_CODEC_ID_FLAC
                && codecId != AV_CODEC_ID_TTA
                && codecId != AV_CODEC_ID_WAVPACK
                && codecId != AV_CODEC_ID_TRUEHD
                && codecId != AV_CODEC_ID_MLP
                && codecId != AV_CODEC_ID_COOK
                && !bitstream_codec(codecId)
                && filter->getParam2(IDFF_audio_decoder_JitterCorrection)) {
            DPRINTF(_l("jitter correction"));
            buf.clear();
            newSrcBuffer.clear();
            filter->m_rtStartDec = filter->m_rtStartProc = rtStart;
            if (audioParser) {
                audioParser->NewSegment();
            }
        }
    }

    buf.append(src, srclen);
    buf.reserve(buf.size() + 32);

    AVCodecID newCodecId = codecId;
    TaudioParserData audioParserData;
    // Before sending data to the decoder, we parse it
    switch (codecId) {
        case AV_CODEC_ID_DTS:
        case CODEC_ID_LIBDTS:
        case CODEC_ID_SPDIF_DTS:
        case AV_CODEC_ID_AC3:
        case AV_CODEC_ID_EAC3:
        case AV_CODEC_ID_MLP:
        case AV_CODEC_ID_TRUEHD:
        case CODEC_ID_LIBA52:
        case CODEC_ID_SPDIF_AC3:
        case CODEC_ID_PCM:
        case CODEC_ID_BITSTREAM_TRUEHD:
        case CODEC_ID_BITSTREAM_DTSHD:
            // Search for DTS in Wav only if option is checked
            if (codecId == CODEC_ID_PCM && !searchdts) {
                break;
            }

            // Do not search for DTS in PCM in next frames (otherwise DTS syncword maybe wrongly detected)
            searchdts = false;

            newCodecId = audioParser->parseStream(buf.size() ? &buf[0] : NULL, (int)buf.size(), &newSrcBuffer);
            if (newCodecId == AV_CODEC_ID_NONE) {
                newSrcBuffer.clear();
                break;
            }

            // Get updated data from the parser
            audioParserData = audioParser->getParserData();

            // Clear input buffer (if 2 source buffers are coexisting, sound will be garbled)
            buf.clear();
            if (codecId != newCodecId) {
                DPRINTF(_l("TffdshowDecAudioInputPin : switching codec from %s to %s"), getCodecName(codecId), getCodecName(newCodecId));
                codecId = newCodecId;

                // Update input sample format from (corrected or updated) parser data
                if (audioParserData.channels != 0) {
                    filter->insf.setChannels(audioParserData.channels);
                }
                if (audioParserData.sample_rate != 0) {
                    filter->insf.freq = audioParserData.sample_rate;
                }
                if (audioParserData.sample_format != 0) {
                    filter->insf.sf = audioParserData.sample_format;
                }
                filter->insf.alternateSF = audioParserData.alternateSampleFormat;

                // Sample format from audio parser data
                TsampleFormat fmt = TsampleFormat(
                                        (audioParserData.sample_format != 0) ? audioParserData.sample_format : TsampleFormat::SF_PCM16,
                                        audioParserData.sample_rate, audioParserData.channels);

                // Reinitialize the audio codec according to the new codecId
                DPRINTF(_l("TffdshowDecAudioInputPin::Receive : Initialize audio codec %s"), getCodecName(codecId));
                if (audio) {
                    delete audio;
                    codec = audio = NULL;
                }
                codec = audio = TaudioCodec::initSource(filter, this, codecId, filter->insf, filter->insf.toCMediaType());
                if (!audio) {
                    return E_FAIL; // HRESULT method: returning false would read as S_OK
                }
                jitter = 0;
            }

            // Update libavcodec context with correct channels and bitrate read from parser
            if (lavc_codec(codecId)) {
                TaudioCodecLibavcodec *audioCodecLibavcodec = (TaudioCodecLibavcodec*)audio;
                if (audioCodecLibavcodec) {
                    if (audioParserData.channels != 0) {
                        audioCodecLibavcodec->avctx->channels = audioParserData.channels;
                    }
                    if (audioParserData.bit_rate != 0) {
                        audioCodecLibavcodec->avctx->bit_rate = audioParserData.bit_rate;
                    }
                    if (audioParserData.sample_rate != 0) {
                        audioCodecLibavcodec->avctx->sample_rate = audioParserData.sample_rate;
                    }
                }
            }

            if (audioParserData.channels != 0) {
                filter->insf.setChannels(audioParserData.channels);
            }
            if (audioParserData.sample_rate != 0) {
                filter->insf.freq = audioParserData.sample_rate;
            }
            if (audioParserData.sample_format != 0) {
                filter->insf.sf = audioParserData.sample_format;
            }

            newSrcBuffer.reserve(newSrcBuffer.size() + 32);
            hr = audio->decode(newSrcBuffer);
            if (hr == S_FALSE) {
                return S_OK;
            } else if (hr != S_OK) {
                DPRINTF(_l("TffdshowDecAudioInputPin::Receive decode failed pin %u (%lx)"), this, hr);
            }
            return hr;
        default:
            // Decode data
            hr = audio->decode(buf);
            if (hr == S_FALSE) {
                return S_OK;
            } else if (hr != S_OK) {
                DPRINTF(_l("TffdshowDecAudioInputPin::Receive decode failed pin %u (%lx)"), this, hr);
            }
            return hr;
    }
    hr = audio->decode(buf);
    if (hr == S_FALSE) {
        return S_OK;
    } else if (hr != S_OK) {
        DPRINTF(_l("TffdshowDecAudioInputPin::Receive decode failed pin %u (%lx)"), this, hr);
    }
    return hr;
}
Example #21
STDMETHODIMP
CBasePin::ReceiveConnection(
							IPin * pConnector,      // this is the pin who we will connect to
							const AM_MEDIA_TYPE *pmt    // this is the media type we will exchange
							)
{
	CheckPointer(pConnector,E_POINTER);
	CheckPointer(pmt,E_POINTER);
	ValidateReadPtr(pConnector,sizeof(IPin));
	ValidateReadPtr(pmt,sizeof(AM_MEDIA_TYPE));
	CComAutoLock cObjectLock(m_pLock);

#ifdef _DEBUG
	PIN_INFO PinInfo;

	if (SUCCEEDED(pConnector->QueryPinInfo(&PinInfo)))
	{
		CEasyString PinName=PinInfo.achName;
		PrintSystemLog(0,"RecvPin:%s",(LPCTSTR)PinName);

		if (PinInfo.pFilter)
		{
			FILTER_INFO FilterInfo;
			if (SUCCEEDED(PinInfo.pFilter->QueryFilterInfo(&FilterInfo)))
			{
				CLSID ClassID;
				PinInfo.pFilter->GetClassID(&ClassID);

				CEasyString FilterName=FilterInfo.achName;
				PrintSystemLog(0,"RecvPinFilter:%s",(LPCTSTR)FilterName);

				// The FILTER_INFO structure holds a pointer to the Filter Graph
				// Manager, with a reference count that must be released.
				if (FilterInfo.pGraph != NULL)
				{
					FilterInfo.pGraph->Release();
				}
			}

			PinInfo.pFilter->Release();
		}

		//MessageBox(NULL, (LPCTSTR)PinName, TEXT("Filter Name"), MB_OK);
	}
#endif

	/* Are we already connected */
	if (m_Connected) {
		return VFW_E_ALREADY_CONNECTED;
	}

	/* See if the filter is active */
	if (!IsStopped() && !m_bCanReconnectWhenActive) {
		return VFW_E_NOT_STOPPED;
	}

	HRESULT hr = CheckConnect(pConnector);
	if (FAILED(hr)) {
		// Since the procedure is already returning an error code, there
		// is nothing else this function can do to report the error.
		EXECUTE_ASSERT( SUCCEEDED( BreakConnect() ) );

		return hr;
	}

	/* Ask derived class if this media type is ok */

	CMediaType * pcmt = (CMediaType*) pmt;
	hr = CheckMediaType(pcmt);
	if (hr != NOERROR) {
		// no -we don't support this media type

		// Since the procedure is already returning an error code, there
		// is nothing else this function can do to report the error.
		EXECUTE_ASSERT( SUCCEEDED( BreakConnect() ) );

		// return a specific media type error if there is one
		// or map a general failure code to something more helpful
		// (in particular S_FALSE gets changed to an error code)
		if (SUCCEEDED(hr) ||
			(hr == E_FAIL) ||
			(hr == E_INVALIDARG)) {
				hr = VFW_E_TYPE_NOT_ACCEPTED;
		}

		return hr;
	}

	/* Complete the connection */

	m_Connected = pConnector;
	m_Connected->AddRef();
	hr = SetMediaType(pcmt);
	if (SUCCEEDED(hr)) {
		hr = CompleteConnect(pConnector);
		if (SUCCEEDED(hr)) {
			return NOERROR;
		}
	}

	DbgLog((LOG_TRACE, CONNECT_TRACE_LEVEL, TEXT("Failed to set the media type or failed to complete the connection.")));
	m_Connected->Release();
	m_Connected = NULL;

	// Since the procedure is already returning an error code, there
	// is nothing else this function can do to report the error.
	EXECUTE_ASSERT( SUCCEEDED( BreakConnect() ) );

	return hr;
}
Example #22
HRESULT CLAVOutputPin::DeliverPacket(Packet *pPacket)
{
  HRESULT hr = S_OK;
  IMediaSample *pSample = nullptr;

  long nBytes = (long)pPacket->GetDataSize();

  if(nBytes == 0) {
    goto done;
  }

  CHECK_HR(hr = GetDeliveryBuffer(&pSample, nullptr, nullptr, 0));

  if (m_bPacketAllocator) {
    ILAVMediaSample *pLAVSample = nullptr;
    CHECK_HR(hr = pSample->QueryInterface(&pLAVSample));
    CHECK_HR(hr = pLAVSample->SetPacket(pPacket));
    SafeRelease(&pLAVSample);
  } else {
    // Resize buffer if it is too small
    // This can cause a playback hiccup; we should avoid this if possible by setting a big enough buffer size
    if(nBytes > pSample->GetSize()) {
      SafeRelease(&pSample);
      ALLOCATOR_PROPERTIES props, actual;
      CHECK_HR(hr = m_pAllocator->GetProperties(&props));
      // Give us 2 times the requested size, so we don't resize every time
      props.cbBuffer = nBytes*2;
      if(props.cBuffers > 1) {
        CHECK_HR(hr = __super::DeliverBeginFlush());
        CHECK_HR(hr = __super::DeliverEndFlush());
      }
      CHECK_HR(hr = m_pAllocator->Decommit());
      CHECK_HR(hr = m_pAllocator->SetProperties(&props, &actual));
      CHECK_HR(hr = m_pAllocator->Commit());
      CHECK_HR(hr = GetDeliveryBuffer(&pSample, nullptr, nullptr, 0));
    }

    // Fill the sample
    BYTE* pData = nullptr;
    if(FAILED(hr = pSample->GetPointer(&pData)) || !pData) goto done;

    memcpy(pData, pPacket->GetData(), nBytes);
  }

  if(pPacket->pmt) {
    DbgLog((LOG_TRACE, 10, L"::DeliverPacket() - sending new media type to decoder"));
    pSample->SetMediaType(pPacket->pmt);
    pPacket->bDiscontinuity = true;

    CAutoLock cAutoLock(m_pLock);
    CMediaType pmt = *(pPacket->pmt);
    m_mts.clear();
    m_mts.push_back(pmt);
    pPacket->pmt = nullptr;

    SetMediaType(&pmt);
  }

  bool fTimeValid = pPacket->rtStart != Packet::INVALID_TIME;

  // IBitRateInfo
  m_BitRate.nBytesSinceLastDeliverTime += nBytes;

  if (fTimeValid) {
    if (m_BitRate.rtLastDeliverTime == Packet::INVALID_TIME) {
      m_BitRate.rtLastDeliverTime = pPacket->rtStart;
      m_BitRate.nBytesSinceLastDeliverTime = 0;
    }

    if (m_BitRate.rtLastDeliverTime + 10000000 < pPacket->rtStart) {
      REFERENCE_TIME rtDiff = pPacket->rtStart - m_BitRate.rtLastDeliverTime;

      double dSecs, dBits;

      dSecs = rtDiff / 10000000.0;
      dBits = 8.0 * m_BitRate.nBytesSinceLastDeliverTime;
      m_BitRate.nCurrentBitRate = (DWORD)(dBits / dSecs);

      m_BitRate.rtTotalTimeDelivered += rtDiff;
      m_BitRate.nTotalBytesDelivered += m_BitRate.nBytesSinceLastDeliverTime;

      dSecs = m_BitRate.rtTotalTimeDelivered / 10000000.0;
      dBits = 8.0 * m_BitRate.nTotalBytesDelivered;
      m_BitRate.nAverageBitRate = (DWORD)(dBits / dSecs);

      m_BitRate.rtLastDeliverTime = pPacket->rtStart;
      m_BitRate.nBytesSinceLastDeliverTime = 0;
    }
  }

  CHECK_HR(hr = pSample->SetActualDataLength(nBytes));
  CHECK_HR(hr = pSample->SetTime(fTimeValid ? &pPacket->rtStart : nullptr, fTimeValid ? &pPacket->rtStop : nullptr));
  CHECK_HR(hr = pSample->SetMediaTime(nullptr, nullptr));
  CHECK_HR(hr = pSample->SetDiscontinuity(pPacket->bDiscontinuity));
  CHECK_HR(hr = pSample->SetSyncPoint(pPacket->bSyncPoint));
  CHECK_HR(hr = pSample->SetPreroll(fTimeValid && pPacket->rtStart < 0));
  // Deliver
  CHECK_HR(hr = Deliver(pSample));

done:
  if (!m_bPacketAllocator || !pSample)
    SAFE_DELETE(pPacket);
  SafeRelease(&pSample);
  return hr;
}
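
The IBitRateInfo bookkeeping above divides by 10000000.0 because REFERENCE_TIME counts 100 ns ticks. A minimal sketch of the same arithmetic as a standalone helper (hypothetical name):

#include <windows.h>

// REFERENCE_TIME (a LONGLONG, see strmif.h) counts 100 ns ticks, so
// 10,000,000 ticks equal one second.
static DWORD ComputeBitRate(LONGLONG nBytesDelivered, LONGLONG rtElapsed100ns)
{
    if (rtElapsed100ns <= 0)
        return 0;                               // avoid division by zero
    double dSecs = rtElapsed100ns / 10000000.0; // ticks -> seconds
    double dBits = 8.0 * nBytesDelivered;
    return (DWORD)(dBits / dSecs);              // bits per second
}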
Example #23
0
HRESULT CLAVOutputPin::DeliverPacket(Packet *pPacket)
{
  HRESULT hr = S_OK;
  IMediaSample *pSample = NULL;

  long nBytes = (long)pPacket->GetDataSize();

  if(nBytes == 0) {
    goto done;
  }

  CHECK_HR(hr = GetDeliveryBuffer(&pSample, NULL, NULL, 0));

  if (m_bPacketAllocator) {
    ILAVMediaSample *pLAVSample = NULL;
    CHECK_HR(hr = pSample->QueryInterface(&pLAVSample));
    CHECK_HR(hr = pLAVSample->SetPacket(pPacket));
    SafeRelease(&pLAVSample);
  } else {
    // Resize the buffer if it is too small.
    // This can cause a playback hiccup; we should avoid it if possible by setting a big enough buffer size up front.
    if(nBytes > pSample->GetSize()) {
      SafeRelease(&pSample);
      ALLOCATOR_PROPERTIES props, actual;
      CHECK_HR(hr = m_pAllocator->GetProperties(&props));
      // Give us 2 times the requested size, so we don't resize every time
      props.cbBuffer = nBytes*2;
      if(props.cBuffers > 1) {
        CHECK_HR(hr = __super::DeliverBeginFlush());
        CHECK_HR(hr = __super::DeliverEndFlush());
      }
      CHECK_HR(hr = m_pAllocator->Decommit());
      CHECK_HR(hr = m_pAllocator->SetProperties(&props, &actual));
      CHECK_HR(hr = m_pAllocator->Commit());
      CHECK_HR(hr = GetDeliveryBuffer(&pSample, NULL, NULL, 0));
    }

    // Fill the sample
    BYTE* pData = NULL;
    if(FAILED(hr = pSample->GetPointer(&pData)) || !pData) goto done;

    memcpy(pData, pPacket->GetData(), nBytes);
  }

  if(pPacket->pmt) {
    DbgLog((LOG_TRACE, 10, L"::DeliverPacket() - sending new media type to decoder"));
    pSample->SetMediaType(pPacket->pmt);
    pPacket->bDiscontinuity = true;

    CAutoLock cAutoLock(m_pLock);
    CMediaType pmt = *(pPacket->pmt);
    m_mts.clear();
    m_mts.push_back(pmt);
    pPacket->pmt = NULL;

    SetMediaType(&pmt);
  }

  bool fTimeValid = pPacket->rtStart != Packet::INVALID_TIME;

  CHECK_HR(hr = pSample->SetActualDataLength(nBytes));
  CHECK_HR(hr = pSample->SetTime(fTimeValid ? &pPacket->rtStart : NULL, fTimeValid ? &pPacket->rtStop : NULL));
  CHECK_HR(hr = pSample->SetMediaTime(NULL, NULL));
  CHECK_HR(hr = pSample->SetDiscontinuity(pPacket->bDiscontinuity));
  CHECK_HR(hr = pSample->SetSyncPoint(pPacket->bSyncPoint));
  CHECK_HR(hr = pSample->SetPreroll(fTimeValid && pPacket->rtStart < 0));
  // Deliver
  CHECK_HR(hr = Deliver(pSample));

done:
  if (!m_bPacketAllocator)
    SAFE_DELETE(pPacket);
  SafeRelease(&pSample);
  return hr;
}
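
Both versions resize the allocator with the same sequence: Decommit(), SetProperties(), Commit(), then re-request a delivery buffer. A hedged sketch of that sequence as a standalone helper (hypothetical name; production code must also cope with samples still outstanding on the allocator):

#include <streams.h>

// Hypothetical helper mirroring the resize sequence above. The 2x headroom
// matches the comment in the examples: over-allocate so the allocator is not
// renegotiated for every slightly larger packet.
static HRESULT ResizeAllocatorBuffers(IMemAllocator *pAllocator, long cbNeeded)
{
    ALLOCATOR_PROPERTIES props, actual;
    HRESULT hr = pAllocator->GetProperties(&props);
    if (FAILED(hr)) return hr;

    props.cbBuffer = cbNeeded * 2;

    hr = pAllocator->Decommit();               // stop handing out buffers
    if (FAILED(hr)) return hr;
    hr = pAllocator->SetProperties(&props, &actual);
    if (FAILED(hr)) return hr;
    return pAllocator->Commit();               // re-enable GetBuffer()
}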
Example #24
0
STDMETHODIMP TffdshowVideoInputPin::ReceiveConnection(IPin* pConnector, const AM_MEDIA_TYPE* pmt)
{
    HRESULT hr;
    DPRINTF(_l("TffdshowVideoInputPin::ReceiveConnection"));
    CAutoLock cObjectLock(m_pLock);
    const CLSID &ref = GetCLSID(pConnector);
    if (ref == CLSID_MPC_MatroskaSplitter || ref == CLSID_GabestMatroskaSplitter) {
        connectedSplitter = MPC_matroska_splitter;
    } else if (ref == CLSID_HaaliMediaSplitter) {
        connectedSplitter = Haali_Media_splitter;
    } else if (ref == CLSID_MPC_MpegSourceFilter || ref == CLSID_MPC_MpegSplitterFilter) {
        connectedSplitter = MPC_mpegSplitters;
    } else if (ref == CLSID_DVBSourceFilter) {
        connectedSplitter = DVBSourceFilter;
    } else if (ref == CLSID_PBDA_DTFilter) {
        connectedSplitter = PBDA_DTFilter;
    } else if (ref == CLSID_NeuviewSource) {
        connectedSplitter = NeuviewSource;
    }

#if 0
    PIN_INFO pininfo;
    FILTER_INFO filterinfo;
    pConnector->QueryPinInfo(&pininfo);
    if (pininfo.pFilter) {
        pininfo.pFilter->QueryFilterInfo(&filterinfo);
        DPRINTF(_l("TffdshowVideoInputPin::ReceiveConnection filter=%s pin=%s"), filterinfo.achName, pininfo.achName);
        if (filterinfo.pGraph) {
            filterinfo.pGraph->Release();
        }
        pininfo.pFilter->Release();
    }
    DPRINTF(_l("CLSID 0x%x,0x%x,0x%x"), ref.Data1, ref.Data2, ref.Data3);
    for (int i = 0; i < 8; i++) {
        DPRINTF(_l(",0x%2x"), ref.Data4[i]);
    }
#endif

    if (m_Connected) {
        CMediaType mt(*pmt);

        BITMAPINFOHEADER bih, bihCur;
        ExtractBIH(mt, &bih);
        ExtractBIH(m_mt, &bihCur);

        // HACK: for the intervideo filter, when it tries to change the pitch from 720 to 704...
        //if(bihCur.biWidth != bih.biWidth  && bihCur.biHeight == bih.biHeight)
        // return S_OK;

        return (CheckMediaType(&mt) != S_OK || SetMediaType(&mt) != S_OK/* || !initVideo(mt)*/)
               ? VFW_E_TYPE_NOT_ACCEPTED
               : S_OK;

        // TODO: send ReceiveConnection downstream
    } else {
        hr = fv->deci->checkInputConnect(pConnector);
        if (hr != S_OK) {
            return hr;
        }
    }

    hr = TinputPin::ReceiveConnection(pConnector, pmt);
    return hr;
}
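
GetCLSID(pConnector) above resolves the upstream filter's class ID so the pin can special-case known splitters. A sketch of how such a lookup can be built from stock interfaces (hypothetical helper; ffdshow's actual implementation may differ):

#include <streams.h>

// Hypothetical helper: obtain the CLSID of the filter owning a pin.
// IBaseFilter inherits IPersist, so GetClassID() is always available.
static CLSID GetPinFilterClsid(IPin *pPin)
{
    CLSID clsid = GUID_NULL;
    PIN_INFO pinInfo;
    if (pPin && SUCCEEDED(pPin->QueryPinInfo(&pinInfo)) && pinInfo.pFilter) {
        pinInfo.pFilter->GetClassID(&clsid);
        pinInfo.pFilter->Release();  // QueryPinInfo() AddRef'd the filter
    }
    return clsid;
}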
Example #25
0
HRESULT CVideoPin::FillBuffer(IMediaSample* pSample)
{
  try
  {
    Packet* buffer = NULL;

    do
    {
      if (m_pFilter->IsStopping() || m_demux.IsMediaChanging() || m_bFlushing || !m_bSeekDone || m_demux.m_bRebuildOngoing)
      {
        Sleep(1);
        return ERROR_NO_DATA;
      }

      if (m_demux.EndOfFile())
      {
        LogDebug("vid: set EOF");
        return S_FALSE;
      }

      if (m_demux.m_bVideoClipSeen || (m_demux.m_bAudioRequiresRebuild && !m_demux.m_bVideoClipSeen && !m_demux.m_eAudioClipSeen->Check()))
      {
        CheckPlaybackState();
        return ERROR_NO_DATA;
      }

      if (m_pCachedBuffer)
      {
        LogDebug("vid: cached fetch %6.3f clip: %d playlist: %d", m_pCachedBuffer->rtStart / 10000000.0, m_pCachedBuffer->nClipNumber, m_pCachedBuffer->nPlaylist);
        buffer = m_pCachedBuffer;
        m_pCachedBuffer = NULL;
        buffer->bDiscontinuity = true;
        
        if (m_bProvidePMT)
        {
          CMediaType mt(*buffer->pmt);
          SetMediaType(&mt);
          pSample->SetMediaType(&mt);
          m_bProvidePMT = false;
        }
      }
      else
        buffer = m_demux.GetVideo();

      if (!buffer)
      {
        if (m_bFirstSample)
          Sleep(10);
        else 
        {
          if (!m_bClipEndingNotified)
          {
            // Deliver an end-of-stream notification to flush the video decoder.
            // This should only happen when the stream enters the paused state.
            LogDebug("vid: FillBuffer - DeliverEndOfStream");
            DeliverEndOfStream();
            m_bClipEndingNotified = true;
          }
          else
            Sleep(10);
		  
          return ERROR_NO_DATA;
        }
      }
      else
      {
        bool checkPlaybackState = false;

        {
          CAutoLock lock(m_section);

          if (buffer->nNewSegment > 0)
          {
            if ((buffer->nNewSegment & NS_NEW_CLIP) == NS_NEW_CLIP)
            {
              LogDebug("vid: Playlist changed to %d - nNewSegment: %d offset: %6.3f rtStart: %6.3f rtPlaylistTime: %6.3f", 
                buffer->nPlaylist, buffer->nNewSegment, buffer->rtOffset / 10000000.0, buffer->rtStart / 10000000.0, buffer->rtPlaylistTime / 10000000.0);
            
              m_demux.m_bVideoClipSeen = true;
 
              m_bInitDuration = true;
              checkPlaybackState = true;
              m_bClipEndingNotified = false;

              if (buffer->bResuming || buffer->nNewSegment & NS_INTERRUPTED)
              {
                m_bDoFakeSeek = true;
                m_rtStreamOffset = buffer->rtPlaylistTime;
                m_bZeroTimeStream = true;
                m_demux.m_bAudioResetStreamPosition = true;
              }
              else
                m_rtStreamOffset = 0;

              // The LAV video decoder requires an end-of-stream notification before it can
              // provide complete video frames to downstream filters while we wait for the
              // audio pin to see the clip boundary; we cannot send the next clip's PMT
              // downstream yet, since the audio stream could require a rebuild.
              if (m_currentDecoder == CLSID_LAVVideo && (buffer->nNewSegment & NS_NEW_PLAYLIST))
              {
                LogDebug("DeliverEndOFStream LAV Only for audio pin wait (%d,%d)", buffer->nPlaylist, buffer->nClipNumber);
                DeliverEndOfStream();
              }
            }
            if ((buffer->nNewSegment & NS_STREAM_RESET) == NS_STREAM_RESET)
              m_bInitDuration = true;
          }

          if (buffer->pmt)
          {
            GUID subtype = buffer->pmt->subtype;

            if (buffer->pmt->subtype == FOURCCMap('1CVW') && m_VC1Override != GUID_NULL)
            {
              buffer->pmt->subtype = m_VC1Override;
              LogDebug("vid: FillBuffer - force VC-1 GUID");
            }

            if (!CompareMediaTypes(buffer->pmt, &m_mt))
            {
              LogMediaType(buffer->pmt);
            
              HRESULT hrAccept = S_FALSE;
              m_bProvidePMT = true;

              if (m_pReceiver && CheckVideoFormat(&buffer->pmt->subtype, &m_currentDecoder))
              {
                // Currently no video decoder other than LAV seems to be
                // compatible with dynamic format changes
                if (m_currentDecoder == CLSID_LAVVideo)
                  hrAccept = m_pReceiver->QueryAccept(buffer->pmt);
              }

              if (hrAccept != S_OK)
              {
                CMediaType mt(*buffer->pmt);
                SetMediaType(&mt);

                LogDebug("vid: graph rebuilding required");

                m_demux.m_bVideoRequiresRebuild = true;
                m_bZeroTimeStream = true;
                checkPlaybackState = true;

                //LogDebug("DeliverEndOFStream for rebuild (%d,%d)", buffer->nPlaylist, buffer->nClipNumber);
                //DeliverEndOfStream();
              }
              else
              {
                LogDebug("vid: format change accepted");
                CMediaType mt(*buffer->pmt);
                SetMediaType(&mt);
                pSample->SetMediaType(&mt);

                buffer->nNewSegment = 0;
                m_pCachedBuffer = buffer;
				
                //if (m_currentDecoder == CLSID_LAVVideo)
                //{
                //  LogDebug("DeliverEndOFStream LAV Only (%d,%d)", buffer->nPlaylist, buffer->nClipNumber);
                //  DeliverEndOfStream();
                //}

                return ERROR_NO_DATA;
              }
            } // comparemediatypes
          }
        } // lock ends

        m_rtTitleDuration = buffer->rtTitleDuration;

        if (checkPlaybackState)
        {
          buffer->nNewSegment = 0;
          m_pCachedBuffer = buffer;

          CheckPlaybackState();

          LogDebug("vid: cached push  %6.3f clip: %d playlist: %d", m_pCachedBuffer->rtStart / 10000000.0, m_pCachedBuffer->nClipNumber, m_pCachedBuffer->nPlaylist);

          return ERROR_NO_DATA;
        }

        bool hasTimestamp = buffer->rtStart != Packet::INVALID_TIME;
        REFERENCE_TIME rtCorrectedStartTime = 0;
        REFERENCE_TIME rtCorrectedStopTime = 0;

        if (hasTimestamp)
        {
          if (m_bZeroTimeStream)
          {
            m_rtStreamTimeOffset = buffer->rtStart - buffer->rtClipStartTime;
            m_bZeroTimeStream = false;
          }

          if (m_bDiscontinuity || buffer->bDiscontinuity)
          {
            LogDebug("vid: set discontinuity");
            pSample->SetDiscontinuity(true);
            pSample->SetMediaType(buffer->pmt);
            m_bDiscontinuity = false;
          }

          rtCorrectedStartTime = buffer->rtStart - m_rtStreamTimeOffset;
          rtCorrectedStopTime = buffer->rtStop - m_rtStreamTimeOffset;

          pSample->SetTime(&rtCorrectedStartTime, &rtCorrectedStopTime);

          if (m_bInitDuration)
          {
            m_pFilter->SetTitleDuration(m_rtTitleDuration);
            m_pFilter->ResetPlaybackOffset(buffer->rtPlaylistTime - rtCorrectedStartTime);
            m_bInitDuration = false;
          }

          m_pFilter->OnPlaybackPositionChange();
        }
        else // Buffer has no timestamp
          pSample->SetTime(NULL, NULL);

        pSample->SetSyncPoint(buffer->bSyncPoint);

        {
          CAutoLock lock(&m_csDeliver);

          if (!m_bFlushing)
          {
            BYTE* pSampleBuffer;
            pSample->SetActualDataLength(buffer->GetDataSize());
            pSample->GetPointer(&pSampleBuffer);
            memcpy(pSampleBuffer, buffer->GetData(), buffer->GetDataSize());

            m_bFirstSample = false;

#ifdef LOG_VIDEO_PIN_SAMPLES
            LogDebug("vid: %6.3f corr %6.3f playlist time %6.3f clip: %d playlist: %d size: %d", buffer->rtStart / 10000000.0, rtCorrectedStartTime / 10000000.0, 
              buffer->rtPlaylistTime / 10000000.0, buffer->nClipNumber, buffer->nPlaylist, buffer->GetCount());
#endif
          }
          else
          {
            LogDebug("vid: dropped sample as flush is active!");
            return ERROR_NO_DATA;
          }
        }

        //static int iFrameNumber = 0;
        //LogMediaSample(pSample, iFrameNumber++);

        delete buffer;
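        // After this delete, 'buffer' is non-NULL but dangling; the
        // while (!buffer) test below only checks for NULL, so a delivered
        // sample ends the loop.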
      }
    } while (!buffer);
    return NOERROR;
  }

  catch(...)
  {
    LogDebug("vid: FillBuffer exception");
  }

  return S_OK;
}
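
A hedged sketch of the comparison a helper like CompareMediaTypes() above presumably performs (hypothetical; the real routine may also compare the format block byte-for-byte):

#include <streams.h>

// Hypothetical comparison: treat two types as matching when their
// identifying GUIDs agree. A stricter check would also compare
// cbFormat/pbFormat contents.
static bool MediaTypesMatch(const AM_MEDIA_TYPE *a, const AM_MEDIA_TYPE *b)
{
    return a && b
        && a->majortype  == b->majortype
        && a->subtype    == b->subtype
        && a->formattype == b->formattype;
}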
Example #26
0
// given a specific media type, attempt a connection (includes
// checking that the type is acceptable to this pin)
HRESULT
CBasePin::AttemptConnection(
							IPin* pReceivePin,      // connect to this pin
							const CMediaType* pmt   // using this type
							)
{
	// The caller should hold the filter lock because this function
	// uses m_Connected.  The caller should also hold the filter lock
	// because this function calls SetMediaType(), IsStopped() and
	// CompleteConnect().
	ASSERT(CritCheckIn(m_pLock));

	// Check that the connection is valid  -- need to do this for every
	// connect attempt since BreakConnect will undo it.
	HRESULT hr = CheckConnect(pReceivePin);
	if (FAILED(hr)) {
		DbgLog((LOG_TRACE, CONNECT_TRACE_LEVEL, TEXT("CheckConnect failed")));

		// Since the procedure is already returning an error code, there
		// is nothing else this function can do to report the error.
		EXECUTE_ASSERT( SUCCEEDED( BreakConnect() ) );

		return hr;
	}

	DisplayTypeInfo(pReceivePin, pmt);

	/* Check we will accept this media type */

	hr = CheckMediaType(pmt);
	if (hr == NOERROR) {

		/*  Make ourselves look connected otherwise ReceiveConnection
		may not be able to complete the connection
		*/
		m_Connected = pReceivePin;
		m_Connected->AddRef();
		hr = SetMediaType(pmt);
		if (SUCCEEDED(hr)) {
			/* See if the other pin will accept this type */

			hr = pReceivePin->ReceiveConnection((IPin *)this, pmt);
			if (SUCCEEDED(hr)) {
				/* Complete the connection */

				hr = CompleteConnect(pReceivePin);
				if (SUCCEEDED(hr)) {
					return hr;
				} else {
					DbgLog((LOG_TRACE,
						CONNECT_TRACE_LEVEL,
						TEXT("Failed to complete connection")));
					pReceivePin->Disconnect();
				}
			}
		}
	} else {
		// we cannot use this media type

		// return a specific media type error if there is one
		// or map a general failure code to something more helpful
		// (in particular S_FALSE gets changed to an error code)
		if (SUCCEEDED(hr) ||
			(hr == E_FAIL) ||
			(hr == E_INVALIDARG)) {
				hr = VFW_E_TYPE_NOT_ACCEPTED;
		}
	}

	// BreakConnect and release any connection here in case CheckMediaType
	// failed, or if we set anything up during a callback from
	// ReceiveConnection.

	// Since the procedure is already returning an error code, there
	// is nothing else this function can do to report the error.
	EXECUTE_ASSERT( SUCCEEDED( BreakConnect() ) );

	/*  If failed then undo our state */
	if (m_Connected) {
		m_Connected->Release();
		m_Connected = NULL;
	}

	return hr;
}
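
For context, AttemptConnection() is normally driven by CBasePin::TryMediaTypes(), which walks a media-type enumerator and tries each candidate until one connects. A simplified restatement of that loop (the real member also takes an optional, partially-specified CMediaType filter and skips candidates that do not match it):

// Simplified sketch of CBasePin::TryMediaTypes(); filtering and some
// error handling elided.
HRESULT CBasePin::TryMediaTypes(IPin *pReceivePin, IEnumMediaTypes *pEnum)
{
    AM_MEDIA_TYPE *pmt = NULL;
    while (pEnum->Next(1, &pmt, NULL) == S_OK) {
        HRESULT hr = AttemptConnection(pReceivePin, (CMediaType *)pmt);
        DeleteMediaType(pmt);  // frees the structure and its format block
        if (SUCCEEDED(hr)) {
            return S_OK;
        }
    }
    return VFW_E_NO_ACCEPTABLE_TYPES;
}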
Example #27
0
HRESULT CVideoPin::FillBuffer(IMediaSample *pSample)
{
  try
  {
    CDeMultiplexer& demux = m_pTsReaderFilter->GetDemultiplexer();
    CBuffer* buffer = NULL;    
    bool earlyStall = false;

    //get file-duration and set m_rtDuration
    GetDuration(NULL);

    do
    {
      //Check if we need to wait for a while
      DWORD timeNow = GET_TIME_NOW();
      while (timeNow < (m_LastFillBuffTime + m_FillBuffSleepTime))
      {      
        Sleep(1);
        timeNow = GET_TIME_NOW();
      }
      m_LastFillBuffTime = timeNow;

      //did we reach the end of the file
      if (demux.EndOfFile())
      {
        int ACnt, VCnt;
        demux.GetBufferCounts(&ACnt, &VCnt);
        if (ACnt <= 0 && VCnt <= 0) //have we used all the data ?
        {
          LogDebug("vidPin:set eof");
          m_FillBuffSleepTime = 5;
          CreateEmptySample(pSample);
          m_bInFillBuffer = false;
          return S_FALSE; //S_FALSE will notify the graph that end of file has been reached
        }
      }

      //if the filter or this pin is currently seeking to a new position
      //we don't try to read any packets, but simply return...
      if (m_pTsReaderFilter->IsSeeking() || m_pTsReaderFilter->IsStopping() || demux.m_bFlushRunning || !m_pTsReaderFilter->m_bStreamCompensated)
      {
        m_FillBuffSleepTime = 5;
        CreateEmptySample(pSample);
        m_bInFillBuffer = false;
        if (demux.m_bFlushRunning || !m_pTsReaderFilter->m_bStreamCompensated)
        {
          //Force discon on next good sample
          m_sampleCount = 0;
          m_bDiscontinuity=true;
        }
        return NOERROR;
      }
      else
      {
        m_FillBuffSleepTime = 1;
        m_bInFillBuffer = true;
      }     
                  
      // Get next video buffer from demultiplexer
      buffer=demux.GetVideo(earlyStall);

      if (buffer == NULL)
      {
        m_FillBuffSleepTime = 5;
      }
      else if (buffer->Length() > m_bufferSize)
      {
        //discard buffer
        delete buffer;
        demux.EraseVideoBuff();
        m_bDiscontinuity = TRUE; //Next good sample will be discontinuous
        buffer = NULL;
        m_FillBuffSleepTime = 1;
        LogDebug("vidPin : Error - buffer too large for sample") ;        
      }
      else
      {
        m_bPresentSample = true ;
        
        CRefTime RefTime, cRefTime;
        double fTime = 0.0;
        double clock = 0.0;
        double stallPoint = VIDEO_STALL_POINT;
        //check if it has a timestamp
        bool HasTimestamp=buffer->MediaTime(RefTime);
        if (HasTimestamp)
        {
          bool ForcePresent = false;
          CRefTime compTemp = m_pTsReaderFilter->GetCompensation();
          if (m_pTsReaderFilter->m_bFastSyncFFDShow && (compTemp != m_llLastComp))
          {
            m_bDiscontinuity = true;
          }
          m_llLastComp = compTemp;
          cRefTime = RefTime;
          cRefTime -= m_rtStart;
          //adjust the timestamp with the compensation
          cRefTime -= compTemp;
          cRefTime -= m_pTsReaderFilter->m_ClockOnStart.m_time;
          
          // 'fast start' timestamp modification, during first (AddVideoComp + 1 sec) of play
          double fsAdjLimit = (1.0 * (double)m_pTsReaderFilter->AddVideoComp.m_time) + (double)FS_ADDON_LIM; //(1 * vid comp) + 1 second
          if (m_pTsReaderFilter->m_EnableSlowMotionOnZapping && ((double)cRefTime.m_time < fsAdjLimit) )
          {
            //float startCref = (float)cRefTime.m_time/(1000*10000); //used in LogDebug below only
            //Assume desired timestamp span is zero to fsAdjLimit, actual span is AddVideoComp to fsAdjLimit
            double offsetRatio = fsAdjLimit/(double)FS_ADDON_LIM; // == fsAdjLimit/(fsAdjLimit - (double)m_pTsReaderFilter->AddVideoComp.m_time);
            double currOffset = fsAdjLimit - (double)cRefTime.m_time;
            double newOffset = currOffset * offsetRatio;
            cRefTime = (fsAdjLimit > newOffset) ? (REFERENCE_TIME)(fsAdjLimit - newOffset) : 0;  //Don't allow negative cRefTime
            ForcePresent = true;
            //LogDebug("VFS cOfs %03.3f, nOfs %03.3f, cRefTimeS %03.3f, cRefTimeN %03.3f", (float)currOffset/(1000*10000), (float)newOffset/(1000*10000), startCref, (float)cRefTime.m_time/(1000*10000));         
            if (m_pTsReaderFilter->m_bFastSyncFFDShow)
            {
              m_delayedDiscont = 2; //Force I-frame timestamp updates for FFDShow
            }
          }          

          REFERENCE_TIME RefClock = 0;
          m_pTsReaderFilter->GetMediaPosition(&RefClock) ;
          clock = (double)(RefClock-m_rtStart.m_time)/10000000.0 ;
          fTime = ((double)(cRefTime.m_time + m_pTsReaderFilter->m_ClockOnStart.m_time)/10000000.0) - clock ;
                                                                      
          if (m_dRateSeeking == 1.0)
          {
            if ((fTime < -2.0) && (m_pTsReaderFilter->State() == State_Running) && (clock > 8.0) && !ForcePresent && !demux.m_bFlushDelegated)
            {                            
              //Very late - request internal flush and re-sync to stream
              demux.DelegatedFlush(false, false);
              LogDebug("vidPin : Video to render very late, flushing") ;
            }

            //Discard late samples at start of play,
            //and samples outside a sensible timing window during play 
            //(helps with signal corruption recovery)
            if ((fTime > (ForcePresent ? -1.0 : -0.3)) && (fTime < (demux.m_dVidPTSJumpLimit + 1.0))   )
            {
              if ((fTime > stallPoint) && (m_sampleCount > 10))
              {
                //Too early - stall for a while to avoid over-filling of video pipeline buffers,
                //but don't enable at start of play to make sure graph starts properly
                m_FillBuffSleepTime = 10;
                buffer = NULL;
                earlyStall = true;
                continue;
              }
            }
            else
            {              
              // Sample is too late.
              m_bPresentSample = false ;
            }
          }
          else if ((fTime < -1.0) || (fTime > 3.0)) //Fast-forward limits
          {
            // Sample is too late.
            m_bPresentSample = false ;
          }
          cRefTime += m_pTsReaderFilter->m_ClockOnStart.m_time;
        }

        if (m_bPresentSample && (buffer->Length() > 0))
        {
          
          //do we need to set the discontinuity flag?
          if (m_bDiscontinuity || buffer->GetDiscontinuity())
          {
            if ((m_sampleCount == 0) && m_bAddPMT && !m_pTsReaderFilter->m_bDisableAddPMT && !m_bPinNoAddPMT)
            {
              //Add MediaType info to first sample after OnThreadStartPlay()
              CMediaType mt; 
              if (demux.GetVideoStreamType(mt))
              {
                pSample->SetMediaType(&mt); 
                SetMediaType(&mt);               
                LogDebug("vidPin: Add pmt and set discontinuity L:%d B:%d fTime:%03.3f SampCnt:%d", m_bDiscontinuity, buffer->GetDiscontinuity(), (float)fTime, m_sampleCount);
              }
              else
              {
                LogDebug("vidPin: Add pmt failed - set discontinuity L:%d B:%d fTime:%03.3f SampCnt:%d", m_bDiscontinuity, buffer->GetDiscontinuity(), (float)fTime, m_sampleCount);
              }
              m_bAddPMT = false; //Only add once each time
            }   
            else
            {        
              LogDebug("vidPin: Set discontinuity L:%d B:%d fTime:%03.3f SampCnt:%d", m_bDiscontinuity, buffer->GetDiscontinuity(), (float)fTime, m_sampleCount);
            }

            pSample->SetDiscontinuity(TRUE);           
            m_bDiscontinuity=FALSE;
          }

          //LogDebug("vidPin: video buffer type = %d", buffer->GetVideoServiceType());

          if (HasTimestamp)
          {
            //now we have the final timestamp, set timestamp in sample
            REFERENCE_TIME refTime=(REFERENCE_TIME)cRefTime;
            pSample->SetSyncPoint(TRUE);
            
            bool stsDiscon = TimestampDisconChecker(refTime); //Update with current timestamp

            refTime = (REFERENCE_TIME)((double)refTime/m_dRateSeeking);
            pSample->SetTime(&refTime,&refTime);
            if (m_pTsReaderFilter->m_bFastSyncFFDShow && (m_dRateSeeking == 1.0))
            {
              if (stsDiscon || (pSample->IsDiscontinuity()==S_OK))
              {
                pSample->SetDiscontinuity(TRUE);
                m_delayedDiscont = 2;
              }

              if ((m_delayedDiscont > 0) && (buffer->GetFrameType() == 'I'))
              {
                if ((buffer->GetVideoServiceType() == SERVICE_TYPE_VIDEO_MPEG1 ||
                     buffer->GetVideoServiceType() == SERVICE_TYPE_VIDEO_MPEG2))
                {
                   //Use delayed discontinuity
                   pSample->SetDiscontinuity(TRUE);
                   m_delayedDiscont--;
                   LogDebug("vidPin:set I-frame discontinuity, count %d", m_delayedDiscont);
                }
                else
                {
                   m_delayedDiscont = 0;
                }      
              }                             
            }

            if (m_pTsReaderFilter->m_ShowBufferVideo || ((fTime < 0.02) && (m_dRateSeeking == 1.0)) || (m_sampleCount < 3))
            {
              int cntA, cntV;
              CRefTime firstAudio, lastAudio;
              CRefTime firstVideo, lastVideo, zeroVideo;
              cntA = demux.GetAudioBufferPts(firstAudio, lastAudio); 
              cntV = demux.GetVideoBufferPts(firstVideo, lastVideo, zeroVideo);

              LogDebug("Vid/Ref : %03.3f, %c-frame(%02d), Compensated = %03.3f ( %0.3f A/V buffers=%02d/%02d), Clk : %f, SampCnt %d, stallPt %03.3f", (float)RefTime.Millisecs()/1000.0f,buffer->GetFrameType(),buffer->GetFrameCount(), (float)cRefTime.Millisecs()/1000.0f, fTime, cntA,cntV,clock, m_sampleCount, (float)stallPoint);              
            }
            
            if (m_pTsReaderFilter->m_ShowBufferVideo) m_pTsReaderFilter->m_ShowBufferVideo--;
          }
          else
          {
            //buffer has no timestamp
            pSample->SetTime(NULL,NULL);
            pSample->SetSyncPoint(FALSE);
          }
          
          // copy buffer into the sample
          BYTE* pSampleBuffer;
          pSample->SetActualDataLength(buffer->Length());
          pSample->GetPointer(&pSampleBuffer);
          memcpy(pSampleBuffer,buffer->Data(),buffer->Length());
                    
          // delete the buffer
          delete buffer;
          demux.EraseVideoBuff();
          //m_sampleCount++ ;         
        }
        else
        { // Buffer was not displayed because it was out of date, search for next.
          delete buffer;
          demux.EraseVideoBuff();
          m_bDiscontinuity = TRUE; //Next good sample will be discontinuous
          buffer = NULL;
          m_FillBuffSleepTime = 1;
        }
      }      
      earlyStall = false;
    } while (buffer == NULL);

    m_bInFillBuffer = false;
    return NOERROR;
  }

  catch(...)
  {
    LogDebug("vidPin:fillbuffer exception");
  }
  m_FillBuffSleepTime = 5;
  CreateEmptySample(pSample);
  m_bDiscontinuity = TRUE; //Next good sample will be discontinuous
  m_bInFillBuffer = false;  
  return NOERROR;
}
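
All the /10000000.0 (and Millisecs()) arithmetic above converts REFERENCE_TIME ticks to human-readable units. A minimal sketch of the conversions this code leans on (hypothetical helper names):

#include <windows.h>

// REFERENCE_TIME is a LONGLONG counting 100 ns ticks (strmif.h), so
// 10,000,000 ticks make one second.
static inline double RefTimeToSeconds(LONGLONG rt)
{
    return rt / 10000000.0;
}

static inline LONGLONG SecondsToRefTime(double seconds)
{
    return (LONGLONG)(seconds * 10000000.0);
}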