Example No. 1
HRESULT CBaseVideoFilter::GetDeliveryBuffer(int w, int h, IMediaSample** ppOut)
{
    CheckPointer(ppOut, E_POINTER);

    HRESULT hr;

    if (FAILED(hr = ReconnectOutput(w, h))) {
        return hr;
    }

    if (FAILED(hr = m_pOutput->GetDeliveryBuffer(ppOut, NULL, NULL, 0))) {
        return hr;
    }

    AM_MEDIA_TYPE* pmt;
    if (SUCCEEDED((*ppOut)->GetMediaType(&pmt)) && pmt) {
        CMediaType mt = *pmt;
        m_pOutput->SetMediaType(&mt);
        DeleteMediaType(pmt);
    }

    (*ppOut)->SetDiscontinuity(FALSE);
    (*ppOut)->SetSyncPoint(TRUE);

    // FIXME: hell knows why, but without this the overlay mixer becomes very skippy
    // (don't enable this for other renderers; the old video renderer, for example, will go crazy if you do)
    if (GetCLSID(m_pOutput->GetConnected()) == CLSID_OverlayMixer) {
        (*ppOut)->SetDiscontinuity(TRUE);
    }

    return S_OK;
}
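Most of the DirectShow examples in this listing key their behavior off a GetCLSID helper applied to a connected pin or filter (Examples No. 6, 7 and 14 use unrelated member functions of the same name). The helper itself is not part of this listing; a minimal sketch of what it typically looks like, assuming the DSUtil-style convention of returning GUID_NULL on failure, is:

CLSID GetCLSID(IBaseFilter* pBF)
{
    CLSID clsid = GUID_NULL;
    if (pBF) {
        pBF->GetClassID(&clsid); // IBaseFilter derives from IPersist, which exposes the filter's CLSID
    }
    return clsid;
}

CLSID GetCLSID(IPin* pPin)
{
    CLSID clsid = GUID_NULL;
    PIN_INFO pi;
    if (pPin && SUCCEEDED(pPin->QueryPinInfo(&pi)) && pi.pFilter) {
        clsid = GetCLSID(pi.pFilter); // resolve the pin to its owning filter first
        pi.pFilter->Release();        // QueryPinInfo AddRef'd the filter
    }
    return clsid;
}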
Example No. 2
STDMETHODIMP TtextInputPin::ReceiveConnection(IPin* pConnector, const AM_MEDIA_TYPE* pmt)
{
    DPRINTF(_l("TtextInputPin::ReceiveConnection"));
    utf8=false;
    const CLSID &ref=GetCLSID(pConnector);
    ismatroska=false;
    if ( searchPrevNextFilter(PINDIR_INPUT,pConnector,CLSID_HaaliMediaSplitter)
            || searchPrevNextFilter(PINDIR_INPUT,pConnector,CLSID_MPC_MatroskaSplitter)
            || searchPrevNextFilter(PINDIR_INPUT,pConnector,CLSID_GabestMatroskaSplitter)
            || searchPrevNextFilter(PINDIR_INPUT,pConnector,CLSID_LAVFSplitter)
            || searchPrevNextFilter(PINDIR_INPUT,pConnector,CLSID_LAVFSplitterSource)) {
        ismatroska=true;
    }
#if 0
    PIN_INFO pininfo;
    FILTER_INFO filterinfo;
    pConnector->QueryPinInfo(&pininfo);
    if (pininfo.pFilter) {
        pininfo.pFilter->QueryFilterInfo(&filterinfo);
        DPRINTF (_l("TtextInputPin::CompleteConnect filter=%s pin=%s"),filterinfo.achName,pininfo.achName);
        if (filterinfo.pGraph) {
            filterinfo.pGraph->Release();
        }
        pininfo.pFilter->Release();
    }
    DPRINTF(_l("CLSID 0x%x,0x%x,0x%x"),ref.Data1,ref.Data2,ref.Data3);
    for(int i=0; i<8; i++) {
        DPRINTF(_l(",0x%2x"),ref.Data4[i]);
    }
#endif

    return CDeCSSInputPin::ReceiveConnection(pConnector,pmt);
}
Example No. 3
STDMETHODIMP CStreamSwitcherInputPin::NonDelegatingQueryInterface(REFIID riid, void** ppv)
{
    return
        QI(IStreamSwitcherInputPin)
        IsConnected() && GetCLSID(GetFilterFromPin(GetConnected())) == __uuidof(NeroAudioDecoder) && QI(IPinConnection)
        __super::NonDelegatingQueryInterface(riid, ppv);
}
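The chained return above compiles only because QI is a macro that leaves a dangling ternary operator; the conventional definition (assumed here, it is not shown in this listing) is roughly:

// Each use expands to "(riid == __uuidof(i)) ? GetInterface((i*)this, ppv) :",
// so the whole return statement becomes one conditional chain that falls
// through to __super::NonDelegatingQueryInterface() when nothing matches.
#define QI(i) (riid == __uuidof(i)) ? GetInterface((i*)this, ppv) :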
Example No. 4
HRESULT CStreamSwitcherOutputPin::CheckConnect(IPin* pPin)
{
    CComPtr<IBaseFilter> pBF = GetFilterFromPin(pPin);

    return
        IsAudioWaveRenderer(pBF) || GetCLSID(pBF) == __uuidof(MatrixMixer)
        ? __super::CheckConnect(pPin)
        : E_FAIL;

    //  return CComQIPtr<IPinConnection>(pPin) ? CBaseOutputPin::CheckConnect(pPin) : E_NOINTERFACE;
    //  return CBaseOutputPin::CheckConnect(pPin);
}
Example No. 5
// alloc output buffer
HRESULT TffdshowDecVideoDXVA::DecideBufferSize(IMemAllocator *pAlloc, ALLOCATOR_PROPERTIES *ppropInputRequest)
{
    DPRINTF(_l("TffdshowDecVideoDXVA::DecideBufferSize"));
    if (m_pInput->IsConnected()==FALSE) {
        return E_UNEXPECTED;
    }

    if (!presetSettings) {
        initPreset();
    }
    if (m_IsQueueListedApp==-1) { // Not initialized
        m_IsQueueListedApp= IsQueueListedApp(getExeflnm());
    }


    TvideoCodecDec *pDecoder=NULL;
    getMovieSource((const TvideoCodecDec**)&pDecoder);
    if (pDecoder != NULL && pDecoder->useDXVA()==2) { // DXVA2 : allocator must be allocated inside the decoder (DXVA1 : allocator managed by the renderer)
        TvideoCodecLibavcodecDxva *pDecoderDxva = (TvideoCodecLibavcodecDxva*)pDecoder;
        HRESULT hr;
        ALLOCATOR_PROPERTIES Actual;

        ppropInputRequest->cBuffers = pDecoderDxva->getPicEntryNumber();

        if(FAILED(hr = pAlloc->SetProperties(ppropInputRequest, &Actual))) {
            return hr;
        }

        return ppropInputRequest->cBuffers > Actual.cBuffers || ppropInputRequest->cbBuffer > Actual.cbBuffer
               ? E_FAIL : NOERROR;
    }

    m_IsOldVideoRenderer= IsOldRenderer();
    const CLSID &ref=GetCLSID(m_pOutput->GetConnected());
    if (isQueue==-1) {
        isQueue=presetSettings->multiThread && m_IsQueueListedApp;
    }
    // The queue works with the Overlay Mixer only in MPC, and only
    // when the Overlay Mixer is not connected to the old video renderer (rare, usually RGB out).
    // If the queue can't work with the Overlay Mixer, IsOldRenderer() returns true.
    isQueue=isQueue && !m_IsOldVideoRenderer &&
            (ref==CLSID_OverlayMixer || ref==CLSID_VideoMixingRenderer || ref==CLSID_VideoMixingRenderer9);
    m_IsOldVMR9RenderlessAndRGB=IsOldVMR9RenderlessAndRGB();
    isQueue=isQueue && !(m_IsOldVMR9RenderlessAndRGB); // inform MPC about queue only when queue is effective.
    // DPRINTF(_l("CLSID 0x%x,0x%x,0x%x"),ref.Data1,ref.Data2,ref.Data3);for(int i=0;i<8;i++) {DPRINTF(_l(",0x%2x"),ref.Data4[i]);}
    if (ref==CLSID_VideoRenderer || ref==CLSID_OverlayMixer) {
        return DecideBufferSizeOld(pAlloc, ppropInputRequest,ref);
    } else {
        return DecideBufferSizeVMR(pAlloc, ppropInputRequest,ref);
    }
}
Example No. 6
ITypeInfo * tLuaCOM::GetDefaultEventsInterface()
{
  CLSID clsid = GetCLSID();
  if(clsid == IID_NULL)
    return NULL;
  
  tCOMPtr<ITypeInfo> coclassinfo;
  coclassinfo.Attach(tCOMUtil::GetCoClassTypeInfo(pdisp, clsid));
  if(!coclassinfo)
    return NULL;
  
  ITypeInfo *ptinfo = tCOMUtil::GetDefaultInterfaceTypeInfo(coclassinfo, true);

  return ptinfo;
}
Example No. 7
ITypeInfo * tLuaCOM::GetDefaultEventsInterface()
{
  CLSID clsid = GetCLSID();

  if(clsid == IID_NULL)
    return NULL;
  
  ITypeInfo* coclassinfo = tCOMUtil::GetCoClassTypeInfo(pdisp, clsid);

  if(!coclassinfo)
    return NULL;
  
  ITypeInfo *ptinfo = tCOMUtil::GetDefaultInterfaceTypeInfo(coclassinfo, true);
  COM_RELEASE(coclassinfo);

  return ptinfo;
}
Example No. 8
void CBaseSplitterOutputPin::MakeISCRHappy()
{
    CComPtr<IPin> pPinTo = this, pTmp;
    while (pPinTo && SUCCEEDED(pPinTo->ConnectedTo(&pTmp)) && (pPinTo = pTmp)) {
        pTmp = nullptr;

        CComPtr<IBaseFilter> pBF = GetFilterFromPin(pPinTo);

        if (GetCLSID(pBF) == GUIDFromCString(_T("{48025243-2D39-11CE-875D-00608CB78066}"))) { // ISCR
            CAutoPtr<Packet> p(DEBUG_NEW Packet());
            p->TrackNumber = DWORD_ERROR;
            p->rtStart = -1;
            p->rtStop = 0;
            p->bSyncPoint = FALSE;
            p->SetData(" ", 2);
            QueuePacket(p);
            break;
        }

        pPinTo = GetFirstPin(pBF, PINDIR_OUTPUT);
    }
}
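Example No. 8 above builds the ISCR class ID from its registry string at run time. GUIDFromCString is not shown in this listing; a plausible sketch, assuming it simply wraps CLSIDFromString, is:

GUID GUIDFromCString(CString str)
{
    GUID guid = GUID_NULL;
    // CLSIDFromString expects a wide "{xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx}" string
    // and returns a failure HRESULT on malformed input.
    if (FAILED(CLSIDFromString(CStringW(str), &guid))) {
        return GUID_NULL;
    }
    return guid;
}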
Example No. 9
HRESULT TffdshowDecVideoDXVA::CompleteConnect(PIN_DIRECTION direction,IPin *pReceivePin)
{
    if (direction==PINDIR_INPUT) {
        DPRINTF(_l("TffdshowDecVideoDXVA::CompleteConnect input"));
    } else if (direction==PINDIR_OUTPUT) {
        DPRINTF(_l("TffdshowDecVideoDXVA::CompleteConnect output"));
        TvideoCodecDec *pDecoder=NULL;
        getMovieSource((const TvideoCodecDec**)&pDecoder);
        if (pDecoder != NULL && pDecoder->useDXVA()!=0) {
            TvideoCodecLibavcodecDxva *pDecoderDxva = (TvideoCodecLibavcodecDxva*)pDecoder;
            if (!pDecoderDxva->checkDXVAMode(pReceivePin)) {
                DPRINTF(_l("TffdshowDecVideoDXVA::CompleteConnect output DXVA not supported"));
                return E_FAIL;
            }
        } else {
            DPRINTF(_l("TffdshowDecVideoDXVA::CompleteConnect output DXVA 1 & 2 not supported"));
            return E_FAIL;
        }
        const CLSID &out=GetCLSID(m_pOutput->GetConnected());
        outOverlayMixer=!!(out==CLSID_OverlayMixer);
    }
    return CTransformFilter::CompleteConnect(direction,pReceivePin);
}
Example No. 10
DWORD CBaseSplitterOutputPin::ThreadProc()
{
    SetThreadName(DWORD(-1), "CBaseSplitterOutputPin");
    m_hrDeliver = S_OK;
    m_fFlushing = m_fFlushed = false;
    m_eEndFlush.Set();

    // fix for Microsoft DTV-DVD Video Decoder - video freeze after STOP/PLAY
    bool iHaaliRenderConnect = false;
    CComPtr<IPin> pPinTo = this, pTmp;
    while (pPinTo && SUCCEEDED(pPinTo->ConnectedTo(&pTmp)) && (pPinTo = pTmp)) {
        pTmp = nullptr;
        CComPtr<IBaseFilter> pBF = GetFilterFromPin(pPinTo);
        if (GetCLSID(pBF) == CLSID_DXR) { // Haali Renderer
            iHaaliRenderConnect = true;
            break;
        }
        pPinTo = GetFirstPin(pBF, PINDIR_OUTPUT);
    }
    if (IsConnected() && !iHaaliRenderConnect) {
        GetConnected()->BeginFlush();
        GetConnected()->EndFlush();
    }

    for (;;) {
        Sleep(1);

        DWORD cmd;
        if (CheckRequest(&cmd)) {
            m_hThread = nullptr;
            cmd = GetRequest();
            Reply(S_OK);
            ASSERT(cmd == CMD_EXIT);
            return 0;
        }

        int cnt = 0;
        do {
            CAutoPtr<Packet> p;

            {
                CAutoLock cAutoLock(&m_queue);
                if ((cnt = m_queue.GetCount()) > 0) {
                    p = m_queue.Remove();
                }
            }

            if (S_OK == m_hrDeliver && cnt > 0) {
                ASSERT(!m_fFlushing);

                m_fFlushed = false;

                // flushing can still start here, to release a blocked deliver call

                HRESULT hr = p
                             ? DeliverPacket(p)
                             : DeliverEndOfStream();

                m_eEndFlush.Wait(); // .. so we have to wait until it is done

                if (hr != S_OK && !m_fFlushed) { // and only report the error in m_hrDeliver if we didn't flush the stream
                    // CAutoLock cAutoLock(&m_csQueueLock);
                    m_hrDeliver = hr;
                    break;
                }
            }
        } while (--cnt > 0);
    }
}
Example No. 11
HRESULT CDeinterlacerFilter::CheckConnect(PIN_DIRECTION dir, IPin* pPin)
{
	return GetCLSID(pPin) == __uuidof(*this) ? E_FAIL : S_OK;
}
Example No. 12
HRESULT CStreamSwitcherInputPin::CompleteConnect(IPin* pReceivePin)
{
    HRESULT hr = __super::CompleteConnect(pReceivePin);
    if (FAILED(hr)) {
        return hr;
    }

    (static_cast<CStreamSwitcherFilter*>(m_pFilter))->CompleteConnect(PINDIR_INPUT, this, pReceivePin);

    m_fCanBlock = false;
    bool fForkedSomewhere = false;

    CStringW fileName;
    CStringW pinName;

    IPin* pPin = (IPin*)this;
    IBaseFilter* pBF = (IBaseFilter*)m_pFilter;

    pPin = GetUpStreamPin(pBF, pPin);
    if (pPin) {
        pBF = GetFilterFromPin(pPin);
    }
    while (pPin && pBF) {
        if (IsSplitter(pBF)) {
            pinName = GetPinName(pPin);
        }

        CLSID clsid = GetCLSID(pBF);
        if (clsid == CLSID_AviSplitter || clsid == CLSID_OggSplitter) {
            m_fCanBlock = true;
        }

        int nIn, nOut, nInC, nOutC;
        CountPins(pBF, nIn, nOut, nInC, nOutC);
        fForkedSomewhere = fForkedSomewhere || nIn > 1 || nOut > 1;

        DWORD cStreams = 0;
        if (CComQIPtr<IAMStreamSelect> pSSF = pBF) {
            hr = pSSF->Count(&cStreams);
            if (SUCCEEDED(hr)) {
                for (int i = 0; i < (int)cStreams; i++) {
                    AM_MEDIA_TYPE* pmt = nullptr;
                    hr = pSSF->Info(i, &pmt, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr);
                    if (SUCCEEDED(hr) && pmt && pmt->majortype == MEDIATYPE_Audio) {
                        m_pSSF = pSSF;
                        DeleteMediaType(pmt);
                        break;
                    }
                    DeleteMediaType(pmt);
                }
            }
        }

        if (CComQIPtr<IFileSourceFilter> pFSF = pBF) {
            WCHAR* pszName = nullptr;
            AM_MEDIA_TYPE mt;
            if (SUCCEEDED(pFSF->GetCurFile(&pszName, &mt)) && pszName) {
                fileName = pszName;
                CoTaskMemFree(pszName);

                fileName.Replace('\\', '/');
                CStringW fn = fileName.Mid(fileName.ReverseFind('/') + 1);
                if (!fn.IsEmpty()) {
                    fileName = fn;
                }

                // Haali & LAVFSplitter return only one "Audio" pin name, which makes the CMainFrame::OnInitMenuPopup lookup find the wrong popup menu;
                // add a space at the end to prevent this (internal filters never return just "Audio").
                if (!pinName.IsEmpty()) {
                    fileName = pinName + L" ";
                }

                WCHAR* pName = DEBUG_NEW WCHAR[fileName.GetLength() + 1];
                if (pName) {
                    wcscpy_s(pName, fileName.GetLength() + 1, fileName);
                    if (m_pName) {
                        delete [] m_pName;
                    }
                    m_pName = pName;
                    if (cStreams == 1) { // Simple external track, no need to use the info from IAMStreamSelect
                        m_pSSF.Release();
                    }
                }
            }

            break;
        }

        pPin = GetFirstPin(pBF);

        pPin = GetUpStreamPin(pBF, pPin);
        if (pPin) {
            pBF = GetFilterFromPin(pPin);
        }
    }

    if (!fForkedSomewhere) {
        m_fCanBlock = true;
    }

    m_hNotifyEvent = nullptr;

    return S_OK;
}
Example No. 13
HRESULT CBaseVideoFilter::ReconnectOutput(int w, int h, bool bSendSample, int realWidth, int realHeight)
{
    CMediaType& mt = m_pOutput->CurrentMediaType();

    bool m_update_aspect = false;
    if (f_need_set_aspect) {
        int wout = 0, hout = 0, arxout = 0, aryout = 0;
        ExtractDim(&mt, wout, hout, arxout, aryout);
        if (arxout != m_arx || aryout != m_ary) {
            TRACE(_T("\nCBaseVideoFilter::ReconnectOutput; wout = %d, hout = %d, current = %dx%d, set = %dx%d\n"), wout, hout, arxout, aryout, m_arx, m_ary);
            m_update_aspect = true;
        }
    }

    int w_org = m_w;
    int h_org = m_h;

    bool fForceReconnection = false;
    if (w != m_w || h != m_h) {
        fForceReconnection = true;
        m_w = w;
        m_h = h;
    }

    HRESULT hr = S_OK;

    if (m_update_aspect || fForceReconnection || m_w != m_wout || m_h != m_hout || m_arx != m_arxout || m_ary != m_aryout) {
        if (GetCLSID(m_pOutput->GetConnected()) == CLSID_VideoRenderer) {
            NotifyEvent(EC_ERRORABORT, 0, 0);
            return E_FAIL;
        }

        BITMAPINFOHEADER* bmi = NULL;

        if (mt.formattype == FORMAT_VideoInfo) {
            VIDEOINFOHEADER* vih = (VIDEOINFOHEADER*)mt.Format();
            if (realWidth > 0 && realHeight > 0) {
                SetRect(&vih->rcSource, 0, 0, realWidth, realHeight);
                SetRect(&vih->rcTarget, 0, 0, realWidth, realHeight);
            } else {
                SetRect(&vih->rcSource, 0, 0, m_w, m_h);
                SetRect(&vih->rcTarget, 0, 0, m_w, m_h);
            }
            bmi = &vih->bmiHeader;
            bmi->biXPelsPerMeter = m_w * m_ary;
            bmi->biYPelsPerMeter = m_h * m_arx;
        } else if (mt.formattype == FORMAT_VideoInfo2) {
            VIDEOINFOHEADER2* vih = (VIDEOINFOHEADER2*)mt.Format();
            if (realWidth > 0 && realHeight > 0) {
                SetRect(&vih->rcSource, 0, 0, realWidth, realHeight);
                SetRect(&vih->rcTarget, 0, 0, realWidth, realHeight);
            } else {
                SetRect(&vih->rcSource, 0, 0, m_w, m_h);
                SetRect(&vih->rcTarget, 0, 0, m_w, m_h);
            }
            bmi = &vih->bmiHeader;
            vih->dwPictAspectRatioX = m_arx;
            vih->dwPictAspectRatioY = m_ary;
        } else {
            return E_FAIL;  // should never get here; prevents a null pointer deref on bmi
        }

        bmi->biWidth = m_w;
        bmi->biHeight = m_h;
        bmi->biSizeImage = m_w * m_h * bmi->biBitCount >> 3;

        hr = m_pOutput->GetConnected()->QueryAccept(&mt);
        ASSERT(SUCCEEDED(hr)); // this had better not fail; after all, "mt" is the current media type, just with a different resolution
        HRESULT hr1 = 0;
        CComPtr<IMediaSample> pOut;
        if (SUCCEEDED(hr1 = m_pOutput->GetConnected()->ReceiveConnection(m_pOutput, &mt))) {
            if (bSendSample) {
                HRESULT hr2 = 0;
                if (SUCCEEDED(hr2 = m_pOutput->GetDeliveryBuffer(&pOut, NULL, NULL, 0))) {
                    AM_MEDIA_TYPE* pmt;
                    if (SUCCEEDED(pOut->GetMediaType(&pmt)) && pmt) {
                        CMediaType mt2 = *pmt;
                        m_pOutput->SetMediaType(&mt2);
                        DeleteMediaType(pmt);
                    } else { // stupid overlay mixer won't let us know the new pitch...
                        long size = pOut->GetSize();
                        bmi->biWidth = size ? (size / abs(bmi->biHeight) * 8 / bmi->biBitCount) : bmi->biWidth;
                    }
                } else {
                    m_w = w_org;
                    m_h = h_org;
                    return E_FAIL;
                }
            }
        }

        m_wout = m_w;
        m_hout = m_h;
        m_arxout = m_arx;
        m_aryout = m_ary;

        // some renderers don't send this
        NotifyEvent(EC_VIDEO_SIZE_CHANGED, MAKELPARAM(m_w, m_h), 0);

        return S_OK;
    }

    return S_FALSE;
}
Example No. 14
// **************************************************************************
// Connect ()
//
// Description:
//	Connect to OPC Server.  OPC Server's ProgID and machine name must have
//  been previously specified.
//
// Parameters:
//  none
//
// Returns:
//  bool - true if success.
// **************************************************************************
bool CKServer::Connect ()
	{
	// Program ID of OPC Server should have been defined by now:
	ASSERT (!m_strProgID.IsEmpty ());

	// Assume we are not connecting to KEPServerEx:
	m_bfFlags.bIsKepServerEx = false;

	// Perform any necessary cleanup from a previous connection:
	Disconnect ();

/*
m_server.EnumClassesOfCategories( 
1, 
&catids 
0, 
NULL, 
out enumerator);
*/


	// Obtain the Class ID of the OPC Server.  (GetCLSID() will need the
	// OPC Server's Program ID to succeed.  That's why we checked it above.)
	CLSID clsid;
	bool ret = SUCCEEDED (GetCLSID (clsid));

	// 2006 debug
	//if (ret)
	if (true)
		{
		HRESULT hr;

		// Re-initialize Multi-Query Interface:
		for (int i = 0; i < sizeof (m_arrMultiQI) / sizeof (MULTI_QI); i++)
			{
			m_arrMultiQI [i].pItf = NULL;
			m_arrMultiQI [i].hr = 0;
			}

		// Load up the Interface ID's we hope to get pointers for when we
		// call CoCreateInstanceEx():
		m_arrMultiQI [MQI_IOPCSERVER].pIID		= &IID_IOPCServer;
		m_arrMultiQI [MQI_IOPCCOMMON].pIID		= &IID_IOPCCommon;
		m_arrMultiQI [MQI_IOPCCONNPT].pIID		= &IID_IConnectionPointContainer;
		m_arrMultiQI [MQI_IOPCITEMPROP].pIID	= &IID_IOPCItemProperties;
		m_arrMultiQI [MQI_IOPCBROWSE].pIID		= &IID_IOPCBrowseServerAddressSpace;
		m_arrMultiQI [MQI_IOPCPUBLIC].pIID		= &IID_IOPCServerPublicGroups;
		m_arrMultiQI [MQI_IOPCPERSIST].pIID		= &IID_IPersistFile;

		// Connect to the OPC Server and query all possible interfaces:
		if (m_strRemoteMachine.IsEmpty ())
			{
			// Since m_strRemoteMachine is empty, we will try to instantiate
			// the OPC Server on our local machine.  

			// CoCreateInstanceEx will launch the OPC Server if necessary, and
			// call its QueryInterface for us (bumping its reference count):
			hr = CoCreateInstanceEx (
				clsid,										// CLSID
				NULL,										// No aggregation
				CLSCTX_SERVER,								// connect to local, inproc and remote servers
				NULL,										// remote machine name 
				sizeof (m_arrMultiQI) / sizeof (MULTI_QI),	// number of IIDS to query		
				m_arrMultiQI);								// array of IID pointers to query
			}
		else
			{
			// Since m_strRemoteMachine is not empty, we will assume it contains
			// a valid remote machine name.  We will try to instantiate the OPC
			// Server object on the machine with that name.

			// First we need to initialize a server info structure:
			COSERVERINFO tCoServerInfo;
			ZeroMemory (&tCoServerInfo, sizeof (tCoServerInfo));

			// Allocate memory for the machine name string:
			int nSize = m_strRemoteMachine.GetLength () * sizeof (WCHAR);
			tCoServerInfo.pwszName = new WCHAR [nSize];

			// Check validity of pointer.  If it's bad, there's no point in continuing:
			if (!tCoServerInfo.pwszName)
				{
				ASSERT (FALSE);
				return (false);
				}

			// Copy the machine name string into the server info structure:
#ifdef _UNICODE
			// For Unicode builds, the contents of m_strRemoteMachine will
			// already be in wide character format, as demanded by COM, so
			// copy it as is.
			lstrcpyn (tCoServerInfo.pwszName, m_strRemoteMachine, nSize);
#else 
			// For ANSI builds, the contents of m_strRemoteMachine will not
			// be in wide character format, as demanded by COM, so we need
			// to reformat:
			mbstowcs (tCoServerInfo.pwszName, m_strRemoteMachine, nSize);
#endif//_UNICODE

			// CoCreateInstanceEx will launch the OPC Server if necessary, and
			// call its QueryInterface for us (bumping its reference count):
			hr = CoCreateInstanceEx (
				clsid,										// CLSID
				NULL,										// No aggregation
//				CLSCTX_SERVER,								// connect to local, inproc and remote servers
				CLSCTX_REMOTE_SERVER,						// lyy
				&tCoServerInfo,								// remote machine name 
				sizeof (m_arrMultiQI) / sizeof (MULTI_QI),	// number of IIDS to query		
				m_arrMultiQI);								// array of IID pointers to query

			// COM requires us to free memory allocated for [out] and [in/out]
			// arguments (i.e. name string).
			delete [] tCoServerInfo.pwszName;
			}

		// If CoCreateInstanceEx succeeded, we can check the returned 
		// interface pointers and save them as member variables:

		if (SUCCEEDED (hr))	
			{
			TRACE (_T("OTC: Initializing server %s interfaces.\r\n"), GetProgID ());

			// Check IOPCServer interface pointer:
			if (SUCCEEDED (m_arrMultiQI [MQI_IOPCSERVER].hr))
				{
				m_pIServer = (IOPCServer *)m_arrMultiQI [MQI_IOPCSERVER].pItf;

				if (m_pIServer == NULL)
					{
					// Warning success but no valid pointer:
					ASSERT (FALSE);
					}
				}
			else
				{
				if (m_arrMultiQI [MQI_IOPCSERVER].pItf != NULL)
					{
					// Warning failure but pointer not set to null
					ASSERT (FALSE);
					}

				TRACE (_T("OTC: Failed to query IOPCServer (%08X).\r\n"), 
					m_arrMultiQI [MQI_IOPCSERVER].hr); 
				}

			// Check IOPCCommon interface pointer:
			if (SUCCEEDED (m_arrMultiQI [MQI_IOPCCOMMON].hr))
				{
				m_pICommon = (IOPCCommon *)m_arrMultiQI [MQI_IOPCCOMMON].pItf;

				if (m_pICommon == NULL)
					{
					// Warning success but no valid pointer:
					ASSERT (FALSE);
					}
				}
			else
				{
				if (m_arrMultiQI [MQI_IOPCCOMMON].pItf != NULL)
					{
					// Warning failure but pointer not set to null:
					ASSERT (FALSE);
					}

				TRACE (_T("OTC: Failed to query IOPCCommon (%08X).\r\n"), 
					m_arrMultiQI [MQI_IOPCCOMMON].hr); 
				}

			// Check IConnectionPointContainer interface pointer:
			if (SUCCEEDED (m_arrMultiQI [MQI_IOPCCONNPT].hr))
				{
				m_pIConnPtContainer = 
					(IConnectionPointContainer *)m_arrMultiQI [MQI_IOPCCONNPT].pItf;

				if (m_pIConnPtContainer == NULL)
					{
					// Warning success but no valid pointer:
					ASSERT (FALSE);
					}
				}
			else
				{
				if (m_arrMultiQI [MQI_IOPCCONNPT].pItf != NULL)
					{
					// Warning failure but pointer not set to null:
					ASSERT (FALSE);
					}

				TRACE (_T("OTC: Failed to query IConnectionPoint (%08X).\r\n"), 
					m_arrMultiQI [MQI_IOPCCONNPT].hr); 
				}

			// Check IOPCItemProperties interface pointer:
			if (SUCCEEDED (m_arrMultiQI [MQI_IOPCITEMPROP].hr))
				{
				m_pIItemProps = 
					(IOPCItemProperties *)m_arrMultiQI [MQI_IOPCITEMPROP].pItf;

				if (m_pIItemProps == NULL)
					{
					// Warning success but no valid pointer:
					ASSERT (FALSE);
					}
				}
			else
				{
				if (m_arrMultiQI [MQI_IOPCITEMPROP].pItf != NULL)
					{
					// Warning failure but pointer not set to null:
					ASSERT (FALSE);
					}

				TRACE (_T("OTC: Failed to query IOPCItemProperties (%08X).\r\n"), 
					m_arrMultiQI [MQI_IOPCITEMPROP].hr); 				
				}

			// Check IOPCBrowseServerAddressSpace interface pointer:
			if (SUCCEEDED (m_arrMultiQI [MQI_IOPCBROWSE].hr))
				{
				m_pIBrowse = 
					(IOPCBrowseServerAddressSpace *)m_arrMultiQI [MQI_IOPCBROWSE].pItf;

				if (m_pIBrowse == NULL)
					{
					// Warning success but no valid pointer:
					ASSERT (FALSE);
					}
				}
			else
				{
				if (m_arrMultiQI [MQI_IOPCBROWSE].pItf != NULL)
					{
					// Warning failure but pointer not set to null:
					ASSERT (FALSE);
					}

				TRACE (_T("OTC: Failed to query IOPCBrowseServerAddressSpace (%08X).\r\n"), 
					m_arrMultiQI [MQI_IOPCBROWSE].hr); 				
				}

			// Check IOPCServerPublicGroups interface pointer:
			if (SUCCEEDED (m_arrMultiQI [MQI_IOPCPUBLIC].hr))
				{
				m_pIPublicGroups = 
					(IOPCServerPublicGroups *)m_arrMultiQI [MQI_IOPCPUBLIC].pItf;

				if (m_pIPublicGroups == NULL)
					{
					// Warning success but no valid pointer:
					ASSERT (FALSE);
					}
				}
			else
				{
				if (m_arrMultiQI [MQI_IOPCPUBLIC].pItf != NULL)
					{
					// Warning failure but pointer not set to null:
					ASSERT (FALSE);
					}

				TRACE (_T("OTC: Failed to query IOPCServerPublicGroups (%08X).\r\n"), 
					m_arrMultiQI [MQI_IOPCPUBLIC].hr); 				
				}

			// Check IPersistFile interface pointer:
			if (SUCCEEDED (m_arrMultiQI [MQI_IOPCPERSIST].hr))
				{
				m_pIPersistFile = 
					(IPersistFile *)m_arrMultiQI [MQI_IOPCPERSIST].pItf;

				if (m_pIPersistFile == NULL)
					{
					// Warning success but no valid pointer:
					ASSERT (FALSE);
					}
				}
			else
				{
				if (m_arrMultiQI [MQI_IOPCPERSIST].pItf != NULL)
					{
					// Warning failure but pointer not set to null:
					ASSERT (FALSE);
					}

				TRACE (_T("OTC: Failed to query IPersistsFile (%08X).\r\n"), 
					m_arrMultiQI [MQI_IOPCPERSIST].hr); 				
				}

			// Check IConnectionPointContainer interface pointer:
			if (m_pIConnPtContainer != NULL)
				{
				// If the server supports the shutdown interface, provide a sink 
				// to the server.

				// Get connection point pointer:
				IConnectionPoint *pCP = NULL;
				hr = m_pIConnPtContainer->FindConnectionPoint (IID_IOPCShutdown, &pCP);

				// If we got the connection point, instantiate our shutdown sink:
				if (SUCCEEDED (hr))
					{
					try
						{
						// Instantiate the shutdown sink and add us to its reference count:
						m_pIShutdownSink = new IKShutdownSink (this);
						m_pIShutdownSink->AddRef ();
												
						// Give the connection point a pointer to our shutdown sink:
						// (m_dwCookieShutdownSink is a returned token that uniquely
						// identifies this connection.)
						hr = pCP->Advise (m_pIShutdownSink, &m_dwCookieShutdownSink);

						// We are done with the connection point, so release our reference:
						pCP->Release ();
						}
					
					catch (...)
						{
						// If we find ourselves here, either "new" failed or pCP is bad.
						ASSERT (FALSE);
						hr = E_FAIL;
						}
					}
				}

			// We will base our success on the validity of the IOPCServer interface
			// pointer.  If it is invalid, then we won't be able to do anything:
			m_bConnected = (m_pIServer != NULL);

			// Log success or failure:
			if (m_bConnected)
				LogMsg (IDS_SERVER_CONNECT_SUCCESS, GetProgID ());
			else
				LogMsg (IDS_SERVER_REQUIRED_IID_UNSUPPORTED, GetProgID (), hr);
			}
		
		// CoCreateInstanceEx failed:
		else
			{
			// log failure
			LogMsg (IDS_SERVER_CONNECT_FAILURE, GetProgID (), hr);
			}
		}
	
	// Failed to get Class ID:
	else
		{
		// Log failure:
		LogMsg (IDS_SERVER_UNABLE_TO_GET_CLSID, GetProgID ());
		}

	// Return connected state:
	return (m_bConnected);
	}
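Here GetCLSID is a member of CKServer that resolves the server's ProgID (validated at the top of Connect) to a class ID through the registry. Its definition is not part of this listing; a hedged sketch, assuming it wraps CLSIDFromProgID, is:

HRESULT CKServer::GetCLSID (CLSID &clsid)
	{
	// Convert the stored ProgID to a class ID via the registry lookup that
	// CLSIDFromProgID performs.  The API wants a wide string, so convert
	// for ANSI builds (T2COLE is a no-op in Unicode builds).
	USES_CONVERSION;
	return (CLSIDFromProgID (T2COLE ((LPCTSTR) m_strProgID), &clsid));
	}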
Example No. 15
HRESULT CStreamSwitcherInputPin::CompleteConnect(IPin* pReceivePin)
{
	HRESULT hr = __super::CompleteConnect(pReceivePin);
	if(FAILED(hr)) return hr;

    ((CStreamSwitcherFilter*)m_pFilter)->CompleteConnect(PINDIR_INPUT, this, pReceivePin);

	m_fCanBlock = false;
	bool fForkedSomewhere = false;

	CStringW fileName;
	CStringW pinName;

    IPin* pPin = (IPin*)this;
	IBaseFilter* pBF = (IBaseFilter*)m_pFilter;

	while((pPin = GetUpStreamPin(pBF, pPin)) && (pBF = GetFilterFromPin(pPin)))
	{
		if(IsSplitter(pBF))
		{
			pinName = GetPinName(pPin);
		}

		CLSID clsid = GetCLSID(pBF);
		if(clsid == CLSID_AviSplitter || clsid == CLSID_OggSplitter)
			m_fCanBlock = true;

		int nIn, nOut, nInC, nOutC;
		CountPins(pBF, nIn, nOut, nInC, nOutC);
		fForkedSomewhere = fForkedSomewhere || nIn > 1 || nOut > 1;

		if(CComQIPtr<IFileSourceFilter> pFSF = pBF)
		{
			WCHAR* pszName = NULL;
			AM_MEDIA_TYPE mt;
			if(SUCCEEDED(pFSF->GetCurFile(&pszName, &mt)) && pszName)
			{
				fileName = pszName;
				CoTaskMemFree(pszName);

				fileName.Replace('\\', '/');
				CStringW fn = fileName.Mid(fileName.ReverseFind('/')+1);
				if(!fn.IsEmpty()) fileName = fn;

				if(!pinName.IsEmpty()) fileName += L" / " + pinName;

				WCHAR* pName = new WCHAR[fileName.GetLength()+1];
				if(pName)
				{
					wcscpy(pName, fileName);
					if(m_pName) delete [] m_pName;
					m_pName = pName;
				}
			}

			break;
		}

		pPin = GetFirstPin(pBF);
	}

	if(!fForkedSomewhere)
		m_fCanBlock = true;

	m_hNotifyEvent = NULL;

	return S_OK;
}
Example No. 16
// get list of supported output colorspaces
HRESULT TffdshowDecVideoDXVA::GetMediaType(int iPosition, CMediaType *mtOut)
{
    DPRINTF(_l("TffdshowDecVideoDXVA::GetMediaType"));
    CAutoLock lock(&inpin->m_csCodecs_and_imgFilters);

    if (m_pInput->IsConnected()==FALSE) {
        return E_UNEXPECTED;
    }

    if (!presetSettings) {
        initPreset();
    }

    bool isVIH2;

    if (m_pOutput->IsConnected()) {
        const CLSID &ref=GetCLSID(m_pOutput->GetConnected());
        if (ref==CLSID_VideoMixingRenderer || ref==CLSID_VideoMixingRenderer9) {
            isVIH2=true;
        }
    }

    isVIH2 = (iPosition&1)==0;

    iPosition/=2;

    if (iPosition<0) {
        return E_INVALIDARG;
    }

    TvideoCodecDec *pDecoder=NULL;
    getMovieSource((const TvideoCodecDec**)&pDecoder);
    if (!pDecoder->useDXVA()) {
        return VFW_S_NO_MORE_ITEMS;
    }

    TcspInfos ocsps;
    size_t osize;


    // DXVA mode : special output format
    TvideoCodecLibavcodecDxva *pDecoderDxva = (TvideoCodecLibavcodecDxva*)pDecoder;
    pDecoderDxva->getDXVAOutputFormats(ocsps);
    osize=ocsps.size();

    if ((size_t)iPosition>=osize) {
        return VFW_S_NO_MORE_ITEMS;
    }

    TffPictBase pictOut;
    if (inReconnect) {
        pictOut=reconnectRect;
    } else {
        pictOut=inpin->pictIn;
    }

    // Support mediatype with unknown dimension. This is necessary to support MEDIASUBTYPE_H264.
    // http://msdn.microsoft.com/en-us/library/dd757808(VS.85).aspx
    // The downstream filter has to support reconnecting after this.
    if (pictOut.rectFull.dx == 0) {
        pictOut.rectFull.dx = 320;
    }
    if (pictOut.rectFull.dy == 0) {
        pictOut.rectFull.dy = 160;
    }

    oldRect=pictOut.rectFull;

    const TcspInfo *c=ocsps[iPosition];
    BITMAPINFOHEADER bih;
    memset(&bih,0,sizeof(bih));
    bih.biSize  =sizeof(BITMAPINFOHEADER);
    bih.biWidth =pDecoderDxva->pictWidthRounded();
    if(c->id == FF_CSP_420P) {  // YV12 and odd number lines.
        pictOut.rectFull.dy=odd2even(pictOut.rectFull.dy);
    }
    bih.biHeight=pDecoderDxva->pictHeightRounded();
    bih.biPlanes=WORD(c->numPlanes);
    bih.biCompression=c->fcc;
    bih.biBitCount=WORD(c->bpp);
    bih.biSizeImage=DIBSIZE(bih);// bih.biWidth*bih.biHeight*bih.biBitCount>>3;

    mtOut->majortype=MEDIATYPE_Video;
    mtOut->subtype=*c->subtype;
    mtOut->formattype=isVIH2?FORMAT_VideoInfo2:FORMAT_VideoInfo;
    mtOut->SetTemporalCompression(FALSE);
    mtOut->SetSampleSize(bih.biSizeImage);

    if (!isVIH2) {
        VIDEOINFOHEADER *vih=(VIDEOINFOHEADER*)mtOut->ReallocFormatBuffer(sizeof(VIDEOINFOHEADER));
        if (!vih) {
            return E_OUTOFMEMORY;
        }
        ZeroMemory(vih,sizeof(VIDEOINFOHEADER));

        vih->rcSource.left=0;
        vih->rcSource.right=pictOut.rectFull.dx;
        vih->rcSource.top=0;
        vih->rcSource.bottom=pictOut.rectFull.dy;
        vih->rcTarget=vih->rcSource;
        vih->AvgTimePerFrame=inpin->avgTimePerFrame;
        vih->bmiHeader=bih;
    } else {
        VIDEOINFOHEADER2 *vih2=(VIDEOINFOHEADER2*)mtOut->ReallocFormatBuffer(sizeof(VIDEOINFOHEADER2));
        if (!vih2) {
            return E_OUTOFMEMORY;
        }
        ZeroMemory(vih2,sizeof(VIDEOINFOHEADER2));
        if((presetSettings->resize && presetSettings->resize->is && presetSettings->resize->SARinternally && presetSettings->resize->mode==0)) {
            pictOut.rectFull.sar.num= 1;//pictOut.rectFull.dx; // VMR9 behaves better when this is set to 1(SAR). But in reconnectOutput, it is different(DAR) in my system.
            pictOut.rectFull.sar.den= 1;//pictOut.rectFull.dy;
        }
        setVIH2aspect(vih2,pictOut.rectFull,presetSettings->output->hwOverlayAspect);

        //DPRINTF(_l("AR getMediaType: %i:%i"),vih2->dwPictAspectRatioX,vih2->dwPictAspectRatioY);

        vih2->rcSource.left=0;
        vih2->rcSource.right=pictOut.rectFull.dx;
        vih2->rcSource.top=0;
        vih2->rcSource.bottom=pictOut.rectFull.dy;
        vih2->rcTarget=vih2->rcSource;
        vih2->AvgTimePerFrame=inpin->avgTimePerFrame;
        vih2->bmiHeader=bih;
        //vih2->dwControlFlags=AMCONTROL_USED | AMCONTROL_COLORINFO_PRESENT | (DXVA_NominalRange_Wide << DXVA_NominalRangeShift) | (DXVA_VideoTransferMatrix_BT601 << DXVA_VideoTransferMatrixShift);
        hwDeinterlace=1; // HW deinterlace for DXVA

        if (hwDeinterlace) {
            vih2->dwInterlaceFlags=AMINTERLACE_IsInterlaced|AMINTERLACE_DisplayModeBobOrWeave;
        }
    }
    return S_OK;
}
Example No. 17
STDMETHODIMP TffdshowVideoInputPin::ReceiveConnection(IPin* pConnector, const AM_MEDIA_TYPE* pmt)
{
    HRESULT hr;
    DPRINTF(_l("TffdshowVideoInputPin::ReceiveConnection"));
    CAutoLock cObjectLock(m_pLock);
    const CLSID &ref = GetCLSID(pConnector);
    if (ref == CLSID_MPC_MatroskaSplitter || ref == CLSID_GabestMatroskaSplitter) {
        connectedSplitter = MPC_matroska_splitter;
    } else if (ref == CLSID_HaaliMediaSplitter) {
        connectedSplitter = Haali_Media_splitter;
    } else if (ref == CLSID_MPC_MpegSourceFilter || ref == CLSID_MPC_MpegSplitterFilter) {
        connectedSplitter = MPC_mpegSplitters;
    } else if (ref == CLSID_DVBSourceFilter) {
        connectedSplitter = DVBSourceFilter;
    } else if (ref == CLSID_PBDA_DTFilter) {
        connectedSplitter = PBDA_DTFilter;
    } else if (ref == CLSID_NeuviewSource) {
        connectedSplitter = NeuviewSource;
    }

#if 0
    PIN_INFO pininfo;
    FILTER_INFO filterinfo;
    pConnector->QueryPinInfo(&pininfo);
    if (pininfo.pFilter) {
        pininfo.pFilter->QueryFilterInfo(&filterinfo);
        DPRINTF(_l("TffdshowVideoInputPin::ReceiveConnection filter=%s pin=%s"), filterinfo.achName, pininfo.achName);
        if (filterinfo.pGraph) {
            filterinfo.pGraph->Release();
        }
        pininfo.pFilter->Release();
    }
    DPRINTF(_l("CLSID 0x%x,0x%x,0x%x"), ref.Data1, ref.Data2, ref.Data3);
    for (int i = 0; i < 8; i++) {
        DPRINTF(_l(",0x%2x"), ref.Data4[i]);
    }
#endif

    if (m_Connected) {
        CMediaType mt(*pmt);

        BITMAPINFOHEADER bih, bihCur;
        ExtractBIH(mt, &bih);
        ExtractBIH(m_mt, &bihCur);

        // HACK: for the intervideo filter, when it tries to change the pitch from 720 to 704...
        //if(bihCur.biWidth != bih.biWidth  && bihCur.biHeight == bih.biHeight)
        // return S_OK;

        return (CheckMediaType(&mt) != S_OK || SetMediaType(&mt) != S_OK/* || !initVideo(mt)*/)
               ? VFW_E_TYPE_NOT_ACCEPTED
               : S_OK;

        // TODO: send ReceiveConnection downstream
    } else {
        hr = fv->deci->checkInputConnect(pConnector);
        if (hr != S_OK) {
            return hr;
        }
    }

    hr = TinputPin::ReceiveConnection(pConnector, pmt);
    return hr;
}
Example No. 18
HRESULT CStreamSwitcherInputPin::CompleteConnect(IPin* pReceivePin)
{
	HRESULT hr = __super::CompleteConnect(pReceivePin);
	if (FAILED(hr)) {
		return hr;
	}

	(static_cast<CStreamSwitcherFilter*>(m_pFilter))->CompleteConnect(PINDIR_INPUT, this, pReceivePin);

	m_fCanBlock = false;
	bool fForkedSomewhere = false;

	CStringW fileName;
	CStringW pinName;

	IPin* pPin = (IPin*)this;
	IBaseFilter* pBF = (IBaseFilter*)m_pFilter;

	pPin = GetUpStreamPin(pBF, pPin);
	if (pPin) {
		pBF = GetFilterFromPin(pPin);
	}
	while (pPin && pBF) {
		if (IsSplitter(pBF)) {
			pinName = GetPinName(pPin);
		}

		CLSID clsid = GetCLSID(pBF);
		if (clsid == CLSID_AviSplitter || clsid == CLSID_OggSplitter) {
			m_fCanBlock = true;
		}

		int nIn, nOut, nInC, nOutC;
		CountPins(pBF, nIn, nOut, nInC, nOutC);
		fForkedSomewhere = fForkedSomewhere || nIn > 1 || nOut > 1;

		if (CComQIPtr<IFileSourceFilter> pFSF = pBF) {
			WCHAR* pszName = NULL;
			AM_MEDIA_TYPE mt;
			if (SUCCEEDED(pFSF->GetCurFile(&pszName, &mt)) && pszName) {
				fileName = pszName;
				CoTaskMemFree(pszName);

				fileName.Replace('\\', '/');
				CStringW fn = fileName.Mid(fileName.ReverseFind('/')+1);
				if (!fn.IsEmpty()) {
					fileName = fn;
				}

				// Haali & LAVFSplitter return only one "Audio" pin name, which makes the CMainFrame::OnInitMenuPopup lookup find the wrong popup menu;
				// add a space at the end to prevent this (internal filters never return just "Audio").
				if (!pinName.IsEmpty()) {
					fileName = pinName + L" ";
				}

				WCHAR* pName = DNew WCHAR[fileName.GetLength()+1];
				if (pName) {
					wcscpy_s(pName, fileName.GetLength() + 1, fileName);
					if (m_pName) {
						delete [] m_pName;
					}
					m_pName = pName;
				}
			}

			break;
		}

		pPin = GetFirstPin(pBF);

		pPin = GetUpStreamPin(pBF, pPin);
		if (pPin) {
			pBF = GetFilterFromPin(pPin);
		}
	}

	if (!fForkedSomewhere) {
		m_fCanBlock = true;
	}

	m_hNotifyEvent = NULL;

	return S_OK;
}
Example No. 19
bool TffdshowVideoInputPin::init(const CMediaType &mt)
{
    DPRINTF(_l("TffdshowVideoInputPin::init"));
    bool dont_use_rtStop_from_upper_stream = false;
    isInterlacedRawVideo = false;
    if (mt.formattype == FORMAT_VideoInfo) {
        VIDEOINFOHEADER *vih = (VIDEOINFOHEADER*)mt.pbFormat;
        init_VIH_and_VIH2_common_part(vih->rcSource, vih->rcTarget, vih->dwBitRate, vih->dwBitErrorRate, vih->AvgTimePerFrame, vih->bmiHeader);
    } else if (mt.formattype == FORMAT_VideoInfo2) {
        VIDEOINFOHEADER2 *vih2 = (VIDEOINFOHEADER2*)mt.pbFormat;
        init_VIH_and_VIH2_common_part(vih2->rcSource, vih2->rcTarget, vih2->dwBitRate, vih2->dwBitErrorRate, vih2->AvgTimePerFrame, vih2->bmiHeader);
        isInterlacedRawVideo = vih2->dwInterlaceFlags & AMINTERLACE_IsInterlaced;
        pictIn.setDar(Rational(vih2->dwPictAspectRatioX, vih2->dwPictAspectRatioY));
        DPRINTF(_l("TffdshowVideoInputPin::initVideo: darX:%i, darY:%i"), vih2->dwPictAspectRatioX, vih2->dwPictAspectRatioY);
    } else if (mt.formattype == FORMAT_MPEGVideo) {
        MPEG1VIDEOINFO *mpeg1info = (MPEG1VIDEOINFO*)mt.pbFormat;
        biIn.bmiHeader = mpeg1info->hdr.bmiHeader;
        biIn.bmiHeader.biCompression = FOURCC_MPG1;
        pictIn.setSize(std::max(mpeg1info->hdr.rcSource.right, mpeg1info->hdr.bmiHeader.biWidth), std::max(mpeg1info->hdr.rcSource.bottom, mpeg1info->hdr.bmiHeader.biHeight));
    } else if (mt.formattype == FORMAT_MPEG2Video) {
        MPEG2VIDEOINFO *mpeg2info = (MPEG2VIDEOINFO*)mt.pbFormat;
        biIn.bmiHeader = mpeg2info->hdr.bmiHeader;
        pictIn.setSize(std::max(mpeg2info->hdr.rcSource.right, mpeg2info->hdr.bmiHeader.biWidth), std::max(mpeg2info->hdr.rcSource.bottom, mpeg2info->hdr.bmiHeader.biHeight));
        pictIn.setDar(Rational(mpeg2info->hdr.dwPictAspectRatioX, mpeg2info->hdr.dwPictAspectRatioY));
        if (biIn.bmiHeader.biCompression == 0 || biIn.bmiHeader.biCompression == 0x0038002d) {
            if (mt.subtype == MEDIASUBTYPE_H264_TRANSPORT) {
                biIn.bmiHeader.biCompression = FOURCC_H264;
            } else if (mt.subtype == MEDIASUBTYPE_AVC1 || mt.subtype == MEDIASUBTYPE_avc1 || mt.subtype == MEDIASUBTYPE_H264 || mt.subtype == MEDIASUBTYPE_h264 || mt.subtype == MEDIASUBTYPE_CCV1) {
                biIn.bmiHeader.biCompression = FOURCC_H264;
            } else {
                biIn.bmiHeader.biCompression = FOURCC_MPG2;
            }
        } else {
            biIn.bmiHeader.biCompression = FCCupper(biIn.bmiHeader.biCompression);
            dont_use_rtStop_from_upper_stream = true;
        }
    } else if (mt.formattype == FORMAT_TheoraIll) {
        memset(&biIn, 0, sizeof(biIn));
        sTheoraFormatBlock *oggFormat = (sTheoraFormatBlock*)mt.pbFormat;
        biIn.bmiHeader.biCompression = FOURCC_THEO;
        pictIn.setSize(biIn.bmiHeader.biWidth = oggFormat->width, biIn.bmiHeader.biHeight = oggFormat->height);
        pictIn.setDar(Rational(oggFormat->aspectNumerator, oggFormat->aspectDenominator));
        biIn.bmiHeader.biBitCount = 12;
    } else if (mt.formattype == FORMAT_RLTheora) {
        struct RLTheora {
            VIDEOINFOHEADER hdr;
            DWORD headerSize[3];    // 0: Header, 1: Comment, 2: Codebook
        };
        const RLTheora *rl = (const RLTheora*)mt.pbFormat;
        GetBitContext gb;
        init_get_bits(&gb, (const uint8_t*)(rl + 1), rl->headerSize[0]);
        int ptype = get_bits(&gb, 8);
        if (!(ptype & 0x80)) {
            return false;
        }
        biIn.bmiHeader.biCompression = FOURCC_THEO;
        skip_bits(&gb, 6 * 8); /* "theora" */
        int major = get_bits(&gb, 8); /* version major */
        int minor = get_bits(&gb, 8); /* version minor */
        int micro = get_bits(&gb, 8); /* version micro */
        int theora = (major << 16) | (minor << 8) | micro;

        if (theora < 0x030200) {
            ;//flipped_image = 1;
        }

        biIn.bmiHeader.biWidth = get_bits(&gb, 16) << 4;
        biIn.bmiHeader.biHeight = get_bits(&gb, 16) << 4;
        pictIn.setSize(biIn.bmiHeader.biWidth, biIn.bmiHeader.biHeight);

        skip_bits(&gb, 24); /* frame width */
        skip_bits(&gb, 24); /* frame height */

        skip_bits(&gb, 8); /* offset x */
        skip_bits(&gb, 8); /* offset y */

        skip_bits(&gb, 32); /* fps numerator */
        skip_bits(&gb, 32); /* fps denominator */

        Rational sample_aspect_ratio;
        sample_aspect_ratio.num = get_bits(&gb, 24); /* aspect numerator */
        sample_aspect_ratio.den = get_bits(&gb, 24); /* aspect denominator */
        pictIn.setSar(sample_aspect_ratio);
    } else {
        return false;
    }

    REFERENCE_TIME avgTimePerFrame0 = getAvgTimePerFrame(mt);
    avgTimePerFrame = avgTimePerFrame0 ? avgTimePerFrame0 : 400000;

    char_t pomS[60];
    DPRINTF(_l("TffdshowVideoInputPin::initVideo: %s, width:%i, height:%i, aspectX:%i, aspectY:%i"), fourcc2str(hdr2fourcc(&biIn.bmiHeader, &mt.subtype), pomS, 60) , pictIn.rectFull.dx, pictIn.rectFull.dy, pictIn.rectFull.dar().num, pictIn.rectFull.dar().den);
again:
    codecId = (AVCodecID)getVideoCodecId(&biIn.bmiHeader, &mt.subtype, &biIn.bmiHeader.biCompression);

    // FIXME Experimental //
    // VC1 (in EVO) stream may have attached media type during playback (say, once per 5 second).
    // When I try to use its codec private data, the video heavily stutters.
    // pContext.pDMO->SetInputType (Currently ff_wmv.cpp line 769) takes too long.
    // I gave up using it and decided to ignore it during playback of VC1 stream.
    // It works fine for my sample.
    if (video) {
        if (/*video->codecId == CODEC_ID_WMV9_LIB && */wasVC1 && biIn.bmiHeader.biCompression == 0x31435657 /* "WVC1" */) {
            return true;
        } else if (is_quicksync_codec(video->codecId)) {
            // check if output pin is connected to a supported filter
            IPin *pConnectedPin = NULL;
            if (fv && fv->output) {
                pConnectedPin = fv->output->GetConnected();
                const CLSID &out = GetCLSID(pConnectedPin);
                if (out == CLSID_SampleGrabber || out == CLSID_MediaDetFilter) {
                    delete video;
                    codec = video = NULL;
                    switch (codecId) {
                        case CODEC_ID_H264_QUICK_SYNC:
                            codecId = AV_CODEC_ID_H264;
                            break;
                        case CODEC_ID_MPEG2_QUICK_SYNC:
                            codecId = AV_CODEC_ID_MPEG2VIDEO;
                            break;
                        case CODEC_ID_VC1_QUICK_SYNC:
                            codecId = CODEC_ID_WMV9_LIB;
                            break;
                        default:
                            ASSERT(FALSE); // this shouldn't happen!
                    }
                }
            }

            // no need to reset anything
            if (video) {
                return true;
            }
        } else {
            delete video;
            codec = video = NULL;
        }
    }
    DPRINTF(_l("TffdshowVideoInputPin::initVideo Codec detected : %s"), getCodecName(codecId));
    if (codecId == AV_CODEC_ID_NONE) {
        if (pCompatibleFilter != NULL) {
            rawDecode = true;
            if (video) {
                delete video;
                codec = video = NULL;
            }
            return true;
        }
        return false;
    }

    if (h264_codec(codecId) || codecId == CODEC_ID_H264_DXVA) {
        Textradata extradata(mt, 16);
        if (extradata.size) {
            H264_SPS sps;
            decodeH264SPS(extradata.data, extradata.size, pictIn, &sps);
            // Set frame rate information from SPS::VUI.
            if (!avgTimePerFrame0 // Use information from the upper stream filter if available.
                    && sps.timing_info_present_flag && sps.time_scale && sps.num_units_in_tick) {
                avgTimePerFrame = 2 * REF_SECOND_MULT * sps.num_units_in_tick / sps.time_scale;
            }
        }
    } else if (mpeg4_codec(codecId)) {
        Textradata extradata(mt, 16);
        if (extradata.size) {
            decodeMPEG4pictureHeader(extradata.data, extradata.size, pictIn);
        }
    } else if (mpeg12_codec(codecId)) {
        Textradata extradata(mt, 16);
        if (extradata.size) {
            bool isH264;
            if (decodeMPEGsequenceHeader(biIn.bmiHeader.biCompression == FOURCC_MPG2, extradata.data, extradata.size, pictIn, &isH264) && isH264) {
                biIn.bmiHeader.biCompression = FOURCC_H264;
                goto again;
            }
        }
    }

    if (!fv->sink) {
        rawDecode = true;
        if (video) {
            delete video;
            codec = video = NULL;
        }
    } else {
        fv->initCodecSettings();
        codec = video = TvideoCodecDec::initDec(fv->deci, fv->sink, codecId, biIn.bmiHeader.biCompression, mt);

        if (!video) {
            return false;
        } else {
            static const GUID CLSID_NeroDigitalParser = {0xE206E4DE, 0xA7EE, 0x4A62, 0xB3, 0xE9, 0x4F, 0xBC, 0x8F, 0xE8, 0x4C, 0x73};
            static const GUID CLSID_HaaliMatroskaFile = {0x55DA30FC, 0xF16B, 0x49FC, 0xBA, 0xA5, 0xAE, 0x59, 0xFC, 0x65, 0xF8, 0x2D};
            codecId = video->codecId;
            //dont_use_rtStop_from_upper_stream=biIn.bmiHeader.biCompression==FOURCC_AVC1 && (searchPreviousFilter(this,CLSID_NeroDigitalParser) || searchPreviousFilter(this,CLSID_HaaliMatroskaFile));
            video->connectedSplitter = connectedSplitter;
            video->isInterlacedRawVideo = isInterlacedRawVideo;
            video->containerSar = pictIn.rectFull.sar;
            if (!video->beginDecompress(pictIn,
                                        biIn.bmiHeader.biCompression, mt,
                                        (dont_use_rtStop_from_upper_stream ? TvideoCodecDec::SOURCE_REORDER : 0))) {
                delete video;
                codec = video = NULL;
                return false;
            }
        }
        rawDecode = raw_codec(codecId);
    }
    allocator.NotifyMediaType(mt);
    strippacket = !!(mt.majortype == MEDIATYPE_DVD_ENCRYPTED_PACK);
    wasVC1 = biIn.bmiHeader.biCompression == 0x31435657 /* "WVC1" */;
    return true;
}