Пример #1
0
// Plays a movie/cutscene to completion.
// Returns S_OK on normal completion, S_FALSE if the user interrupted an
// otherwise-successful playback, or a failure HRESULT on error.
HRESULT PlayCutscene(LPTSTR szMovie, HINSTANCE hInstance)
{
    // Create the hidden window that fields keyboard input during playback.
    if (!CreateHiddenWindow(hInstance, szMovie))
        return E_FAIL;

    // Bring up COM before touching any DirectShow objects.
    HRESULT hr = CoInitialize(NULL);
    if (FAILED(hr))
        return hr;

    // Acquire the DirectShow interfaces needed for playback.
    hr = GetInterfaces();
    if (FAILED(hr))
    {
        CoUninitialize();
        return hr;
    }

    // Run the movie until it finishes or the user bails out.
    hr = PlayMedia(szMovie, hInstance);

    // Map "success but interrupted" onto S_FALSE for the caller.
    if (g_bUserInterruptedPlayback && (hr == S_OK))
        hr = S_FALSE;

    // Tear down in reverse order of acquisition.
    CleanupInterfaces();
    CoUninitialize();

    return hr;
}
// Runs the configured XQuery against the part configuration, parses the
// resulting XML and pushes fuse bits, lock bits and programming interfaces
// into pPart.  Returns silently if the XML cannot be parsed.
void AVRStudioXMLParser::Parse(QString configDirPath, Part *pPart)
{
    // Evaluate the XQuery; the config directory path is bound as $partName
    // (NOTE(review): the variable name suggests a part name — confirm the
    // binding is intentional).
    QXmlQuery xq;
    QString xmlOut;
    xq.bindVariable("partName", QVariant(configDirPath));
    xq.setQuery(GetXQuery());
    xq.evaluateTo(&xmlOut);

    // Parse diagnostics — captured for future use, only the bool is checked.
    QString parseError;
    int errLine;
    int errCol;

    QDomDocument dom;
    if (!dom.setContent(xmlOut, &parseError, &errLine, &errCol))
        return;  // malformed XML: bail out silently

    // Expected layout: <root><fuses/><locks/><interfaces/> in that order.
    QDomNode rootNode  = dom.firstChild();
    QDomNode fuseNode  = rootNode.firstChild();
    QDomNode lockNode  = fuseNode.nextSibling();
    QDomNode ifaceNode = lockNode.nextSibling();

    pPart->SetFuseBits(GetBits(fuseNode.firstChild()));
    pPart->SetLockBits(GetBits(lockNode.firstChild()));
    pPart->SetProgrammingInterfaces(GetInterfaces(ifaceNode.firstChild()));
}
Пример #3
0
// Attaches this wrapper to the given object (detaching from any previous
// one first).  A NULL object is accepted and reported as success.
bool CSystemObjectWrapper::Attach(ISystemUnknown *piUnknown)
{
    Detach();
    m_bAttached = true;

    // Nothing to query when no object was supplied.
    if (piUnknown == NULL)
        return true;

    // Success depends on acquiring all required interfaces.
    return GetInterfaces(piUnknown);
}
Пример #4
0
// Builds the capture half of the graph: renders the capture pins of the two
// selected devices (p_streams[0] = video, p_streams[1] = audio) into the
// custom CMyCapVideoFilter / CMyCapAudioFilter sinks.
// Idempotent: returns S_OK immediately once b_buildCapture is set.
HRESULT CAccessSys::BuildCapture(void)
{
	HRESULT hr;
	IBaseFilter *pSrcFilter = NULL;
	IBaseFilter *pMpeg2Filter = NULL;  // NOTE(review): declared but never used in this function

	// Already built — nothing to do.
	if (b_buildCapture){
		return S_OK;
	}

	// Get DirectShow interfaces
	hr = GetInterfaces();
	if (FAILED(hr))
	{
		Msg(TEXT("Failed to get video interfaces!  hr=0x%x"), hr);
		return hr;
	}


	pSrcFilter = p_streams[0].p_device_filter;

	// Render the preview pin on the video capture filter
	// Use this instead of g_pGraph->RenderFile
	// NOTE(review): the hr from AddFilter is immediately overwritten by the
	// RenderStream result below — an AddFilter failure is never reported as such.
	p_VideoFilter = new CMyCapVideoFilter(this, NULL, &m_lock, NULL);
    hr = p_graph->AddFilter(p_VideoFilter, L"video filter");
	hr = p_capture_graph_builder2->RenderStream(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video,
                                                pSrcFilter, NULL, p_VideoFilter);
	if (FAILED(hr))
	{
		// NOTE(review): pSrcFilter is a pointer borrowed from p_streams[] yet
		// is Release()d here — presumably p_streams holds its own reference;
		// verify the ownership convention.
		Msg(TEXT("Couldn't render the video capture stream to my filter.  hr=0x%x\r\n")
			TEXT("The capture device may already be in use by another application.\r\n\r\n")
			TEXT("The sample will now close."), hr);
		pSrcFilter->Release();
		return hr;
	}

	// Same pattern for the audio device / audio sink filter.
	pSrcFilter = p_streams[1].p_device_filter;

	p_AudioFilter = new CMyCapAudioFilter(this, NULL, &m_alock, NULL);
    hr = p_graph->AddFilter(p_AudioFilter, L"audio filter");
	hr = p_capture_graph_builder2->RenderStream(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Audio,
                                                pSrcFilter, NULL, p_AudioFilter);

	if (FAILED(hr))
	{
		Msg(TEXT("Couldn't render the audio capture stream to my audio filter.  hr=0x%x\r\n")
			TEXT("The capture device may already be in use by another application.\r\n\r\n")
			TEXT("The sample will now close."), hr);
		pSrcFilter->Release();
		return hr;
	}
    b_buildCapture = true;
	return S_OK;
}
Пример #5
0
//-----------------------------------------------------------------------------
// Purpose: called when the plugin is loaded, load the interface we need from the engine
//-----------------------------------------------------------------------------
bool CSourcePython::Load(	CreateInterfaceFn interfaceFactory, CreateInterfaceFn gameServerFactory )
{
	// Hook up the engine factories.  CS:GO-era engines expose the unified
	// ConnectInterfaces() entry point; older engines connect tier1/tier2.
#ifdef ENGINE_CSGO
	ConnectInterfaces(&interfaceFactory, 1);
#else
	ConnectTier1Libraries( &interfaceFactory, 1 );
	ConnectTier2Libraries( &interfaceFactory, 2 );
#endif

	// Resolve all engine-side interfaces first.
	if( !GetInterfaces(gEngineInterfaces, interfaceFactory) )
	{
		return false;
	}

	// Then all game-side interfaces.
	if( !GetInterfaces(gGameInterfaces, gameServerFactory) )
	{
		return false;
	}

	// Cache the engine's global variable block.
	gpGlobals = playerinfomanager->GetGlobalVars();

	// Bring up math support and register our commands.
	MathLib_Init( 2.2f, 2.2f, 0.0f, 2.0f );
	InitCommands();

	// Game paths must be available before python starts.
	if( !g_GamePaths.Initialize() )
	{
		DevMsg(0, "Could not initialize game paths.");
		return false;
	}

	// Finally spin up the embedded python interpreter.
	if( !g_PythonManager.Initialize() )
	{
		DevMsg(0, "Could not initialize python.");
		return false;
	}

	return true;
}
Пример #6
0
// Obtains (or creates) the named object in the given system and acquires
// its interfaces.  Returns true only if the object was obtained AND all
// required interfaces were retrieved.
bool CSystemObjectWrapper::Create(ISystem *piSystem,std::string sClass,std::string sName)
{
    // Drop anything held from a previous Create/Attach.
    ReleaseInterfaces();

    ISystemObject *piObject = NULL;
    bool bOk = false;

    // Prefer an existing object with this name; otherwise ask the system
    // to instantiate a new one of the requested class.
    if (!piSystem->GetObject(sName, &piObject))
    {
        if (piSystem->CreateObject(sClass, sName, &piObject))
        {
            //m_bObjectMustBeDestroyed=true;
        }
    }

    if (piObject != NULL)
    {
        bOk = GetInterfaces(piObject);
    }
    REL(piObject);
    return bOk;
}
Пример #7
0
// Dialog initialization: sets the window icons, collects interface data,
// builds the tab control, and — if a config file was supplied — loads it
// and starts immediately.
BOOL CFrontendDlg::OnInitDialog()
{
	CDialog::OnInitDialog();

	// Load the application icon in both sizes and attach them to the dialog.
	HICON hIconSmall = (HICON)::LoadImage(AfxGetApp()->m_hInstance,
		MAKEINTRESOURCE(IDI_ICON1),
		IMAGE_ICON, GetSystemMetrics(SM_CXSMICON),
		GetSystemMetrics(SM_CYSMICON), 0);

	HICON hIconLarge = (HICON)::LoadImage(AfxGetApp()->m_hInstance,
		MAKEINTRESOURCE(IDI_ICON1),
		IMAGE_ICON, GetSystemMetrics(SM_CXICON),
		GetSystemMetrics(SM_CYICON), 0);

	SetIcon(hIconSmall, FALSE);
	SetIcon(hIconLarge, TRUE);

	// Presumably populates Interfaces/Addresses/IsWlan, which are handed to
	// the tab pages below.
	GetInterfaces();

	// Build the tab control and wire up its pages.
	m_TabCtrl.InsertItem(0, "Settings");
	m_TabCtrl.InsertItem(1, "Output");
	m_TabCtrl.InsertItem(2, "Nodes");
	m_TabCtrl.InsertItem(3, "Routes");
	m_TabCtrl.InitTabDialogs(&Interfaces, &Addresses, &IsWlan);

	m_StopButton.EnableWindow(FALSE);

	// Auto-start when a configuration file was passed in.
	if (!ConfigFile.IsEmpty())
	{
		if (m_TabCtrl.m_Dialog2.OpenConfigFile(ConfigFile) < 0)
		{
			AfxMessageBox("Cannot open configuration file '" + ConfigFile + "'.");
		}
		else
		{
			OnStartButton();

			// Show the output tab while running.
			m_TabCtrl.SetCurSel(1);
			m_TabCtrl.DisplayTabDialog();
		}
	}

	return TRUE;
}
Пример #8
0
// Resolves the named system through the system manager and delegates object
// creation to the Create(ISystem*,...) overload.  Returns true on success.
//
// FIX: removed the dead `piObject` code copied from the sibling overload —
// piObject was declared, never assigned, then tested (`if(piObject)`) and
// REL()'d.  The branch could never run, and had it ever become live it would
// have clobbered the bOk result from Create().
bool CSystemObjectWrapper::Create(std::string sSystem,std::string sClass,std::string sName)
{
    ReleaseInterfaces();

    bool            bOk=false;
    ISystem         *piSystem=NULL;
    ISystemManager  *piManager=GetSystemManager();

    if(piManager){piSystem=piManager->GetSystem(sSystem);}
    if(piSystem){bOk=Create(piSystem,sClass,sName);}
	if(!bOk)
	{
		RTTRACE("CSystemObjectWrapper::Create -> Failed to create System: %s, Class: %s, Object:%s",sSystem.c_str(),sClass.c_str(),sName.c_str());
	}
    REL(piManager);
    REL(piSystem);
    return bOk;
}
Пример #9
0
//-----------------------------------------------------------------------------
// Purpose: called when the plugin is loaded, load the interface we need from the engine
//-----------------------------------------------------------------------------
bool CSourcePython::Load(	CreateInterfaceFn interfaceFactory, CreateInterfaceFn gameServerFactory )
{
	// This seems to be new with
#ifdef ENGINE_CSGO
	DevMsg(1, MSG_PREFIX "Connecting interfaces...\n");
	ConnectInterfaces(&interfaceFactory, 1);
#else
	DevMsg(1, MSG_PREFIX "Connecting tier1 libraries...\n");
	ConnectTier1Libraries( &interfaceFactory, 1 );

	DevMsg(1, MSG_PREFIX "Connecting tier2 libraries...\n");
	ConnectTier2Libraries( &interfaceFactory, 2 );
#endif

	// Get all engine interfaces.
	DevMsg(1, MSG_PREFIX "Retrieving engine interfaces...\n");
	if( !GetInterfaces(gEngineInterfaces, interfaceFactory) ) {
		return false;
	}

	// Get all game interfaces.
	DevMsg(1, MSG_PREFIX "Retrieving game interfaces...\n");
	if( !GetInterfaces(gGameInterfaces, gameServerFactory) ) {
		return false;
	}

	DevMsg(1, MSG_PREFIX "Retrieving global variables...\n");
	gpGlobals = playerinfomanager->GetGlobalVars();
	if (!gpGlobals) {
		// FIX: message previously read "Could retrieve global variables."
		Msg(MSG_PREFIX "Could not retrieve global variables.\n");
		return false;
	}

	DevMsg(1, MSG_PREFIX "Initializing mathlib...\n");
	MathLib_Init( 2.2f, 2.2f, 0.0f, 2.0f );

	DevMsg(1, MSG_PREFIX "Initializing server and say commands...\n");
	InitCommands();

	// Initialize python
	DevMsg(1, MSG_PREFIX "Initializing python...\n");
	if( !g_PythonManager.Initialize() ) {
		Msg(MSG_PREFIX "Could not initialize python.\n");
		return false;
	}

	// TODO: Don't hardcode the 64 bytes offset
#ifdef ENGINE_LEFT4DEAD2
	#define CACHE_NOTIFY_OFFSET 68
#else
	#define CACHE_NOTIFY_OFFSET 64
#endif

	DevMsg(1, MSG_PREFIX "Retrieving the current cache notifier...\n");
	// FIX: use byte-pointer arithmetic rather than casting the pointer
	// through `unsigned long`, which truncates addresses on LLP64 targets
	// (64-bit Windows, where unsigned long is 32 bits).
	m_pOldMDLCacheNotifier = *(IMDLCacheNotify **)((char *)modelcache + CACHE_NOTIFY_OFFSET);

	DevMsg(1, MSG_PREFIX "Setting the new cache notifier...\n");
	modelcache->SetCacheNotify(this);

	Msg(MSG_PREFIX "Loaded successfully.\n");
	return true;
}
Пример #10
0
//
// CFilePlayer::BuildGraph(): Builds a AVI/MPEG/.. playback graph rendering via
// OverlayMixer which uses app's given DDraw params.
//
// Returns S_OK on success, E_FAIL on any build failure (partial cleanup of
// the OverlayMixer is performed on the error paths).
//
HRESULT CFilePlayer::BuildGraph(HWND hWndApp, LPDIRECTDRAW pDDObj, LPDIRECTDRAWSURFACE pDDPrimary)
{
    DbgLog((LOG_TRACE, 5, TEXT("CFilePlayer::BuildGraph() entered"))) ;

    HRESULT       hr ;
    IBaseFilter  *pOvM ;
    WCHAR         achwFileName[MAX_PATH] ;
    LPWSTR        lpszwFileName = NULL ;
    IBaseFilter  *pVR ;

    // First release any existing interface pointer(s)
    ReleaseInterfaces() ;
    SetColorKey(253) ;  // default magenta for 8bpp

    // Check if a file name has been specified; if so, use that
    // NOTE(review): assumes GetFileName() fits in MAX_PATH characters — the
    // copy/conversion below is not length-guarded beyond the MBTWC call.
    if (lstrlen(GetFileName()) > 0)  // if something was specified before
    {
#ifdef UNICODE
        lstrcpy(achwFileName, GetFileName()) ;
#else
        MultiByteToWideChar(CP_ACP, 0, GetFileName(), -1, achwFileName, MAX_PATH) ;
#endif

        lpszwFileName = achwFileName ;
    }
    else  // no file specified, but we should have detected it before!!!
        return E_FAIL ;

    //
    // Instantiate Overlay Mixer, add it to the graph and set DDraw params
    //
    hr = AddOvMToGraph(&pOvM, pDDObj, pDDPrimary) ;
    if (FAILED(hr))
    {
        return E_FAIL ;
    }

    //
    // First render the graph for the selected file
    //
    // RenderFile may return partial-success codes; anything other than S_OK
    // is treated as failure here.
    hr = m_pGraph->RenderFile(lpszwFileName, NULL) ;
    if (S_OK != hr)
    {
        DbgLog((LOG_ERROR, 0,
            TEXT("Rendering the given file didn't succeed completely (Error 0x%lx)"), hr)) ;
        m_pGraph->RemoveFilter(pOvM) ;  // remove from graph
        pOvM->Release() ;               // release filter
        return E_FAIL ;
    }

    //
    // Because there are some AVI files which on some machines decide to rather go
    // through the Color Space Converter filter, just making sure that the OverlayMixer
    // is actually being used.  Otherwise we have to do some more (bull)work.
    //
    if (! IsOvMConnected(pOvM) )
    {
        DbgLog((LOG_TRACE, 1, TEXT("OverlayMixer is not used in the graph. Try again..."))) ;

        //
        // Check that the  specified file has a video stream. Otherwise OverlayMixer
        // will never be used and DDraw exclusive mode playback doesn't make any sense.
        //
        if (FAILED(GetVideoRendererInterface(&pVR)))
        {
            DbgLog((LOG_TRACE, 1, TEXT("Specified file doesn't have any video stream. Aborting graph building."))) ;
            m_pGraph->RemoveFilter(pOvM) ;  // remove from graph
            pOvM->Release() ;               // release filter
            return E_FAIL ;
        }

        //
        // Put the video stream to go through the OverlayMixer.
        //
        hr = PutVideoThroughOvM(pOvM, pVR) ;
        if (FAILED(hr))
        {
            DbgLog((LOG_TRACE, 1, TEXT("Couldn't put video through the OverlayMixer."))) ;
            m_pGraph->RemoveFilter(pOvM) ;  // remove OvMixer from graph
            pOvM->Release() ;               // release OvMixer filter
            pVR->Release() ;                // release VR interface (before giving up)
            return E_FAIL ;
        }
        pVR->Release() ;    // done with VR interface
    }

    //
    // We are successful in building the graph. Now the rest...
    //
    // NOTE(review): the return value of GetInterfaces() is ignored here, and
    // the two hr values below are only ASSERTed — failures in a release build
    // are silently accepted.
    GetInterfaces(hWndApp) ;

    // Get IDDrawExclModeVideo interface of the OvMixer and store it
    hr = pOvM->QueryInterface(IID_IDDrawExclModeVideo, (LPVOID *)&m_pDDXM) ;
    ASSERT(SUCCEEDED(hr)) ;

    // Get the color key to be used and store it
    hr = GetColorKeyInternal(pOvM) ;
    ASSERT(SUCCEEDED(hr)) ;

    pOvM->Release() ;  // done with it

    return S_OK ;
}
Пример #11
0
//
// CDVDPlayer::BuildGraph(): Builds a filter graph for playing back the specified
// DVD title/file.  Also gets some interfaces that are required for controlling
// playback.
//
// Returns S_OK on success, E_FAIL if the user declines to continue after a
// partial render, or the failing HRESULT of the step that failed.
//
HRESULT CDVDPlayer::BuildGraph(HWND hWndApp, LPDIRECTDRAW pDDObj, LPDIRECTDRAWSURFACE pDDPrimary)
{
    DbgLog((LOG_TRACE, 5, TEXT("CDVDPlayer::BuildGraph() entered"))) ;

    HRESULT     hr ;

    // First release any existing interface pointer(s)
    ReleaseInterfaces() ;
    SetColorKey(253) ;  // default magenta for 8bpp

    // Check if a DVD-Video volume name has been specified; if so, use that
    // (otherwise lpszwFileName stays NULL and RenderDvdVideoVolume uses the
    // default volume).
    WCHAR    achwFileName[MAX_PATH] ;
    LPCWSTR  lpszwFileName = NULL ;  // by default
    if (lstrlen(GetFileName()) > 0)  // if something was specified before
    {
#ifdef UNICODE
        lstrcpy(achwFileName, GetFileName()) ;
#else
        MultiByteToWideChar(CP_ACP, 0, GetFileName(), -1, achwFileName, MAX_PATH) ;
#endif // UNICODE

        lpszwFileName = achwFileName ;
    }
    DbgLog((LOG_TRACE, 5, TEXT("DVD file <%s> will be played"), GetFileName())) ;

    // Set DDraw object and surface on DVD graph builder before starting to build graph
    IDDrawExclModeVideo  *pDDXMV ;
    hr = m_pDvdGB->GetDvdInterface(IID_IDDrawExclModeVideo, (LPVOID *)&pDDXMV) ;
    if (FAILED(hr) || NULL == pDDXMV)
    {
        DbgLog((LOG_ERROR, 0,
            TEXT("ERROR: IDvdGB::GetDvdInterface(IDDrawExclModeVideo) failed (Error 0x%lx)"), hr)) ;
        return hr ;
    }
    hr = pDDXMV->SetDDrawObject(pDDObj) ;
    if (FAILED(hr))
    {
        DbgLog((LOG_ERROR, 0, TEXT("ERROR: IDDrawExclModeVideo::SetDDrawObject() failed (Error 0x%lx)"), hr)) ;
        pDDXMV->Release() ;  // release before returning
        return hr ;
    }
    hr = pDDXMV->SetDDrawSurface(pDDPrimary) ;
    if (FAILED(hr))
    {
        DbgLog((LOG_ERROR, 0, TEXT("ERROR: IDDrawExclModeVideo::SetDDrawSurface() failed (Error 0x%lx)"), hr)) ;
        pDDXMV->SetDDrawObject(NULL) ;  // to reset
        pDDXMV->Release() ;  // release before returning
        return hr ;
    }
    pDDXMV->Release() ;  // done with the interface

    // Build the graph
    // NOTE(review): AMGetErrorText/GetStatusText receive sizeof(achBuffer),
    // i.e. a byte count — in a UNICODE build that is 2x the element count the
    // APIs may expect; confirm the intended units.
    AM_DVD_RENDERSTATUS   Status ;
    TCHAR                 achBuffer[1000] ;
    hr = m_pDvdGB->RenderDvdVideoVolume(lpszwFileName,
        AM_DVD_HWDEC_PREFER, &Status) ;
    if (FAILED(hr))
    {
        AMGetErrorText(hr, achBuffer, sizeof(achBuffer)) ;
        // NOTE(review): "Error" is not wrapped in TEXT() — would not compile
        // as-is in a UNICODE build; verify the build configuration.
        MessageBox(hWndApp, achBuffer, "Error", MB_OK) ;
        return hr ;
    }
    if (S_FALSE == hr)  // if partial success
    {
        // Ask the user whether to continue despite the partial render.
        std::basic_string<TCHAR>   l_Text;
        if (GetStatusText(&Status, achBuffer, sizeof(achBuffer)))
	    {
            l_Text.assign(achBuffer);
        }
        else
        {
            l_Text.assign(TEXT("An unknown error has occurred"));
        }
        l_Text.append(TEXT("\n\nDo you still want to continue?"));
        if (IDNO == MessageBox(hWndApp, l_Text.c_str(), TEXT("Warning"), MB_YESNO))
        {
            return E_FAIL;
        }
    }

    // NOTE(review): GetInterfaces() result ignored; GetColorKeyInternal()
    // failure only ASSERTed — silently accepted in release builds.
    GetInterfaces(hWndApp) ;

    hr = GetColorKeyInternal() ;
    ASSERT(SUCCEEDED(hr)) ;

    return S_OK ;
}
Пример #12
0
// Builds and runs a standard video-preview graph: finds a capture device,
// adds it to the graph, renders its preview pin, sizes the video window and
// starts the graph.  Uses the g_pGraph/g_pCapture/g_pMC globals.
// Returns S_OK on success or the failing step's HRESULT.
HRESULT CaptureVideo()
{
    HRESULT hr;
    IBaseFilter *pSrcFilter=NULL;

    // Get DirectShow interfaces
    hr = GetInterfaces();
    if (FAILED(hr))
    {
        Msg(TEXT("Failed to get video interfaces!  hr=0x%x"), hr);
        return hr;
    }

    // Attach the filter graph to the capture graph
    hr = g_pCapture->SetFiltergraph(g_pGraph);
    if (FAILED(hr))
    {
        Msg(TEXT("Failed to set capture filter graph!  hr=0x%x"), hr);
        return hr;
    }

    // Use the system device enumerator and class enumerator to find
    // a video capture/preview device, such as a desktop USB video camera.
    hr = FindCaptureDevice(&pSrcFilter);
    if (FAILED(hr))
    {
        // Don't display a message because FindCaptureDevice will handle it
        return hr;
    }
   
    // Add Capture filter to our graph.
    hr = g_pGraph->AddFilter(pSrcFilter, L"Video Capture");
    if (FAILED(hr))
    {
        Msg(TEXT("Couldn't add the capture filter to the graph!  hr=0x%x\r\n\r\n") 
            TEXT("If you have a working video capture device, please make sure\r\n")
            TEXT("that it is connected and is not being used by another application.\r\n\r\n")
            TEXT("The sample will now close."), hr);
        pSrcFilter->Release();
        return hr;
    }

    // Render the preview pin on the video capture filter
    // Use this instead of g_pGraph->RenderFile
    hr = g_pCapture->RenderStream (&PIN_CATEGORY_PREVIEW, &MEDIATYPE_Video,
                                   pSrcFilter, NULL, NULL);
    if (FAILED(hr))
    {
        Msg(TEXT("Couldn't render the video capture stream.  hr=0x%x\r\n")
            TEXT("The capture device may already be in use by another application.\r\n\r\n")
            TEXT("The sample will now close."), hr);
        pSrcFilter->Release();
        return hr;
    }

    // Now that the filter has been added to the graph and we have
    // rendered its stream, we can release this reference to the filter.
    // (The graph holds its own reference.)
    pSrcFilter->Release();

    // Set video window style and position
    hr = SetupVideoWindow();
    if (FAILED(hr))
    {
        Msg(TEXT("Couldn't initialize video window!  hr=0x%x"), hr);
        return hr;
    }

#ifdef REGISTER_FILTERGRAPH
    // Add our graph to the running object table, which will allow
    // the GraphEdit application to "spy" on our graph
    // (ROT registration failure is non-fatal — preview continues.)
    hr = AddGraphToRot(g_pGraph, &g_dwGraphRegister);
    if (FAILED(hr))
    {
        Msg(TEXT("Failed to register filter graph with ROT!  hr=0x%x"), hr);
        g_dwGraphRegister = 0;
    }
#endif

    // Start previewing video data
    hr = g_pMC->Run();
    if (FAILED(hr))
    {
        Msg(TEXT("Couldn't run the graph!  hr=0x%x"), hr);
        return hr;
    }

    // Remember current state
    g_psCurrent = Running;
        
    return S_OK;
}
Пример #13
0
// Builds the preview half of the graph: renders the video device's preview
// pin (audio preview is deliberately disabled) and suggests a 4096-byte
// allocator buffer on the audio device's pins.
// Idempotent: returns S_OK immediately once b_buildPreview is set.
//
// NOTE(review): the final `return hr;` returns whatever the last pin
// enumeration left in hr (typically S_FALSE when the enumeration ended) —
// still a success code, but not necessarily S_OK; confirm callers only use
// FAILED()/SUCCEEDED() on it.
HRESULT CAccessSys::BuildPreview(void)
{
	HRESULT hr;
	IBaseFilter *pSrcFilter = NULL;

	// Already built — nothing to do.
	if (b_buildPreview){
		return S_OK;
	}

	// Get DirectShow interfaces
	hr = GetInterfaces();
	if (FAILED(hr))
	{
		Msg(TEXT("Failed to get video interfaces!  hr=0x%x"), hr);
		return hr;
	}


	// p_streams[0] is the video device.
	pSrcFilter = p_streams[0].p_device_filter;
	
	hr = p_capture_graph_builder2->RenderStream(&PIN_CATEGORY_PREVIEW, &MEDIATYPE_Video,
		pSrcFilter, NULL, NULL);
	if (FAILED(hr))
	{
		// NOTE(review): pSrcFilter is borrowed from p_streams[] yet Release()d
		// here — presumably p_streams holds its own reference; verify.
		Msg(TEXT("Couldn't render the video capture stream.  hr=0x%x\r\n")
			TEXT("The capture device may already be in use by another application.\r\n\r\n")
			TEXT("The sample will now close."), hr);
		pSrcFilter->Release();
		return hr;
	}

	// Disabled diagnostic code: enumerates the video device's output pins and
	// media types (looking for RGB24/I420/YUY2).  Kept for reference.
	//{
	//	IEnumPins *ep;
	//	IPin *inputpin = NULL;
	//	IPin *voutputpin = NULL;
	//	IPin *aoutputpin = NULL;
	//	IPin *pin = NULL;
	//	bool bFindI420 = false;
	//	bool bFindPCM = false;

	//	pSrcFilter = p_streams[0].p_device_filter;

	//	pSrcFilter->EnumPins(&ep);
	//	if (SUCCEEDED(hr)){
	//		ep->Reset();
	//		while (SUCCEEDED(hr = ep->Next(1, &pin, 0)) && hr != S_FALSE){
	//			PIN_DIRECTION pinDir;
	//			pin->QueryDirection(&pinDir);
	//			if (pinDir == PINDIR_OUTPUT){
	//				AM_MEDIA_TYPE *pmt;
	//				IEnumMediaTypes *emt;
	//				pin->EnumMediaTypes(&emt);
	//				while (hr = emt->Next(1, &pmt, NULL), hr != S_FALSE){
	//					if (pmt->majortype == MEDIATYPE_Video){
	//						if (pmt->subtype == MEDIASUBTYPE_RGB24){
	//							//Msg(TEXT("MEDIASUBTYPE_RGB24"));
	//						}
	//						else if (pmt->subtype == MEDIASUBTYPE_I420){
	//							//Msg(TEXT("MEDIASUBTYPE_I420"));
	//							bFindI420 = true;
	//						}
	//						else if (pmt->subtype == MEDIASUBTYPE_YUY2){}
	//					}
	//					TCHAR buf[64] = { 0 };
	//					swprintf(buf, TEXT("{%08X-%04X-%04X-%02X%02X-%02X%02X%02X%02X%02X%02X}"),
	//						pmt->subtype.Data1, pmt->subtype.Data2, pmt->subtype.Data3,
	//						pmt->subtype.Data4[0], pmt->subtype.Data4[1],
	//						pmt->subtype.Data4[2], pmt->subtype.Data4[3],
	//						pmt->subtype.Data4[4], pmt->subtype.Data4[5],
	//						pmt->subtype.Data4[6], pmt->subtype.Data4[7]);
	//					//Msg(buf);
	//					DeleteMediaType(pmt);
	//				}
	//				emt->Release();
	//			}
	//			pin->Release();
	//			pin = NULL;
	//		}
	//	}
	//	RELEASE(ep);
	//}

	// p_streams[1] is the audio device.
	pSrcFilter = p_streams[1].p_device_filter;
	
    // do not render local audio
	//hr = p_capture_graph_builder2->RenderStream(&PIN_CATEGORY_PREVIEW, &MEDIATYPE_Audio,
	//	pSrcFilter, NULL, NULL);
	//if (FAILED(hr))
	//{
	//	Msg(TEXT("Couldn't render the video capture stream.  hr=0x%x\r\n")
	//		TEXT("The capture device may already be in use by another application.\r\n\r\n")
	//		TEXT("The sample will now close."), hr);
	//	pSrcFilter->Release();
	//	return hr;
	//}

    {
        // Suggest a 4096-byte allocator buffer on every pin of the audio
        // device that supports IAMBufferNegotiation.
        IEnumPins *ep;
        IPin *pin = NULL;

        IAMBufferNegotiation *buffer_negotiation = NULL;
        ALLOCATOR_PROPERTIES props = { -1, -1, -1, -1 };

        // NOTE(review): EnumPins result is not checked before ep->Reset().
        pSrcFilter->EnumPins(&ep);
        ep->Reset();
        while (SUCCEEDED(hr = ep->Next(1, &pin, 0)) && hr != S_FALSE){
            if (pin->QueryInterface(IID_IAMBufferNegotiation, (void **)&buffer_negotiation) == S_OK){
                buffer_negotiation->GetAllocatorProperties(&props);
                props.cbBuffer = 4096; // set to 4096 byte: AAC (acc) encode frame length
                buffer_negotiation->SuggestAllocatorProperties(&props);
                RELEASE(buffer_negotiation);
            }
            RELEASE(pin);
        }
        RELEASE(ep);
    }

	// Disabled diagnostic code: same media-type dump for the audio device
	// (looking for PCM).  Kept for reference.
	//{
	//	IEnumPins *ep;
	//	IPin *inputpin = NULL;
	//	IPin *voutputpin = NULL;
	//	IPin *aoutputpin = NULL;
	//	IPin *pin = NULL;
	//	bool bFindI420 = false;
	//	bool bFindPCM = false;

	//	//pSrcFilter = p_streams[0].p_device_filter;

	//	pSrcFilter->EnumPins(&ep);
	//	if (SUCCEEDED(hr)){
	//		ep->Reset();
	//		while (SUCCEEDED(hr = ep->Next(1, &pin, 0)) && hr != S_FALSE){
	//			PIN_DIRECTION pinDir;
	//			pin->QueryDirection(&pinDir);
	//			if (pinDir == PINDIR_OUTPUT){
	//				AM_MEDIA_TYPE *pmt;
	//				IEnumMediaTypes *emt;
	//				pin->EnumMediaTypes(&emt);
	//				while (hr = emt->Next(1, &pmt, NULL), hr != S_FALSE){
	//					if (pmt->majortype == MEDIATYPE_Audio){
	//						if (pmt->subtype == MEDIASUBTYPE_PCM){
	//							//Msg(TEXT("MEDIASUBTYPE_PCM"));
	//						}
	//						else if (pmt->subtype == MEDIASUBTYPE_I420){
	//							//Msg(TEXT("MEDIASUBTYPE_I420"));
	//							bFindI420 = true;
	//						}
	//						else{
	//							bFindI420 = true;
	//						}
	//					}
	//					TCHAR buf[64] = { 0 };
	//					swprintf(buf, TEXT("{%08X-%04X-%04X-%02X%02X-%02X%02X%02X%02X%02X%02X}"),
	//						pmt->subtype.Data1, pmt->subtype.Data2, pmt->subtype.Data3,
	//						pmt->subtype.Data4[0], pmt->subtype.Data4[1],
	//						pmt->subtype.Data4[2], pmt->subtype.Data4[3],
	//						pmt->subtype.Data4[4], pmt->subtype.Data4[5],
	//						pmt->subtype.Data4[6], pmt->subtype.Data4[7]);
	//					//Msg(buf);
	//					DeleteMediaType(pmt);
	//				}
	//				emt->Release();
	//			}
	//			pin->Release();
	//			pin = NULL;
	//		}
	//	}
	//	RELEASE(ep);
	//}

	b_buildPreview = true;
	return hr;
}
Пример #14
0
// Builds a one-shot frame-grab graph for the given camera (source filter ->
// sample grabber -> null renderer), runs it, and waits for completion.
// Returns S_OK on success or the failing step's HRESULT.
//
// FIXES over the previous revision:
//  - the pin enumerator (pEnum) was never released;
//  - pSrcFilter leaked on every early return taken after FindCaptureDevice().
HRESULT cDxCapture::CaptureVideo(const int cameraIndex) //cameraIndex = 0
{
	HRESULT hr;
	IBaseFilter *pSrcFilter = NULL;

	// Get DirectShow interfaces
	hr = GetInterfaces();
	if (FAILED(hr))
	{
		Msg(TEXT("Failed to get video interfaces!  hr=0x%x"), hr);
		return hr;
	}

	// Attach the filter graph to the capture graph
	hr = m_pCapture->SetFiltergraph(m_pGraph);
	if (FAILED(hr))
	{
		Msg(TEXT("Failed to set capture filter graph!  hr=0x%x"), hr);
		return hr;
	}

	// Use the system device enumerator and class enumerator to find
	// a video capture/preview device, such as a desktop USB video camera.
	hr = FindCaptureDevice(cameraIndex, &pSrcFilter);
	if (FAILED(hr))
	{
		// Don't display a message because FindCaptureDevice will handle it
		return hr;
	}

	// Add Capture filter to our graph.
	hr = m_pGraph->AddFilter(pSrcFilter, L"Video Capture");
	if (FAILED(hr))
	{
		Msg(TEXT("Couldn't add the capture filter to the graph!  hr=0x%x\r\n\r\n")
			TEXT("If you have a working video capture device, please make sure\r\n")
			TEXT("that it is connected and is not being used by another application.\r\n\r\n")
			TEXT("The sample will now close."), hr);
		pSrcFilter->Release();
		return hr;
	}

	// Try each output pin of the source until one connects to the grabber.
	IEnumPins *pEnum = NULL;
	IPin *pPin = NULL;
	hr = pSrcFilter->EnumPins(&pEnum);
	if (FAILED(hr))
	{
		pSrcFilter->Release();  // FIX: was leaked on this path
		return hr;
	}

	while (S_OK == pEnum->Next(1, &pPin, NULL))
	{
		hr = ConnectFilters(m_pGraph, pPin, m_pGrabberF);
		SafeRelease(&pPin);
		if (SUCCEEDED(hr))
		{
			break;
		}
	}
	SafeRelease(&pEnum);  // FIX: enumerator was never released

	if (FAILED(hr))
	{
		pSrcFilter->Release();
		return hr;
	}

	// Terminate the grabber's output with a null renderer.
	hr = m_pGraph->AddFilter(m_pNullF, L"Null Filter");
	if (FAILED(hr))
	{
		pSrcFilter->Release();
		return hr;
	}

	hr = ConnectFilters(m_pGraph, m_pGrabberF, m_pNullF);
	if (FAILED(hr))
	{
		pSrcFilter->Release();
		return hr;
	}

	// Grab a single frame and keep it buffered for later retrieval.
	hr = m_pGrabber->SetOneShot(TRUE);
	if (FAILED(hr))
	{
		pSrcFilter->Release();
		return hr;
	}

	hr = m_pGrabber->SetBufferSamples(TRUE);
	if (FAILED(hr))
	{
		pSrcFilter->Release();
		return hr;
	}

	// Now that the filter has been added to the graph and we have
	// rendered its stream, we can release this reference to the filter.
	// (The graph holds its own reference.)
	pSrcFilter->Release();

	// Start previewing video data
	hr = m_pMC->Run();
	if (FAILED(hr))
	{
		Msg(TEXT("Couldn't run the graph!  hr=0x%x"), hr);
		return hr;
	}

	// Remember current state
	m_psCurrent = RUNNING;

	// Block until the one-shot grab signals completion.
	long evCode;
	hr = m_pME->WaitForCompletion(INFINITE, &evCode);

	return S_OK;
}
Пример #15
0
// Builds and runs a video-preview graph, forcing the capture format to
// 1920x1080 @ 30fps via IAMStreamConfig before rendering the preview pin.
// Returns S_OK on success or the failing step's HRESULT.
//
// FIXES over the previous revision:
//  - the AM_MEDIA_TYPE returned by GetFormat() was leaked (once per loop
//    iteration) — now freed with DeleteMediaType();
//  - biSizeImage was computed from the OLD dimensions and only then were
//    width/height changed — now the dimensions are set first;
//  - pSC was leaked when the last hr was a failure; pSrcFilter was leaked on
//    the "no stream config" path;
//  - the redundant loop that applied the same format iCount times (and the
//    dead commented-out GetStreamCaps code) was reduced to a single
//    GetFormat/SetFormat round-trip.
HRESULT CaptureVideo()
{
    HRESULT hr;
    IBaseFilter *pSrcFilter=NULL;

    // Get DirectShow interfaces
    hr = GetInterfaces();
    if (FAILED(hr))
    {
        Msg(TEXT("Failed to get video interfaces!  hr=0x%x"), hr);
        return hr;
    }

    // Attach the filter graph to the capture graph
    hr = g_pCapture->SetFiltergraph(g_pGraph);
    if (FAILED(hr))
    {
        Msg(TEXT("Failed to set capture filter graph!  hr=0x%x"), hr);
        return hr;
    }

    // Use the system device enumerator and class enumerator to find
    // a video capture/preview device, such as a desktop USB video camera.
    hr = FindCaptureDevice(&pSrcFilter);
    if (FAILED(hr))
    {
        // Don't display a message because FindCaptureDevice will handle it
        return hr;
    }
   
    // Add Capture filter to our graph.
    hr = g_pGraph->AddFilter(pSrcFilter, L"Video Capture");
    if (FAILED(hr))
    {
        Msg(TEXT("Couldn't add the capture filter to the graph!  hr=0x%x\r\n\r\n") 
            TEXT("If you have a working video capture device, please make sure\r\n")
            TEXT("that it is connected and is not being used by another application.\r\n\r\n")
            TEXT("The sample will now close."), hr);
        pSrcFilter->Release();
        return hr;
    }

    // Locate the stream-config interface on the preview pin (interleaved
    // first for DV-style devices, then plain video).
    IAMStreamConfig *pSC = NULL;

    hr = g_pCapture->FindInterface(&PIN_CATEGORY_PREVIEW,
                                      &MEDIATYPE_Interleaved,
                                      pSrcFilter, IID_IAMStreamConfig, (void **)&pSC);

    if(FAILED(hr))
        hr = g_pCapture->FindInterface(&PIN_CATEGORY_PREVIEW,
                                      &MEDIATYPE_Video, pSrcFilter,
                                      IID_IAMStreamConfig, (void **)&pSC);

    if (!pSC) {
        pSrcFilter->Release();  // FIX: was leaked on this path
        return hr;
    }

    int iCount = 0, iSize = 0;
    hr = pSC->GetNumberOfCapabilities(&iCount, &iSize);

    // Check the size to make sure we pass in the correct structure.
    if (SUCCEEDED(hr) && iSize == sizeof(VIDEO_STREAM_CONFIG_CAPS))
    {
        AM_MEDIA_TYPE *pmtConfig = NULL;

        hr = pSC->GetFormat(&pmtConfig);
        if (SUCCEEDED(hr) && pmtConfig != NULL && pmtConfig->pbFormat != NULL)
        {
            VIDEOINFOHEADER *pvi = (VIDEOINFOHEADER *)pmtConfig->pbFormat;

            const double fps = 30;
            pvi->AvgTimePerFrame = (LONGLONG)(10000000/fps);

            // FIX: set the target dimensions BEFORE computing the image size.
            pvi->bmiHeader.biWidth = 1920;
            pvi->bmiHeader.biHeight = 1080;
            pvi->bmiHeader.biSizeImage = DIBSIZE(pvi->bmiHeader);

            // Apply the format; failure here is non-fatal — the device keeps
            // its current format and preview is still attempted below.
            hr = pSC->SetFormat(pmtConfig);
        }
        if (pmtConfig != NULL)
            DeleteMediaType(pmtConfig);  // FIX: was leaked
    }

    pSC->Release();  // FIX: was skipped when the last hr indicated failure

    // Render the preview pin on the video capture filter
    // Use this instead of g_pGraph->RenderFile
    hr = g_pCapture->RenderStream (&PIN_CATEGORY_PREVIEW, &MEDIATYPE_Video,
                                   pSrcFilter, NULL, NULL);
    if (FAILED(hr))
    {
        Msg(TEXT("Couldn't render the video capture stream.  hr=0x%x\r\n")
            TEXT("The capture device may already be in use by another application.\r\n\r\n")
            TEXT("The sample will now close."), hr);
        pSrcFilter->Release();
        return hr;
    }

    // Now that the filter has been added to the graph and we have
    // rendered its stream, we can release this reference to the filter.
    pSrcFilter->Release();

    // Set video window style and position
    hr = SetupVideoWindow();
    if (FAILED(hr))
    {
        Msg(TEXT("Couldn't initialize video window!  hr=0x%x"), hr);
        return hr;
    }

#ifdef REGISTER_FILTERGRAPH
    // Add our graph to the running object table, which will allow
    // the GraphEdit application to "spy" on our graph
    hr = AddGraphToRot(g_pGraph, &g_dwGraphRegister);
    if (FAILED(hr))
    {
        Msg(TEXT("Failed to register filter graph with ROT!  hr=0x%x"), hr);
        g_dwGraphRegister = 0;
    }
#endif

    // Start previewing video data
    hr = g_pMC->Run();
    if (FAILED(hr))
    {
        Msg(TEXT("Couldn't run the graph!  hr=0x%x"), hr);
        return hr;
    }

    // Remember current state
    g_psCurrent = Running;
        
    return S_OK;
}