bool CVideoRenderer::Initialize ( const WCHAR * fileName )
{
	HRESULT hr;
	#define EIF( _x ) if( FAILED( hr=_x ) ) goto error

	// --- check for a valid device
	if ( !m_pDevice ) return false;

	Lock ();

	// --- create the filter graph
	EIF( CoCreateInstance( CLSID_FilterGraph, NULL, CLSCTX_INPROC_SERVER, IID_IGraphBuilder, (void **)&m_pGraph ) );
	m_pFilter = CreateTextureRenderer ( &hr, m_pDevice, this );
	EIF( hr );

	// --- add our texture filter and set up the graph to render to this filter
	EIF( m_pGraph->AddFilter( m_pFilter, L"Texture renderer" ) );

	EIF( RenderFileToVideoRenderer( m_pGraph, fileName, FALSE ) );

#ifdef _DEBUG
	// --- register this graph for use with graphedt
	if( FAILED( hr = AddGraphToRot( m_pGraph, &m_dwGraphRegister ) ) ) {
		// NOTE: non-fatal failure; report it and move on
	}
#endif

	// --- get some interfaces
	EIF( m_pGraph->QueryInterface( IID_IMediaSeeking, (void**)&m_pMediaSeeking ) );
	EIF( m_pGraph->QueryInterface( IID_IMediaControl, (void**)&m_pMediaControl ) );
	EIF( m_pGraph->QueryInterface( IID_IBasicAudio, (void**)&m_pBasicAudio ) );

	m_pBasicAudio->get_Volume ( &lDefaultVolume );

	// --- clean up
	m_pGraph->Release ();
	m_pGraph = NULL;

	// --- unlock the mutex
	Unlock ();

	return true;

error:
	// --- failure occurred; hr holds the failing HRESULT
	Unlock ();

	return false;
}
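Every example in this listing registers its graph with AddGraphToRot() so that GraphEdit (graphedt) can attach to the running graph, but the helper itself is never shown. The sketch below follows the conventional DirectShow SDK implementation and assumes <dshow.h> and <strsafe.h> are available.

HRESULT AddGraphToRot(IUnknown *pUnkGraph, DWORD *pdwRegister)
{
    IMoniker *pMoniker = NULL;
    IRunningObjectTable *pROT = NULL;
    WCHAR wsz[128];
    HRESULT hr;

    if (!pUnkGraph || !pdwRegister)
        return E_POINTER;

    if (FAILED(GetRunningObjectTable(0, &pROT)))
        return E_FAIL;

    // GraphEdit looks for item monikers of exactly this form
    StringCchPrintfW(wsz, NUMELMS(wsz), L"FilterGraph %08x pid %08x",
                     (DWORD)(DWORD_PTR)pUnkGraph, GetCurrentProcessId());

    hr = CreateItemMoniker(L"!", wsz, &pMoniker);
    if (SUCCEEDED(hr))
    {
        // ROTFLAGS_REGISTRATIONKEEPSALIVE keeps the graph registered until Revoke()
        hr = pROT->Register(ROTFLAGS_REGISTRATIONKEEPSALIVE, pUnkGraph,
                            pMoniker, pdwRegister);
        pMoniker->Release();
    }

    pROT->Release();
    return hr;
}

// Counterpart called on teardown with the DWORD returned by AddGraphToRot()
void RemoveGraphFromRot(DWORD dwRegister)
{
    IRunningObjectTable *pROT = NULL;

    if (SUCCEEDED(GetRunningObjectTable(0, &pROT)))
    {
        pROT->Revoke(dwRegister);
        pROT->Release();
    }
}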
directx_videofile_server::directx_videofile_server(const char *filename) :
  _pMediaSeeking(NULL)
{
  //---------------------------------------------------------------------
  if (!open_and_find_parameters(filename)) {
    fprintf(stderr, "directx_videofile_server::directx_videofile_server(): Cannot open file %s\n",
      filename);
    _status = false;
    return;
  }

  //---------------------------------------------------------------------
  // Let the graph editor connect and view this graph
#ifdef REGISTER_FILTERGRAPH
  if (FAILED(AddGraphToRot(_pGraph, &_dwGraphRegister))) {
      fprintf(stderr,"Failed to register filter graph with ROT!\n");
      _dwGraphRegister = 0;
  }
#endif

  //---------------------------------------------------------------------
  // Allocate a buffer that is large enough to read the maximum-sized
  // image with no binning.
  _buflen = (unsigned)(_num_rows * _num_columns * 3);	// Expect B,G,R; 8-bits each.
  // Use the non-throwing form of new (requires <new>) so that the NULL check
  // below is meaningful; plain new would throw std::bad_alloc on failure.
  if ( (_buffer = new (std::nothrow) unsigned char[_buflen]) == NULL) {
    fprintf(stderr,"directx_videofile_server::directx_videofile_server(): Out of memory for buffer\n");
    _status = false;
    return;
  }

  //---------------------------------------------------------------------
  // No image in memory yet.
  _minX = _minY = _maxX = _maxY = 0;

  //---------------------------------------------------------------------
  // Set mode to running, status to good and return
  _mode = 0;
  _status = true;
}
Example #3
HRESULT BlendVideo(LPTSTR szFile1, LPTSTR szFile2)
{
    USES_CONVERSION;
    WCHAR wFile1[MAX_PATH], wFile2[MAX_PATH];
    HRESULT hr;

    // Check input string
    if ((szFile1 == NULL) || (szFile2 == NULL))
        return E_POINTER;

    // Clear open dialog remnants before calling RenderFile()
    UpdateWindow(ghApp);

    // Convert filenames to wide character strings
    wcsncpy(wFile1, T2W(szFile1), NUMELMS(wFile1)-1);
    wcsncpy(wFile2, T2W(szFile2), NUMELMS(wFile2)-1);
    wFile1[MAX_PATH-1] = wFile2[MAX_PATH-1] = 0;

    // Get the interface for DirectShow's GraphBuilder
    JIF(CoCreateInstance(CLSID_FilterGraph, NULL, CLSCTX_INPROC_SERVER, 
                         IID_IGraphBuilder, (void **)&pGB));

    if(SUCCEEDED(hr))
    {
        // Unlike the VMR7 in Windows XP, the VMR9 is not the default renderer
        // (to preserve backward compatibility).  In some multifile graphs,
        // the filter graph manager could decide to load the default Video Renderer
        // filter during RenderFile(), even though the VMR9 is already present
        // in the graph.  Since the default Video Renderer has a higher merit 
        // than the VMR9, it would be connected instead of the VMR9, leading to
        // the video streams being displayed in multiple separate windows.
        // This could be the case with AVI files created with legacy VfW codecs
        // or when two Color Space converters must be used (each requiring its
        // own allocator).
        // Therefore, we render the files programmatically instead of using
        // the standard IGraphBuilder::RenderFile() method.
        if (FAILED(hr = ConfigureMultiFileVMR9(wFile1, wFile2)))
            return hr;

        // QueryInterface for DirectShow interfaces
        JIF(pGB->QueryInterface(IID_IMediaControl, (void **)&pMC));
        JIF(pGB->QueryInterface(IID_IMediaEventEx, (void **)&pME));
        JIF(pGB->QueryInterface(IID_IMediaSeeking, (void **)&pMS));

        // Is this an audio-only file (no video component)?
        if (CheckVideoVisibility())
        {
            JIF(InitVideoWindow(1, 1));
        }
        else
        {
            // This sample requires video clips to be loaded
            Msg(TEXT("This sample requires media with a video component.  ")
                TEXT("Please select another file."));
            return E_FAIL;
        }

        // Have the graph signal event via window callbacks
        JIF(pME->SetNotifyWindow((OAHWND)ghApp, WM_GRAPHNOTIFY, 0));

        // Complete the window setup
        ShowWindow(ghApp, SW_SHOWNORMAL);
        UpdateWindow(ghApp);
        SetForegroundWindow(ghApp);
        SetFocus(ghApp);

#ifdef REGISTER_FILTERGRAPH
        if (FAILED(AddGraphToRot(pGB, &g_dwGraphRegister)))
        {
            Msg(TEXT("Failed to register filter graph with ROT!"));
            g_dwGraphRegister = 0;
        }
#endif

        // Run the graph to play the media files
        MoveVideoWindow();
        JIF(pMC->Run());
        
        EnableMenus(TRUE);      
    }

    return hr;
}
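BlendVideo() and the PlayMovieInWindow() variants below lean on a JIF macro and a Msg() helper that are not defined in this listing. A plausible definition, modeled on the DirectShow SDK playback samples and assuming <tchar.h> and <stdarg.h> are included (the exact message text and buffer size are assumptions):

// Jump-If-Failed: capture the HRESULT, report it, and bail out of the caller
#define JIF(x) if (FAILED(hr = (x))) \
    { Msg(TEXT("FAILED(hr=0x%x) in ") TEXT(#x) TEXT("\r\n"), hr); return hr; }

// printf-style message box helper
void Msg(TCHAR *szFormat, ...)
{
    TCHAR szBuffer[1024];

    va_list pArgs;
    va_start(pArgs, szFormat);
    _vsntprintf(szBuffer, NUMELMS(szBuffer) - 1, szFormat, pArgs);
    va_end(pArgs);

    szBuffer[NUMELMS(szBuffer) - 1] = 0;   // ensure termination
    MessageBox(NULL, szBuffer, TEXT("Message"), MB_OK | MB_ICONINFORMATION);
}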
HRESULT CaptureVideo()
{
    HRESULT hr;
    IBaseFilter *pSrcFilter=NULL;

    // Get DirectShow interfaces
    hr = GetInterfaces();
    if (FAILED(hr))
    {
        Msg(TEXT("Failed to get video interfaces!  hr=0x%x"), hr);
        return hr;
    }

    // Attach the filter graph to the capture graph
    hr = g_pCapture->SetFiltergraph(g_pGraph);
    if (FAILED(hr))
    {
        Msg(TEXT("Failed to set capture filter graph!  hr=0x%x"), hr);
        return hr;
    }

    // Use the system device enumerator and class enumerator to find
    // a video capture/preview device, such as a desktop USB video camera.
    hr = FindCaptureDevice(&pSrcFilter);
    if (FAILED(hr))
    {
        // Don't display a message because FindCaptureDevice will handle it
        return hr;
    }
   
    // Add Capture filter to our graph.
    hr = g_pGraph->AddFilter(pSrcFilter, L"Video Capture");
    if (FAILED(hr))
    {
        Msg(TEXT("Couldn't add the capture filter to the graph!  hr=0x%x\r\n\r\n") 
            TEXT("If you have a working video capture device, please make sure\r\n")
            TEXT("that it is connected and is not being used by another application.\r\n\r\n")
            TEXT("The sample will now close."), hr);
        pSrcFilter->Release();
        return hr;
    }

    // Render the preview pin on the video capture filter
    // Use this instead of g_pGraph->RenderFile
    hr = g_pCapture->RenderStream (&PIN_CATEGORY_PREVIEW, &MEDIATYPE_Video,
                                   pSrcFilter, NULL, NULL);
    if (FAILED(hr))
    {
        Msg(TEXT("Couldn't render the video capture stream.  hr=0x%x\r\n")
            TEXT("The capture device may already be in use by another application.\r\n\r\n")
            TEXT("The sample will now close."), hr);
        pSrcFilter->Release();
        return hr;
    }

    // Now that the filter has been added to the graph and we have
    // rendered its stream, we can release this reference to the filter.
    pSrcFilter->Release();

    // Set video window style and position
    hr = SetupVideoWindow();
    if (FAILED(hr))
    {
        Msg(TEXT("Couldn't initialize video window!  hr=0x%x"), hr);
        return hr;
    }

#ifdef REGISTER_FILTERGRAPH
    // Add our graph to the running object table, which will allow
    // the GraphEdit application to "spy" on our graph
    hr = AddGraphToRot(g_pGraph, &g_dwGraphRegister);
    if (FAILED(hr))
    {
        Msg(TEXT("Failed to register filter graph with ROT!  hr=0x%x"), hr);
        g_dwGraphRegister = 0;
    }
#endif

    // Start previewing video data
    hr = g_pMC->Run();
    if (FAILED(hr))
    {
        Msg(TEXT("Couldn't run the graph!  hr=0x%x"), hr);
        return hr;
    }

    // Remember current state
    g_psCurrent = Running;
        
    return S_OK;
}
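FindCaptureDevice(), called above, is not shown in this listing. A minimal sketch of the usual approach with the system device enumerator, returning the first video-input device and omitting any device-selection UI:

HRESULT FindCaptureDevice(IBaseFilter **ppSrcFilter)
{
    HRESULT hr = S_OK;
    IBaseFilter *pSrc = NULL;
    IMoniker *pMoniker = NULL;
    ICreateDevEnum *pDevEnum = NULL;
    IEnumMoniker *pClassEnum = NULL;

    if (!ppSrcFilter)
        return E_POINTER;

    // Create the system device enumerator
    hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER,
                          IID_ICreateDevEnum, (void **)&pDevEnum);

    // Create an enumerator for the video capture device category
    if (SUCCEEDED(hr))
        hr = pDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,
                                             &pClassEnum, 0);

    if (SUCCEEDED(hr))
    {
        // CreateClassEnumerator returns S_FALSE (which passes SUCCEEDED) and
        // leaves the enumerator NULL when no capture devices are installed
        if (pClassEnum == NULL)
            hr = E_FAIL;
        // Bind the first moniker in the category to its filter object
        else if (pClassEnum->Next(1, &pMoniker, NULL) == S_OK)
        {
            hr = pMoniker->BindToObject(NULL, NULL, IID_IBaseFilter, (void **)&pSrc);
            pMoniker->Release();
        }
        else
            hr = E_FAIL;
    }

    if (SUCCEEDED(hr))
        *ppSrcFilter = pSrc;   // caller releases

    if (pClassEnum) pClassEnum->Release();
    if (pDevEnum)   pDevEnum->Release();
    return hr;
}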
Example #5
HRESULT PlayMovieInWindow(LPTSTR szFile)
{
    USES_CONVERSION;
    WCHAR wFile[MAX_PATH];
    HRESULT hr;

    // Check input string
    if (szFile == NULL)
        return E_POINTER;

    // Clear open dialog remnants before calling RenderFile()
    UpdateWindow(ghApp);

    // Convert filename to wide character string
    wcsncpy(wFile, T2W(szFile), NUMELMS(wFile)-1);
    wFile[MAX_PATH-1] = 0;

    // Get the interface for DirectShow's GraphBuilder
    JIF(CoCreateInstance(CLSID_FilterGraph, NULL, CLSCTX_INPROC_SERVER, 
                         IID_IGraphBuilder, (void **)&pGB));

    if(SUCCEEDED(hr))
    {
        // Create the Video Mixing Renderer and add it to the graph
        JIF(InitializeWindowlessVMR());
                
        // Have the graph builder construct the appropriate graph automatically
        JIF(pGB->RenderFile(wFile, NULL));

        // QueryInterface for DirectShow interfaces
        JIF(pGB->QueryInterface(IID_IMediaControl, (void **)&pMC));
        JIF(pGB->QueryInterface(IID_IMediaEventEx, (void **)&pME));
        JIF(pGB->QueryInterface(IID_IMediaSeeking, (void **)&pMS));

        // Is this an audio-only file (no video component)?
        if (CheckVideoVisibility())
        {
            JIF(InitVideoWindow(1, 1));
        }
        else
        {
            // This sample requires a video clip to be loaded
            Msg(TEXT("This sample requires media with a video component.  ")
                TEXT("Please select another file."));
            return E_FAIL;
        }

        // Have the graph signal event via window callbacks for performance
        JIF(pME->SetNotifyWindow((OAHWND)ghApp, WM_GRAPHNOTIFY, 0));

        // Add the bitmap (static image or dynamic text) to the VMR's input
        if (g_dwTickerFlags & MARK_STATIC_IMAGE)
        {
            hr = BlendApplicationImage(ghApp);
            if (FAILED(hr))
                PostMessage(ghApp, WM_CLOSE, 0, 0);

            CheckMenuItem(ghMenu, ID_TICKER_STATIC_IMAGE, MF_CHECKED);
            CheckMenuItem(ghMenu, ID_TICKER_DYNAMIC_TEXT, MF_UNCHECKED);
        }
        else                                        // MARK_DYNAMIC_TEXT
        {
            if (!g_hFont)
                g_hFont = SetTextFont(FALSE);  // Don't display the Windows Font Select dialog

            // If the initial blend fails, post a close message to exit the app
            hr = BlendApplicationText(ghApp, g_szAppText);
            if (FAILED(hr))
                PostMessage(ghApp, WM_CLOSE, 0, 0);

            CheckMenuItem(ghMenu, ID_TICKER_STATIC_IMAGE, MF_UNCHECKED);
            CheckMenuItem(ghMenu, ID_TICKER_DYNAMIC_TEXT, MF_CHECKED);
        }

        // Complete the window setup
        ShowWindow(ghApp, SW_SHOWNORMAL);
        UpdateWindow(ghApp);
        SetForegroundWindow(ghApp);
        SetFocus(ghApp);

#ifdef REGISTER_FILTERGRAPH
        if (FAILED(AddGraphToRot(pGB, &g_dwGraphRegister)))
        {
            Msg(TEXT("Failed to register filter graph with ROT!"));
            g_dwGraphRegister = 0;
        }
#endif

        // Run the graph to play the media file
        JIF(pMC->Run());

        // Start animation by default
        PostMessage(ghApp, WM_COMMAND, ID_SLIDE, 0);
    }

    return hr;
}
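InitializeWindowlessVMR(), called before RenderFile() above, creates the VMR9 up front so the graph builder connects the video stream to it instead of the legacy Video Renderer. A minimal sketch, assuming the same pGB and ghApp globals used above; the SetNumberOfStreams() call enables the mixer so BlendApplicationImage()/BlendApplicationText() can add a bitmap or text later:

HRESULT InitializeWindowlessVMR(void)
{
    IBaseFilter *pVmr = NULL;

    // Create the VMR9 and add it to the graph before RenderFile() runs
    HRESULT hr = CoCreateInstance(CLSID_VideoMixingRenderer9, NULL,
                                  CLSCTX_INPROC_SERVER, IID_IBaseFilter,
                                  (void **)&pVmr);
    if (FAILED(hr))
        return hr;

    hr = pGB->AddFilter(pVmr, L"Video Mixing Renderer 9");
    if (SUCCEEDED(hr))
    {
        IVMRFilterConfig9 *pConfig = NULL;
        hr = pVmr->QueryInterface(IID_IVMRFilterConfig9, (void **)&pConfig);
        if (SUCCEEDED(hr))
        {
            // Turn on the mixer so a static bitmap or text can be blended
            hr = pConfig->SetNumberOfStreams(1);

            // Windowless mode: video is drawn into our window, no IVideoWindow
            if (SUCCEEDED(hr))
                hr = pConfig->SetRenderingMode(VMR9Mode_Windowless);

            pConfig->Release();
        }

        if (SUCCEEDED(hr))
        {
            IVMRWindowlessControl9 *pWC = NULL;
            hr = pVmr->QueryInterface(IID_IVMRWindowlessControl9, (void **)&pWC);
            if (SUCCEEDED(hr))
            {
                // Clip and position the video inside the application window
                hr = pWC->SetVideoClippingWindow(ghApp);
                pWC->Release();
            }
        }
    }

    pVmr->Release();   // the graph holds its own reference
    return hr;
}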
Example #6
HRESULT CAccessSys::StartPreview(HWND h)
{
	HRESULT hr;
	IBaseFilter *pSrcFilter = NULL;

	if (e_psCurrent == Running){
		StopPreview();
	}

	hr = BuildPreview();
	if (FAILED(hr)){
		return hr;
	}
	
	//// Get DirectShow interfaces
	//hr = GetInterfaces();
	//if (FAILED(hr))
	//{
	//	Msg(TEXT("Failed to get video interfaces!  hr=0x%x"), hr);
	//	return hr;
	//}

	//// Attach the filter graph to the capture graph
	//hr = p_capture_graph_builder2->SetFiltergraph(p_graph);
	//if (FAILED(hr))
	//{
	//	Msg(TEXT("Failed to set capture filter graph!  hr=0x%x"), hr);
	//	return hr;
	//}

	//// Use the system device enumerator and class enumerator to find
	//// a video capture/preview device, such as a desktop USB video camera.
	//hr = FindCaptureDevice();
	//if (FAILED(hr))
	//{
	//	// Don't display a message because FindCaptureDevice will handle it
	//	return hr;
	//}

	//pSrcFilter = p_streams[0].p_device_filter;
	//// Add Capture filter to our graph.
	//hr = p_graph->AddFilter(pSrcFilter, L"Video Source");
	//if (FAILED(hr))
	//{
	//	Msg(TEXT("Couldn't add the capture filter to the graph!  hr=0x%x\r\n\r\n")
	//		TEXT("If you have a working video capture device, please make sure\r\n")
	//		TEXT("that it is connected and is not being used by another application.\r\n\r\n")
	//		TEXT("The sample will now close."), hr);
	//	pSrcFilter->Release();
	//	return hr;
	//}

	// Render the preview pin on the video capture filter
	// Use this instead of g_pGraph->RenderFile
	//pSrcFilter = p_streams[0].p_device_filter;
	//hr = p_capture_graph_builder2->RenderStream(&PIN_CATEGORY_PREVIEW, &MEDIATYPE_Video,
	//	pSrcFilter, NULL, NULL);
	//if (FAILED(hr))
	//{
	//	Msg(TEXT("Couldn't render the video capture stream.  hr=0x%x\r\n")
	//		TEXT("The capture device may already be in use by another application.\r\n\r\n")
	//		TEXT("The sample will now close."), hr);
	//	pSrcFilter->Release();
	//	return hr;
	//}

	//// Now that the filter has been added to the graph and we have
	//// rendered its stream, we can release this reference to the filter.
	//pSrcFilter->Release();

	//pSrcFilter = p_streams[1].p_device_filter;
	//// Add Capture filter to our graph.
	//hr = p_graph->AddFilter(pSrcFilter, L"Audio Source");
	//if (FAILED(hr))
	//{
	//	Msg(TEXT("Couldn't add the capture filter to the graph!  hr=0x%x\r\n\r\n")
	//		TEXT("If you have a working video capture device, please make sure\r\n")
	//		TEXT("that it is connected and is not being used by another application.\r\n\r\n")
	//		TEXT("The sample will now close."), hr);
	//	pSrcFilter->Release();
	//	return hr;
	//}

	// Render the preview pin on the audio capture filter
	// Use this instead of g_pGraph->RenderFile
	//pSrcFilter = p_streams[1].p_device_filter;
	//hr = p_capture_graph_builder2->RenderStream(&PIN_CATEGORY_PREVIEW, &MEDIATYPE_Audio,
	//	pSrcFilter, NULL, NULL);
	//if (FAILED(hr))
	//{
	//	Msg(TEXT("Couldn't render the video capture stream.  hr=0x%x\r\n")
	//		TEXT("The capture device may already be in use by another application.\r\n\r\n")
	//		TEXT("The sample will now close."), hr);
	//	pSrcFilter->Release();
	//	return hr;
	//}

	//// Now that the filter has been added to the graph and we have
	//// rendered its stream, we can release this reference to the filter.
	//pSrcFilter->Release();

	// Set video window style and position
    if (h){
        h_wnd = h;

        hr = SetupVideoWindow(h_wnd);
        if (FAILED(hr))
        {
            Msg(TEXT("Couldn't initialize video window!  hr=0x%x"), hr);
            return hr;
        }
    }

#ifdef REGISTER_FILTERGRAPH
	// Add our graph to the running object table, which will allow
	// the GraphEdit application to "spy" on our graph
	hr = AddGraphToRot(g_pGraph, &g_dwGraphRegister);
	if (FAILED(hr))
	{
		Msg(TEXT("Failed to register filter graph with ROT!  hr=0x%x"), hr);
		g_dwGraphRegister = 0;
	}
#endif

	// Start previewing video & audio data
	hr = p_control->Run();
	if (FAILED(hr))
	{
		Msg(TEXT("Couldn't run the graph!  hr=0x%x"), hr);
		return hr;
	}
	DWORD err = GetLastError();

	// Remember current state
	e_psCurrent = Running;
	//ConnectToServer(m_userinfo.ip, m_userinfo.port);
	return S_OK;
}
Example #7
// BuildGraph sets up devices, adds and connects filters
HRESULT
CBDAFilterGraph::BuildGraph(NETWORK_TYPE NetType)
{
    HRESULT hr = S_OK;
    m_NetworkType = NetType;

    // if we already have a filter graph, tear it down
    if(m_fGraphBuilt)
    {
        if(m_fGraphRunning)
        {
            hr = StopGraph ();
        }

        hr = TearDownGraph ();
    }

    // STEP 1: load network provider first so that it can configure other
    // filters, such as configuring the demux to sprout output pins.
    // We also need to submit a tune request to the Network Provider so it will
    // tune to a channel
    if(FAILED (hr = LoadNetworkProvider()))
    {
        ErrorMessageBox(TEXT("Cannot load network provider\n"));
        TearDownGraph();
        m_fGraphFailure = true;
        return hr;
    }

    hr = m_pNetworkProvider->QueryInterface(__uuidof (ITuner), reinterpret_cast <void**> (&m_pITuner));
    if(FAILED (hr))
    {
        ErrorMessageBox(TEXT("pNetworkProvider->QI: Can't QI for ITuner.\n"));
        TearDownGraph();
        m_fGraphFailure = true;
        return hr;
    }

    // create a tune request to initialize the network provider
    // before connecting other filters
    CComPtr <IATSCChannelTuneRequest>  pATSCTuneRequest;
    if(FAILED (hr = CreateATSCTuneRequest(
        m_lPhysicalChannel,
        m_lMajorChannel, 
        m_lMinorChannel,
        &pATSCTuneRequest
        )))
    {
        ErrorMessageBox(TEXT("Cannot create tune request\n"));
        TearDownGraph();
        m_fGraphFailure = true;
        return hr;
    }

    //submit the tune request to the network provider
    hr = m_pITuner->put_TuneRequest(pATSCTuneRequest);
    if(FAILED(hr))
    {
        ErrorMessageBox(TEXT("Cannot submit the tune request\n"));
        TearDownGraph();
        m_fGraphFailure = true;
        return hr;
    }


    // STEP 2: Load tuner device and connect to network provider
    if(FAILED (hr = LoadFilter (
        KSCATEGORY_BDA_NETWORK_TUNER, 
        &m_pTunerDevice,
        m_pNetworkProvider, 
        TRUE
        )))
    {
        ErrorMessageBox(TEXT("Cannot load tuner device and connect network provider\n"));
        TearDownGraph();
        m_fGraphFailure = true;
        return hr;
    }

    // STEP 3: Load demodulator device and connect it to the tuner device
    if(FAILED (hr = LoadFilter (
        KSCATEGORY_BDA_RECEIVER_COMPONENT, 
        &m_pDemodulatorDevice,
        m_pTunerDevice, 
        TRUE
        )))
    {
        ErrorMessageBox(TEXT("Cannot load capture device and connect tuner\n"));
        TearDownGraph();
        m_fGraphFailure = true;
        return hr;
    }

    // STEP 4: Load capture device and connect it to the demodulator device
    if(FAILED (hr = LoadFilter (
        KSCATEGORY_BDA_RECEIVER_COMPONENT, 
        &m_pCaptureDevice,
        m_pDemodulatorDevice, 
        TRUE
        )))
    {
        ErrorMessageBox(TEXT("Cannot load capture device and connect tuner\n"));
        TearDownGraph();
        m_fGraphFailure = true;
        return hr;
    }

    // STEP 5: Load demux
    if(FAILED (hr = LoadDemux()))
    {
        ErrorMessageBox(TEXT("Cannot load demux\n"));
        TearDownGraph();
        m_fGraphFailure = true;
        return hr;
    }

    //
    // this next call loads and connects filters associated with
    // the demultiplexor. if you want to manually load individual
    // filters such as audio and video decoders, use the code at
    // the bottom of this file
    //
#ifdef DEBUG
    hr = AddGraphToRot (m_pFilterGraph, &m_dwGraphRegister);
    if (FAILED(hr))
    {
        ///ErrorMessageBox(TEXT("Failed to register filter graph with ROT!  hr=0x%x"), hr);
        m_dwGraphRegister = 0;
    }
#endif
    //MessageBox (NULL, _T(""), _T(""), MB_OK);
    // STEP 6: Render demux pins
    if(FAILED (hr = RenderDemux()))
    {
        ErrorMessageBox(TEXT("Cannot load demux\n"));
        TearDownGraph();
        m_fGraphFailure = true;
        return hr;
    }

    m_fGraphBuilt = true;

    return S_OK;
}
Example #8
BOOL CDShowCtrl::CreateRunGraph(LPCWSTR writeFilePath)
{
	this->runFlag = FALSE;
	if( this->graph == NULL ){
		return FALSE;
	}
	BOOL ret = TRUE;

	AddGraphToRot(this->graph, &m_dwRegister);
	HRESULT hr;

	for( size_t i=0; i<this->buffData.size(); i++ ){
		delete this->buffData[i];
	}
	this->buffData.clear();

	tsSrc = static_cast<CTSSrcFilter *>(CTSSrcFilter::CreateInstance(NULL, &hr));
	if (tsSrc == NULL || FAILED(hr)){
		ret = FALSE;
		goto ErrEnd;
	}
	hr = tsSrc->QueryInterface(IID_IBaseFilter, reinterpret_cast<void**>(&this->bonSrc));
	if (FAILED(hr)){
		ret = FALSE;
		goto ErrEnd;
	}

	hr = CoCreateInstance(IID_MSVideoDec, NULL, CLSCTX_INPROC_SERVER, 
		IID_IBaseFilter, (void**)&this->videoDec );
	if (FAILED(hr)){
		ret = FALSE;
		goto ErrEnd;
	}

	hr = CoCreateInstance(IID_MSAudioDec, NULL, CLSCTX_INPROC_SERVER, 
		IID_IBaseFilter, (void**)&this->audioDec );
	if (FAILED(hr)){
		ret = FALSE;
		goto ErrEnd;
	}
	
	hr = CoCreateInstance(IID_ATIFileWriter, NULL, CLSCTX_INPROC_SERVER, 
		IID_IBaseFilter, (void**)&this->writeFile );
	if (FAILED(hr)){
		ret = FALSE;
		goto ErrEnd;
	}

	this->graph->AddFilter(this->bonSrc, L"TSSrc");
	this->graph->AddFilter(this->videoDec, L"Microsoft DTV-DVD Video Decoder");
	this->graph->AddFilter(this->audioDec, L"Microsoft DTV-DVD Audio Decoder");
	this->graph->AddFilter(this->writeFile, L"ATI MPEG File Writer");

	IFileSinkFilter2 *sink = NULL;
	hr = this->writeFile->QueryInterface(IID_IFileSinkFilter2, (void**)&sink);
	if (FAILED(hr)){
		ret = FALSE;
		goto ErrEnd;
	}
	hr = sink->SetFileName(writeFilePath, NULL);
	hr = sink->SetMode(AM_FILE_OVERWRITE);
	SAFE_RELEASE(sink);

	// Connect the pins
	hr = ConnectFilters(this->graph, this->bonSrc, this->videoDec);
	if (FAILED(hr)){
		ret = FALSE;
		goto ErrEnd;
	}

	hr = ConnectFilters(this->graph, this->bonSrc, this->audioDec);
	if (FAILED(hr)){
		ret = FALSE;
		goto ErrEnd;
	}
	
	hr = ConnectFilters(this->graph, this->videoDec, this->videoScaler);
	if (FAILED(hr)){
		ret = FALSE;
		goto ErrEnd;
	}

	hr = ConnectFilters(this->graph, this->videoScaler, this->videoEnc);
	if (FAILED(hr)){
		ret = FALSE;
		goto ErrEnd;
	}

	hr = ConnectFilters(this->graph, this->audioDec, this->audioEnc);
	if (FAILED(hr)){
		ret = FALSE;
		goto ErrEnd;
	}
	
	hr = ConnectFilters(this->graph, this->videoEnc, this->muxer);
	if (FAILED(hr)){
		ret = FALSE;
		goto ErrEnd;
	}

	hr = ConnectFilters(this->graph, this->audioEnc, this->muxer);
	if (FAILED(hr)){
		ret = FALSE;
		goto ErrEnd;
	}

	hr = ConnectFilters(this->graph, this->muxer, this->writeFile);
	if (FAILED(hr)){
		ret = FALSE;
		goto ErrEnd;
	}
	
	hr = this->graph->QueryInterface(IID_IMediaControl, (void **)&this->mediaCtrl);
	if (FAILED(hr)){
		ret = FALSE;
		goto ErrEnd;
	}
	this->preCreateFlag = TRUE;
	this->preCount = 0;

	return TRUE;
ErrEnd:
	SAFE_RELEASE(this->bonSrc);
	SAFE_RELEASE(this->tsSrc);
	SAFE_RELEASE(this->videoDec);
	SAFE_RELEASE(this->audioDec);
	SAFE_RELEASE(this->writeFile);
	SAFE_RELEASE(this->videoScaler);
	SAFE_RELEASE(this->videoEnc);
	SAFE_RELEASE(this->audioEnc);
	SAFE_RELEASE(this->muxer);
	SAFE_RELEASE(this->graph);

	return ret;
}
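ConnectFilters(), used throughout CreateRunGraph(), is the standard "first free output pin to first free input pin" helper; it is not defined in this listing. A minimal sketch following the usual DirectShow pattern (intelligent connect via IGraphBuilder::Connect, so intermediate filters may be inserted):

// Find the first unconnected pin with the requested direction on a filter
HRESULT GetUnconnectedPin(IBaseFilter *pFilter, PIN_DIRECTION PinDir, IPin **ppPin)
{
    IEnumPins *pEnum = NULL;
    IPin *pPin = NULL;

    *ppPin = NULL;
    HRESULT hr = pFilter->EnumPins(&pEnum);
    if (FAILED(hr))
        return hr;

    while (pEnum->Next(1, &pPin, NULL) == S_OK)
    {
        PIN_DIRECTION ThisDir;
        pPin->QueryDirection(&ThisDir);
        if (ThisDir == PinDir)
        {
            IPin *pTmp = NULL;
            if (SUCCEEDED(pPin->ConnectedTo(&pTmp)))
            {
                pTmp->Release();      // already connected, keep looking
            }
            else
            {
                pEnum->Release();
                *ppPin = pPin;        // caller releases
                return S_OK;
            }
        }
        pPin->Release();
    }

    pEnum->Release();
    return E_FAIL;                    // no free pin of that direction
}

// Connect two filters, letting the graph insert transforms if needed
HRESULT ConnectFilters(IGraphBuilder *pGraph, IBaseFilter *pSrc, IBaseFilter *pDest)
{
    IPin *pOut = NULL, *pIn = NULL;

    HRESULT hr = GetUnconnectedPin(pSrc, PINDIR_OUTPUT, &pOut);
    if (FAILED(hr))
        return hr;

    hr = GetUnconnectedPin(pDest, PINDIR_INPUT, &pIn);
    if (SUCCEEDED(hr))
    {
        hr = pGraph->Connect(pOut, pIn);
        pIn->Release();
    }

    pOut->Release();
    return hr;
}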
Example #9
HRESULT PlayMovieInWindow(LPTSTR szFile, BOOL bReOpenAfterLicenseAcquired)
{
    USES_CONVERSION;
    WCHAR wFile[MAX_PATH];
    HRESULT hr;

    // Check input string
    if (!szFile)
        return E_POINTER;

    // Clear open dialog remnants before calling RenderFile()
    UpdateWindow(ghApp);

    // Convert filename to wide character string
    wcsncpy(wFile, T2W(szFile), NUMELMS(wFile)-1);
    wFile[MAX_PATH-1] = 0;

    // First pass of rendering the media file.  If a DRM license must
    // be acquired before the file can be loaded, then the reopen flag
    // will be set.
    if( !bReOpenAfterLicenseAcquired )
    {
        // Get the interface for DirectShow's GraphBuilder
        JIF(CoCreateInstance(CLSID_FilterGraph, NULL, CLSCTX_INPROC_SERVER, 
                             IID_IGraphBuilder, (void **)&pGB));

        JIF(pGB->QueryInterface(IID_IMediaEventEx, (void **)&pME));

        if(SUCCEEDED(hr))
        {
            // Have the graph signal event via window callbacks
            //
            // Start this before we insert the reader filter, since we may need
            // to monitor DRM license acquisition messages on reader creation
            //
            JIF(pME->SetNotifyWindow((OAHWND)ghApp, WM_GRAPHNOTIFY, 0));

            // Use special handling for Windows Media files
            if (IsWindowsMediaFile(szFile))
            {
                // Load the improved ASF reader filter by CLSID
                hr = CreateFilter(CLSID_WMAsfReader, &g_pReader);
                if(FAILED(hr))
                {
                    Msg(TEXT("Failed to create WMAsfWriter filter!  hr=0x%x\0"), hr);
                    return hr;
                }

                // Add the ASF reader filter to the graph.  For ASF/WMV/WMA content,
                // this filter is NOT the default and must be added explicitly.
                hr = pGB->AddFilter(g_pReader, L"ASF Reader");
                if(FAILED(hr))
                {
                    Msg(TEXT("Failed to add ASF reader filter to graph!  hr=0x%x\0"), hr);
                    return hr;
                }

                // Create the key provider that will be used to unlock the WM SDK
                JIF(AddKeyProvider(pGB));
            
                // Create the DRM license event
                g_hLicenseEvent = CreateEvent( NULL, FALSE, FALSE, NULL );
                if( !g_hLicenseEvent )
                {
                    return E_OUTOFMEMORY;
                }

                // Set its source filename
                JIF(g_pReader->QueryInterface(IID_IFileSourceFilter, (void **) &g_pFileSource));

                // Attempt to load this file
                hr = g_pFileSource->Load(wFile, NULL);

                // Handle Digital Rights Management (DRM) errors
                if(NS_E_LICENSE_REQUIRED == hr)
                {
                    Msg(TEXT("This media file is protected by DRM and needs a license.\r\n\r\n")
                        TEXT("Attempting to acquire a license...\0"));
                    g_bWaitingForLicense = TRUE;
                    return hr;
                }
                else if(NS_E_PROTECTED_CONTENT == hr)
                {
                    Msg(TEXT("This media file is protected by DRM and needs a license.\r\n\r\n")
                        TEXT("In order to play DRM-encoded content, you must acquire a DRM stub library\r\n")
                        TEXT("from Microsoft and link it with this application.  The default version of\r\n")
                        TEXT("the WMStub.lib library does not support Digital Rights Management (DRM)."));
                    return hr;
                }
                else if (FAILED(hr))
                {
                    Msg(TEXT("Failed to load file in source filter (g_pFileSource->Load())!  hr=0x%x\0"), hr);
                    return hr;
                }

                // Render the output pins of the ASF reader to build the
                // remainder of the graph automatically
                JIF(RenderOutputPins(pGB, g_pReader));

                // Since the graph is built and the filters are added to the graph,
                // the WM ASF reader interface can be released.
                g_pReader->Release();
                g_pReader = NULL;
            }

            // Not a Windows Media file, so just render the standard way
            else
            {
                // Have the graph builder construct the appropriate graph automatically
                JIF(pGB->RenderFile(wFile, NULL));
            }
        }
    }    
    else
    {
        hr = g_pFileSource->Load(wFile, NULL);
        if( SUCCEEDED( hr ) )
        {
            Msg(TEXT("Successfully loaded file after DRM license acquisition!"));

            // Render the output pins of the ASF reader to build the
            // remainder of the graph automatically
            JIF(RenderOutputPins(pGB, g_pReader));

            // Since the graph is built and the filters are added to the graph,
            // the WM ASF reader interface can be released.
            g_pReader->Release(); // not really necessary
            g_pReader = NULL;
        }
        else
        {
            Msg(TEXT("Failed to Load file after acquiring license!  hr=0x%x\0"), hr);
        }
    }

    if( SUCCEEDED( hr ) )
    {
        // QueryInterface for DirectShow interfaces
        JIF(pGB->QueryInterface(IID_IMediaControl, (void **)&pMC));
        JIF(pGB->QueryInterface(IID_IMediaSeeking, (void **)&pMS));

        // Query for video interfaces, which may not be relevant for audio files
        JIF(pGB->QueryInterface(IID_IVideoWindow, (void **)&pVW));
        JIF(pGB->QueryInterface(IID_IBasicVideo,  (void **)&pBV));

        // Query for audio interfaces, which may not be relevant for video-only files
        JIF(pGB->QueryInterface(IID_IBasicAudio, (void **)&pBA));

        // Is this an audio-only file (no video component)?
        CheckVisibility();

        if (!g_bAudioOnly)
        {
            // Setup the video window
            JIF(pVW->put_Owner((OAHWND)ghApp));
            JIF(pVW->put_WindowStyle(WS_CHILD | WS_CLIPSIBLINGS | WS_CLIPCHILDREN));

            JIF(InitVideoWindow(1, 1));
            GetFrameStepInterface();
        }
        else
        {
            // Initialize the default player size and enable playback menu items
            JIF(InitPlayerWindow());
            EnablePlaybackMenu(TRUE, AUDIO);
        }

        // Complete window initialization
        CheckSizeMenu(ID_FILE_SIZE_NORMAL);
        ShowWindow(ghApp, SW_SHOWNORMAL);
        UpdateWindow(ghApp);
        SetForegroundWindow(ghApp);
        g_bFullscreen = FALSE;
        UpdateMainTitle();

#ifdef REGISTER_FILTERGRAPH
        hr = AddGraphToRot(pGB, &g_dwGraphRegister);
        if (FAILED(hr))
        {
            Msg(TEXT("Failed to register filter graph with ROT!  hr=0x%x"), hr);
            g_dwGraphRegister = 0;
        }
#endif

        // Run the graph to play the media file
        JIF(pMC->Run());

        g_psCurrent=Running;
        SetFocus(ghApp);
    }

    return hr;
}
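RenderOutputPins(), used after the WM ASF Reader is added and its file is loaded, asks the graph builder to render every unconnected output pin of a filter. A minimal sketch of that helper (error handling kept simple):

HRESULT RenderOutputPins(IGraphBuilder *pGB, IBaseFilter *pFilter)
{
    IEnumPins *pEnumPin = NULL;
    IPin *pPin = NULL;

    HRESULT hr = pFilter->EnumPins(&pEnumPin);
    if (FAILED(hr))
        return hr;

    while (pEnumPin->Next(1, &pPin, NULL) == S_OK)
    {
        IPin *pConnectedPin = NULL;

        // Only render pins that are not yet connected
        if (pPin->ConnectedTo(&pConnectedPin) == VFW_E_NOT_CONNECTED)
        {
            PIN_DIRECTION PinDir;
            pPin->QueryDirection(&PinDir);
            if (PinDir == PINDIR_OUTPUT)
                hr = pGB->Render(pPin);     // builds the downstream chain
        }
        else if (pConnectedPin)
        {
            pConnectedPin->Release();
        }

        pPin->Release();

        if (FAILED(hr))
            break;
    }

    pEnumPin->Release();
    return hr;
}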
Example #10
HRESULT PlayMovieInWindow(LPTSTR szFile)
{
    USES_CONVERSION;
    WCHAR wFile[MAX_PATH];
    HRESULT hr;

    // Check input string
    if (szFile == NULL)
        return E_POINTER;

    // Clear open dialog remnants before calling RenderFile()
    UpdateWindow(ghApp);

    // Convert filename to wide character string (bounded copy)
    wcsncpy(wFile, T2W(szFile), NUMELMS(wFile)-1);
    wFile[MAX_PATH-1] = 0;

    // Get the interface for DirectShow's GraphBuilder
    JIF(CoCreateInstance(CLSID_FilterGraph, NULL, CLSCTX_INPROC_SERVER, 
                         IID_IGraphBuilder, (void **)&pGB));

    if(SUCCEEDED(hr))
    {
        // Have the graph builder construct the appropriate graph automatically
        JIF(pGB->RenderFile(wFile, NULL));

        // QueryInterface for DirectShow interfaces
        JIF(pGB->QueryInterface(IID_IMediaControl, (void **)&pMC));
        JIF(pGB->QueryInterface(IID_IMediaEventEx, (void **)&pME));
        JIF(pGB->QueryInterface(IID_IMediaSeeking, (void **)&pMS));
        JIF(pGB->QueryInterface(IID_IMediaPosition, (void **)&pMP));

        // Query for video interfaces, which may not be relevant for audio files
        JIF(pGB->QueryInterface(IID_IVideoWindow, (void **)&pVW));
        JIF(pGB->QueryInterface(IID_IBasicVideo, (void **)&pBV));

        // Query for audio interfaces, which may not be relevant for video-only files
        JIF(pGB->QueryInterface(IID_IBasicAudio, (void **)&pBA));

        // Is this an audio-only file (no video component)?
        CheckVisibility();

        // Have the graph signal event via window callbacks for performance
        JIF(pME->SetNotifyWindow((OAHWND)ghApp, WM_GRAPHNOTIFY, 0));

        if (!g_bAudioOnly)
        {
            JIF(pVW->put_Owner((OAHWND)ghApp));
            JIF(pVW->put_WindowStyle(WS_CHILD | WS_CLIPSIBLINGS | WS_CLIPCHILDREN));

            JIF(InitVideoWindow(1, 1));
            GetFrameStepInterface();
        }
        else
        {
            JIF(InitPlayerWindow());
        }

        // Let's get ready to rumble!
        CheckSizeMenu(ID_FILE_SIZE_NORMAL);
        ShowWindow(ghApp, SW_SHOWNORMAL);
        UpdateWindow(ghApp);
        SetForegroundWindow(ghApp);
        SetFocus(ghApp);
        g_bFullscreen = FALSE;
        g_PlaybackRate = 1.0;
        UpdateMainTitle();

#ifdef REGISTER_FILTERGRAPH
        hr = AddGraphToRot(pGB, &g_dwGraphRegister);
        if (FAILED(hr))
        {
            Msg(TEXT("Failed to register filter graph with ROT!  hr=0x%x"), hr);
            g_dwGraphRegister = 0;
        }
#endif

        // Run the graph to play the media file
        JIF(pMC->Run());
        g_psCurrent=Running;

        SetFocus(ghApp);
    }

    return hr;
}
HRESULT PlayMovieInWindow(LPTSTR szFile)
{
    HRESULT hr;

    // Check input string
    if (szFile == NULL)
        return E_POINTER;

    // Clear open dialog remnants before calling RenderFile()
    UpdateWindow(ghApp);

    // Get the interface for DirectShow's GraphBuilder
    JIF(CoCreateInstance(CLSID_FilterGraph, NULL, CLSCTX_INPROC_SERVER,
                         IID_IGraphBuilder, (void **)&pGB));

    if(SUCCEEDED(hr))
    {
        SmartPtr <IBaseFilter> pVmr;

        // Create the Video Mixing Renderer and add it to the graph
        JIF(InitializeWindowlessVMR(&pVmr));

        // Render the file programmatically to use the VMR9 as renderer.
        // Pass TRUE to create an audio renderer also.
        if (FAILED(hr = RenderFileToVideoRenderer(pGB, szFile, TRUE)))
            return hr;

        // QueryInterface for DirectShow interfaces
        JIF(pGB->QueryInterface(IID_IMediaControl, (void **)&pMC));
        JIF(pGB->QueryInterface(IID_IMediaEventEx, (void **)&pME));
        JIF(pGB->QueryInterface(IID_IMediaSeeking, (void **)&pMS));
        JIF(pGB->QueryInterface(IID_IBasicAudio, (void **)&pBA));

        // Is this an audio-only file (no video component)?
        if (CheckVideoVisibility())
        {
            JIF(InitVideoWindow(1, 1));
        }
        else
        {
            // This sample requires a video clip to be loaded
            Msg(TEXT("This sample requires media with a video component.  ")
                TEXT("Please select another file."));
            return E_FAIL;
        }

        // Add the bitmap to the VMR's input
        BlendApplicationImage(ghApp);

        // Have the graph signal event via window callbacks for performance
        JIF(pME->SetNotifyWindow((OAHWND)ghApp, WM_GRAPHNOTIFY, 0));

        // Complete the window setup
        ShowWindow(ghApp, SW_SHOWNORMAL);
        UpdateWindow(ghApp);
        SetForegroundWindow(ghApp);
        SetFocus(ghApp);
        UpdateMainTitle();

#ifdef REGISTER_FILTERGRAPH
        hr = AddGraphToRot(pGB, &g_dwGraphRegister);
        if (FAILED(hr))
        {
            Msg(TEXT("Failed to register filter graph with ROT!  hr=0x%x"), hr);
            g_dwGraphRegister = 0;
        }
#endif

        // Run the graph to play the media file
        JIF(pMC->Run());
        g_psCurrent=Running;
    }

    return hr;
}
Example #12
HRESULT CaptureVideo()
{
    HRESULT hr;
    IBaseFilter *pSrcFilter=NULL;

    // Get DirectShow interfaces
    hr = GetInterfaces();
    if (FAILED(hr))
    {
        Msg(TEXT("Failed to get video interfaces!  hr=0x%x"), hr);
        return hr;
    }

    // Attach the filter graph to the capture graph
    hr = g_pCapture->SetFiltergraph(g_pGraph);
    if (FAILED(hr))
    {
        Msg(TEXT("Failed to set capture filter graph!  hr=0x%x"), hr);
        return hr;
    }

    // Use the system device enumerator and class enumerator to find
    // a video capture/preview device, such as a desktop USB video camera.
    hr = FindCaptureDevice(&pSrcFilter);
    if (FAILED(hr))
    {
        // Don't display a message because FindCaptureDevice will handle it
        return hr;
    }
   
    // Add Capture filter to our graph.
    hr = g_pGraph->AddFilter(pSrcFilter, L"Video Capture");
    if (FAILED(hr))
    {
        Msg(TEXT("Couldn't add the capture filter to the graph!  hr=0x%x\r\n\r\n") 
            TEXT("If you have a working video capture device, please make sure\r\n")
            TEXT("that it is connected and is not being used by another application.\r\n\r\n")
            TEXT("The sample will now close."), hr);
        pSrcFilter->Release();
        return hr;
    }


    // Copied code
    //========================================
    IAMStreamConfig *pSC = NULL;

    hr = g_pCapture->FindInterface(&PIN_CATEGORY_PREVIEW,
                                      &MEDIATYPE_Interleaved,
                                      pSrcFilter, IID_IAMStreamConfig, (void **)&pSC);

    if(FAILED(hr))
        hr = g_pCapture->FindInterface(&PIN_CATEGORY_PREVIEW,
                                      &MEDIATYPE_Video, pSrcFilter,
                                      IID_IAMStreamConfig, (void **)&pSC);

    if (!pSC) {
        // Couldn't get IAMStreamConfig; release the source filter before bailing out
        pSrcFilter->Release();
        return hr;
    }

    int iCount = 0, iSize = 0;
    hr = pSC->GetNumberOfCapabilities(&iCount, &iSize);

    // Check the size to make sure we pass in the correct structure.
    if (iSize == sizeof(VIDEO_STREAM_CONFIG_CAPS))
    {
        // Use the video capabilities structure.

        int i = 0;

        for (int iFormat = 0; iFormat < iCount; iFormat++)
        {
            VIDEO_STREAM_CONFIG_CAPS scc;   // only needed by the GetStreamCaps code below
            AM_MEDIA_TYPE *pmtConfig = NULL;

            hr = pSC->GetFormat(&pmtConfig);
            if (FAILED(hr) || (pmtConfig == NULL))
                continue;

            // Request 1920x1080 at 30 fps.  Set the dimensions before
            // recomputing biSizeImage so the size reflects the new values.
            if (pmtConfig->formattype == FORMAT_VideoInfo && pmtConfig->pbFormat != NULL)
            {
                VIDEOINFOHEADER *pvi = (VIDEOINFOHEADER *)pmtConfig->pbFormat;

                double fps = 30;
                pvi->AvgTimePerFrame = (LONGLONG)(10000000 / fps);

                pvi->bmiHeader.biWidth  = 1920;
                pvi->bmiHeader.biHeight = 1080;
                pvi->bmiHeader.biSizeImage = DIBSIZE(pvi->bmiHeader);

                hr = pSC->SetFormat(pmtConfig);
            }

            // Free the media type returned by GetFormat
            DeleteMediaType(pmtConfig);


            //hr = pSC->GetStreamCaps(iFormat, &pmtConfig, (BYTE*)&scc);
            //if (SUCCEEDED(hr))
            //{
            //    /* Examine the format, and possibly use it. */
            //    if (pmtConfig->formattype == FORMAT_VideoInfo) {
            //        long width = HEADER(pmtConfig->pbFormat)->biWidth;
            //        long height = HEADER(pmtConfig->pbFormat)->biHeight;

            //        

            //        if (width == 1920 && height == 1080) {
            //            VIDEOINFOHEADER *info = (VIDEOINFOHEADER *)pmtConfig->pbFormat;

            //            if (i == 0) {
            //                pSC->SetFormat(pmtConfig);
            //                DeleteMediaType(pmtConfig);
            //                break;
            //            }
            //            i++;
            //        }
            //    }

            //    // Delete the media type when you are done.
            //    DeleteMediaType(pmtConfig);
            //}
        }
    }

    // Done with the stream config interface
    pSC->Release();

    

    //========================================

    // Render the preview pin on the video capture filter
    // Use this instead of g_pGraph->RenderFile
    hr = g_pCapture->RenderStream (&PIN_CATEGORY_PREVIEW, &MEDIATYPE_Video,
                                   pSrcFilter, NULL, NULL);
    if (FAILED(hr))
    {
        Msg(TEXT("Couldn't render the video capture stream.  hr=0x%x\r\n")
            TEXT("The capture device may already be in use by another application.\r\n\r\n")
            TEXT("The sample will now close."), hr);
        pSrcFilter->Release();
        return hr;
    }

    // Now that the filter has been added to the graph and we have
    // rendered its stream, we can release this reference to the filter.
    pSrcFilter->Release();

    // Set video window style and position
    hr = SetupVideoWindow();
    if (FAILED(hr))
    {
        Msg(TEXT("Couldn't initialize video window!  hr=0x%x"), hr);
        return hr;
    }

#ifdef REGISTER_FILTERGRAPH
    // Add our graph to the running object table, which will allow
    // the GraphEdit application to "spy" on our graph
    hr = AddGraphToRot(g_pGraph, &g_dwGraphRegister);
    if (FAILED(hr))
    {
        Msg(TEXT("Failed to register filter graph with ROT!  hr=0x%x"), hr);
        g_dwGraphRegister = 0;
    }
#endif

    // Start previewing video data
    hr = g_pMC->Run();
    if (FAILED(hr))
    {
        Msg(TEXT("Couldn't run the graph!  hr=0x%x"), hr);
        return hr;
    }

    // Remember current state
    g_psCurrent = Running;
        
    return S_OK;
}
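DeleteMediaType(), used in the format loop above to free the AM_MEDIA_TYPE returned by IAMStreamConfig::GetFormat(), normally comes from the DirectShow base-class library. If that library is not linked, an equivalent pair of helpers looks like this:

// Release the format block and any embedded pUnk inside an AM_MEDIA_TYPE
void FreeMediaType(AM_MEDIA_TYPE& mt)
{
    if (mt.cbFormat != 0)
    {
        CoTaskMemFree((PVOID)mt.pbFormat);
        mt.cbFormat = 0;
        mt.pbFormat = NULL;
    }
    if (mt.pUnk != NULL)
    {
        mt.pUnk->Release();
        mt.pUnk = NULL;
    }
}

// Free both the contents and the structure itself (allocated with CoTaskMemAlloc)
void DeleteMediaType(AM_MEDIA_TYPE *pmt)
{
    if (pmt != NULL)
    {
        FreeMediaType(*pmt);
        CoTaskMemFree(pmt);
    }
}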