// 예제 #1 (Example #1)
// 0
// Creates a topology output (renderer) node for one source stream:
// audio streams get the Streaming Audio Renderer, video streams get the
// video renderer bound to hwndVideo.
//
// pSourceSD [in]  - Stream descriptor of the source stream to render.
// hwndVideo [in]  - Window that will display video (unused for audio).
// ppNode    [out] - Receives the output node; caller must Release it.
//
// Returns S_OK on success; E_FAIL when the stream's major type is neither
// audio nor video; otherwise the first failing HRESULT.
// NOTE(review): CHECK_HR is assumed to test the HRESULT and `goto done`
// on failure -- TODO confirm against the macro's definition.
HRESULT CreateOutputNode(
    IMFStreamDescriptor *pSourceSD, 
    HWND hwndVideo,
    IMFTopologyNode **ppNode
    )
{   

    IMFTopologyNode *pNode = NULL;
    IMFMediaTypeHandler *pHandler = NULL;
    IMFActivate *pRendererActivate = NULL;

    GUID guidMajorType = GUID_NULL;
    HRESULT hr = S_OK;

    // Get the stream ID.
    DWORD streamID = 0;
    pSourceSD->GetStreamIdentifier(&streamID); // Just for debugging, ignore any failures.

    // Get the media type handler for the stream.
    CHECK_HR(hr = pSourceSD->GetMediaTypeHandler(&pHandler));
    
    // Get the major media type.
    CHECK_HR(hr = pHandler->GetMajorType(&guidMajorType));

    // Create a downstream node.
    CHECK_HR(hr = MFCreateTopologyNode(MF_TOPOLOGY_OUTPUT_NODE, &pNode));

    // Create an IMFActivate object for the renderer, based on the media type.
    if (MFMediaType_Audio == guidMajorType)
    {
        // Create the audio renderer.
        TRACE((L"Stream %d: audio stream\n", streamID));
        CHECK_HR(hr = MFCreateAudioRendererActivate(&pRendererActivate));
    }
    else if (MFMediaType_Video == guidMajorType)
    {
        // Create the video renderer.
        TRACE((L"Stream %d: video stream\n", streamID));
        CHECK_HR(hr = MFCreateVideoRendererActivate(hwndVideo, &pRendererActivate));
    }
    else
    {
        // Neither audio nor video: fail this stream.
        TRACE((L"Stream %d: Unknown format\n", streamID));
        CHECK_HR(hr = E_FAIL);
    }

    // Set the IActivate object on the output node.
    CHECK_HR(hr = pNode->SetObject(pRendererActivate));

    // Return the IMFTopologyNode pointer to the caller.
    // AddRef before the cleanup below releases the local reference.
    *ppNode = pNode;
    (*ppNode)->AddRef();

done:
    // Runs on both success and failure; the extra AddRef above keeps the
    // caller's node alive past SAFE_RELEASE(pNode).
    SAFE_RELEASE(pNode);
    SAFE_RELEASE(pHandler);
    SAFE_RELEASE(pRendererActivate);
    return hr;
}
// 예제 #2 (Example #2)
// 0
// Creates the media-sink activation object (SAR for audio, EVR for video)
// for one source stream.
//
// pSourceSD    [in]  - Stream descriptor of the stream to render.
// hVideoWindow [in]  - Clipping window for video (unused for audio).
// ppActivate   [out] - Receives the IMFActivate; caller must Release it.
//
// Throws via TVPThrowExceptionMessage on most failures, but returns E_FAIL
// without throwing when the major type is neither audio nor video -- the
// caller is expected to check the HRESULT in that case.
HRESULT tTVPMFPlayer::CreateMediaSinkActivate( IMFStreamDescriptor *pSourceSD, HWND hVideoWindow, IMFActivate **ppActivate ) {
	HRESULT hr;
	CComPtr<IMFMediaTypeHandler> pHandler;
	// Get the media type handler for the stream.
	if( FAILED(hr = pSourceSD->GetMediaTypeHandler(&pHandler)) ) {
		TVPThrowExceptionMessage(L"Failed to get media type handler.");
	}
	// Get the major media type.
	GUID guidMajorType;
	if( FAILED(hr = pHandler->GetMajorType(&guidMajorType)) ) {
		TVPThrowExceptionMessage(L"Failed to get major type.");
	}
	CComPtr<IMFActivate>		pActivate;
	if( MFMediaType_Audio == guidMajorType ) {
		// Create the audio renderer.
		if( FAILED(hr = MFCreateAudioRendererActivate(&pActivate) )) {
			TVPThrowExceptionMessage(L"Failed to create audio render.");
		}
	} else if( MFMediaType_Video == guidMajorType ) {
		// Read the frame rate into the FPSNumerator/FPSDenominator members.
		// A failure here is deliberately non-fatal: hr is overwritten by the
		// renderer creation below.
		CComPtr<IMFMediaType> pMediaType;
		if( SUCCEEDED(hr = pHandler->GetCurrentMediaType(&pMediaType)) ) {
			hr = MFGetAttributeRatio( pMediaType, MF_MT_FRAME_RATE, &FPSNumerator, &FPSDenominator );
		}

		// Create the video renderer.
		if( FAILED(hr = MFCreateVideoRendererActivate(hVideoWindow, &pActivate) ) ) {
			TVPThrowExceptionMessage(L"Failed to create video render.");
		}
		// Hooking up a custom EVR presenter here would allow us to do our own
		// drawing; for now the standard presenter is used.
#if 0
		tTVPEVRCustomPresenter* my_activate_obj = new tTVPEVRCustomPresenter(hr);
		my_activate_obj->AddRef();
		CComPtr<IUnknown> unk;
		my_activate_obj->QueryInterface( IID_IUnknown, (void**)&unk );
		if( FAILED(hr = pActivate->SetUnknown(MF_ACTIVATE_CUSTOM_VIDEO_PRESENTER_ACTIVATE, unk)) ) {
			my_activate_obj->Release();
			TVPThrowExceptionMessage(L"Failed to add custom EVR presenter video render.");
		}
		my_activate_obj->Release();
#endif
	} else {
		// Unknown stream type: report failure to the caller without throwing.
		hr = E_FAIL;
	}
	if( SUCCEEDED(hr) ) {
		// Return IMFActivate pointer to caller.
		*ppActivate = pActivate;
		(*ppActivate)->AddRef();
	}
	return hr;
}
// 예제 #3 (Example #3)
// 0
// Create a renderer for the media type on the given stream descriptor
HRESULT CTedMediaFileRenderer::CreateRendererForStream(IMFStreamDescriptor* pSD, IMFTopologyNode** ppRendererNode)
{
    HRESULT hr;
    CComPtr<IMFMediaTypeHandler> spMediaTypeHandler;
    CComPtr<IMFActivate> spRendererActivate;
    CComPtr<IMFMediaSink> spRendererSink;
    CComPtr<IMFStreamSink> spRendererStreamSink;
    IMFTopologyNode* pRendererNode;
    GUID gidMajorType;

    IFC( MFCreateTopologyNode(MF_TOPOLOGY_OUTPUT_NODE, &pRendererNode) );
    
    IFC( pSD->GetMediaTypeHandler( &spMediaTypeHandler ) );
    IFC( spMediaTypeHandler->GetMajorType( &gidMajorType ) );

    if(MFMediaType_Audio == gidMajorType) 
    {
        IFC( MFCreateAudioRendererActivate(&spRendererActivate) );
        IFC( spRendererActivate->ActivateObject(IID_IMFMediaSink, (void**) &spRendererSink) );
        IFC( spRendererSink->GetStreamSinkById(0, &spRendererStreamSink) );
        IFC( pRendererNode->SetObject(spRendererStreamSink) );
    }
    else if(MFMediaType_Video == gidMajorType)
    {
        HWND hVideoWindow;
        IFC( m_spVideoWindowHandler->GetVideoWindow((LONG_PTR*) &hVideoWindow) );
        IFC( MFCreateVideoRendererActivate(hVideoWindow, &spRendererActivate) );
        IFC( spRendererActivate->ActivateObject(IID_IMFMediaSink, (void**) &spRendererSink) );
        IFC( spRendererSink->GetStreamSinkById(0, &spRendererStreamSink) );
        IFC( pRendererNode->SetObject(spRendererStreamSink) );
    }
    else
    {
        // Do not have renderers for any other major types
    }
    
    *ppRendererNode = pRendererNode;
Cleanup:
    return hr;
}
// Switches the active audio output endpoint to the named device.
//
// Creates a fresh audio-renderer activate, points it at the device's
// endpoint ID (or the default multimedia role when the stored ID is null),
// and swaps it in for the previous activate. No-op if the name is already
// active or unknown.
//
// Fix: the activate object was leaked when setting the endpoint attribute
// failed; it is now Released on that error path.
void MFAudioEndpointControl::setActiveOutput(const QString &name)
{
    if (m_activeEndpoint == name)
        return;
    QMap<QString, LPWSTR>::iterator it = m_devices.find(name);
    if (it == m_devices.end())
        return;

    LPWSTR wstrID = *it;
    IMFActivate *activate = NULL;
    HRESULT hr = MFCreateAudioRendererActivate(&activate);
    if (FAILED(hr)) {
        qWarning() << "Failed to create audio renderer activate";
        return;
    }

    if (wstrID) {
        hr = activate->SetString(MF_AUDIO_RENDERER_ATTRIBUTE_ENDPOINT_ID, wstrID);
    } else {
        //This is the default one that has been inserted in updateEndpoints(),
        //so give the activate a hint that we want to use the device for multimedia playback
        //then the media foundation will choose an appropriate one.

        //from MSDN:
        //The ERole enumeration defines constants that indicate the role that the system has assigned to an audio endpoint device.
        //eMultimedia: Music, movies, narration, and live music recording.
        hr = activate->SetUINT32(MF_AUDIO_RENDERER_ATTRIBUTE_ENDPOINT_ROLE, eMultimedia);
    }

    if (FAILED(hr)) {
        qWarning() << "Failed to set attribute for audio device" << name;
        activate->Release();   // don't leak the activate on this error path
        return;
    }

    if (m_currentActivate)
        m_currentActivate->Release();
    m_currentActivate = activate;
    m_activeEndpoint = name;
}
// 예제 #5 (Example #5)
// 0
// 파일: player.cpp  프로젝트: sfpgmr/2dx  (File: player.cpp, Project: sfpgmr/2dx)
    // Builds the sink activation object for one source stream: the audio
    // renderer (SAR) for audio streams, the video renderer for video streams.
    // Errors are reported by THROW_IF_ERR; the caller never sees a null result.
    IMFActivatePtr CreateMediaSinkActivate(
      IMFStreamDescriptor* pSourceSD,     // Pointer to the stream descriptor.
      HWND hVideoWindow                 // Handle to the video clipping window.
      )
    {
      // Ask the stream for its type handler, then for the major media type.
      IMFMediaTypeHandlerPtr spHandler;
      THROW_IF_ERR(pSourceSD->GetMediaTypeHandler(spHandler.GetAddressOf()));

      GUID majorType;
      THROW_IF_ERR(spHandler->GetMajorType(&majorType));

      // Pick the renderer activation object matching the stream type.
      IMFActivatePtr spActivate;
      if (majorType == MFMediaType_Audio)
      {
        // Audio stream -> Streaming Audio Renderer.
        THROW_IF_ERR(MFCreateAudioRendererActivate(spActivate.GetAddressOf()));
      }
      else if (majorType == MFMediaType_Video)
      {
        // Video stream -> video renderer bound to the clipping window.
        THROW_IF_ERR(MFCreateVideoRendererActivate(hVideoWindow, spActivate.GetAddressOf()));
      }
      else
      {
        // Unknown stream type: treat as a hard failure.
        // (Alternatively the stream could be deselected instead.)
        THROW_IF_ERR(E_FAIL);
      }

      // Hand the smart pointer back to the caller.
      return spActivate;
    }