// EVR専用 : 入力ストリーム数を設定する(通常は1) bool DirectShowUtil::MF_SetNumberOfStreams(IBaseFilter *pEvr,int iStreamNumber) { IEVRFilterConfig *pFilterConfig = MF_GetEVRFilterConfig(pEvr); if(pFilterConfig) { pFilterConfig->SetNumberOfStreams(iStreamNumber); SAFE_RELEASE(pFilterConfig); return true; } return false; }
/* * Class: sage_DShowMediaPlayer * Method: setVideoRendererFilter0 * Signature: (JLjava/lang/String;Ljava/util/Map;)V */ JNIEXPORT void JNICALL Java_sage_DShowMediaPlayer_setVideoRendererFilter0 (JNIEnv *env, jobject jo, jlong dataPtr, jstring jfilterName, jobject jfilterOptions) { if (jfilterName == NULL || env->GetStringLength(jfilterName) == 0 || !dataPtr){return;} CPlayerData* pData = (CPlayerData*) dataPtr; const char* cName = env->GetStringUTFChars(jfilterName, NULL); slog((env, "DShowPlayer setVideoRendererFilter0(%s) called\r\n", cName)); CComPtr<IBaseFilter> pFilter = NULL; HRESULT hr = FindFilterByName(&pFilter, CLSID_LegacyAmFilterCategory, cName); env->ReleaseStringUTFChars(jfilterName, cName); BOOL vmr9Config = FALSE; BOOL evrConfig = FALSE; if (SUCCEEDED(hr) && jfilterOptions) { jint dxvaMode = 0; jint dxvaDeinterlace = 0; GetMapIntValue(env, jfilterOptions, "dxva_mpeg_mode", &dxvaMode); GetMapIntValue(env, jfilterOptions, "force_deinterlace", &dxvaDeinterlace); pData->SetDXVAParameters(dxvaMode, dxvaDeinterlace); jboolean ccOK = JNI_TRUE; GetMapBoolValue(env, jfilterOptions, "enable_cc", &ccOK); if (!ccOK) pData->DisableCC(); // Get the DX9 device pointers, if they don't exist we can't use our custom VMR9 renderer jlong jD3D = 0; jlong jD3DDevice = 0; if (GetMapLongValue(env, jfilterOptions, "d3d_object_ptr", &jD3D) && GetMapLongValue(env, jfilterOptions, "d3d_device_ptr", &jD3DDevice)) { IDirect3D9* pD3D = (IDirect3D9*) jD3D; IDirect3DDevice9* pD3DDevice = (IDirect3DDevice9*) jD3DDevice; // Set the rendering mode and number of streams. 
CComPtr<IVMRFilterConfig9> pConfig = NULL; // See if it's EVR or VMR hr = pFilter->QueryInterface(IID_IVMRFilterConfig9, (void**)&(pConfig.p)); if (SUCCEEDED(hr)) { slog((env, "Using VMR9 for video rendering\r\n")); hr = pConfig->SetRenderingMode(VMR9Mode_Renderless); PLAYEXCEPT_RET(sage_PlaybackException_DIRECTX_INSTALL); /* * NOTE: If we don't set the number of streams than we don't get the optimal * format types as choices and end up using a private texture when we don't need to. * I think this is because certain features of the * VMR are not available in mixing mode or something like that. * Update: 10/12/2004 - I have now learned that when you put the VMR9 * into mixing mode that it will then use the D3DRenderTarget itself * to do the mixing. I saw a usenet post of the exact VMR9 corruption * problem I was having where the OSD was showing up on the video frame surface. * By not setting the number of streams I keep the VMR9 in Renderless non-mixing mode. * BUT this has the downside of breaking CC support for the VMR9 so we have a registry * setting to allow this. * 10/13/04 - The first problem came back where the format types are wrong. No idea * why this was working fine yesterday. */ if (GetRegistryDword(HKEY_LOCAL_MACHINE, "Software\\Frey Technologies\\SageTV\\DirectX9", "AllowCCForVMR9", 1) && ccOK) { // NOTE: We changed this from 2 to 3 because on Vista you need another input // to handle subpicture blending for DVD playback. And I don't believe there's any // negative to having 3 instead of 2; the big difference is between 1 and 2. 
hr = pConfig->SetNumberOfStreams(3); // video + CC + subpicture PLAYEXCEPT_RET(sage_PlaybackException_DIRECTX_INSTALL); } else { hr = pConfig->SetNumberOfStreams(1); PLAYEXCEPT_RET(sage_PlaybackException_DIRECTX_INSTALL); } CComPtr<IVMRSurfaceAllocatorNotify9> lpIVMRSurfAllocNotify = NULL; pFilter->QueryInterface(IID_IVMRSurfaceAllocatorNotify9, (void**)&(lpIVMRSurfAllocNotify.p)); // create our surface allocator CVMRAllocator* myVMRAllocator = new CVMRAllocator(hr, pD3D, pD3DDevice); PLAYEXCEPT_RET(sage_PlaybackException_SAGETV_INSTALL); pData->SetVMR9Allocator(myVMRAllocator); // let the allocator and the notify know about each other hr = lpIVMRSurfAllocNotify->AdviseSurfaceAllocator(0xCAFEBABE, myVMRAllocator); HTESTPRINT(hr); hr = myVMRAllocator->AdviseNotify(lpIVMRSurfAllocNotify); HTESTPRINT(hr); hr = S_OK; vmr9Config = TRUE; } else { slog((env, "Using EVR for video render\r\n")); evrConfig = TRUE; hr = S_OK; } } } if (SUCCEEDED(hr)) hr = pData->SetVideoRenderer(pFilter); if (SUCCEEDED(hr) && evrConfig) { // Configure the EVR presenter after we add the EVR to the filter graph jlong jD3DDevMgr = 0; if (GetMapLongValue(env, jfilterOptions, "d3d_device_mgr", &jD3DDevMgr)) { CComPtr<IMFVideoRenderer> lpIMFVideoRenderer = NULL; hr = pFilter->QueryInterface(IID_IMFVideoRenderer, (void**)&(lpIMFVideoRenderer.p)); HTESTPRINT(hr); if (SUCCEEDED(hr)) { // Configure EVR to use our custom presenter CComPtr<IMFVideoPresenter> lpIMFVideoPresenter = NULL; hr = CoCreateInstance(CLSID_CustomEVRPresenter, NULL, CLSCTX_INPROC_SERVER, IID_IMFVideoPresenter, (void**)&(lpIMFVideoPresenter.p)); HTESTPRINT(hr); if (SUCCEEDED(hr)) { // Set the Direct3D device pointer IDirect3DDeviceManager9* pD3DDevMgr = (IDirect3DDeviceManager9*) jD3DDevMgr; ISTVEVRPrstr* pMyEvr = NULL; lpIMFVideoPresenter->QueryInterface(IID_ISTVEVRPrstr, (void**)&pMyEvr); pMyEvr->set_D3DDeviceMgr(pD3DDevMgr); hr = lpIMFVideoRenderer->InitializeRenderer(NULL, lpIMFVideoPresenter); HTESTPRINT(hr); 
SAFE_RELEASE(pMyEvr); IEVRFilterConfig* pEvrConfig = NULL; hr = pFilter->QueryInterface(IID_IEVRFilterConfig, (void**)&pEvrConfig); HTESTPRINT(hr); // Try three inputs for now; one for video, one for CC and one for subpicture // But only use 3 on Vista by default since we've seen issues w/ it on XP OSVERSIONINFOEX osInfo; ZeroMemory(&osInfo, sizeof(OSVERSIONINFOEX)); osInfo.dwOSVersionInfoSize = sizeof(OSVERSIONINFOEX); DWORD evrInputsDefault = 1; if (GetVersionEx((LPOSVERSIONINFO)&osInfo)) { if (osInfo.dwMajorVersion >= 6) evrInputsDefault = 3; } DWORD evrInputs = GetRegistryDword(HKEY_LOCAL_MACHINE, "Software\\Frey Technologies\\SageTV\\DirectX9", "EVRInputPins", evrInputsDefault); slog((env, "Using %d input pins on the EVR\r\n", (int)evrInputs)); pEvrConfig->SetNumberOfStreams(evrInputs < 1 ? 1 : (evrInputs > 3 ? 3 : evrInputs)); SAFE_RELEASE(pEvrConfig); slog((env, "Finished with EVR configuration OK\r\n")); } } } } else if (SUCCEEDED(hr) && vmr9Config) { IVMRDeinterlaceControl9* pVmrDeint = NULL; hr = pFilter->QueryInterface(IID_IVMRDeinterlaceControl9, (void**)&pVmrDeint); if (SUCCEEDED(hr)) { slog(("Setting up VMR9 deinterlacing\r\n")); hr = pVmrDeint->SetDeinterlacePrefs(DeinterlacePref9_NextBest); HTESTPRINT(hr); /* VMR9VideoDesc VideoDesc; DWORD dwNumModes = 0; AM_MEDIA_TYPE vmrConn; hr = renderInput->ConnectionMediaType(&vmrConn); HTESTPRINT(hr); if (vmrConn.formattype == FORMAT_VideoInfo2) { VIDEOINFOHEADER2* vih2 = (VIDEOINFOHEADER2*) vmrConn.pbFormat; // Fill in the VideoDesc structure VideoDesc.dwSize = sizeof(VMR9VideoDesc); VideoDesc.dwSampleWidth = vih2->bmiHeader.biWidth; VideoDesc.dwSampleHeight = vih2->bmiHeader.biHeight; VideoDesc.SampleFormat = ConvertInterlaceFlags(vih2->dwInterlaceFlags); VideoDesc.dwFourCC = vih2->bmiHeader.biCompression; VideoDesc.InputSampleFreq.dwNumerator = 30000; VideoDesc.InputSampleFreq.dwDenominator = 1001; VideoDesc.OutputFrameFreq.dwNumerator = 60000; VideoDesc.OutputFrameFreq.dwDenominator = 1001; hr = 
pVmrDeint->GetNumberOfDeinterlaceModes(&VideoDesc, &dwNumModes, NULL); HTESTPRINT(hr); if (SUCCEEDED(hr) && (dwNumModes != 0)) { // Allocate an array for the GUIDs that identify the modes. GUID *pModes = new GUID[dwNumModes]; if (pModes) { // Fill the array. hr = pVmrDeint->GetNumberOfDeinterlaceModes(&VideoDesc, &dwNumModes, pModes); if (SUCCEEDED(hr)) { HTESTPRINT(hr); // Loop through each item and get the capabilities. for (DWORD i = 0; i < dwNumModes; i++) { VMR9DeinterlaceCaps Caps; ZeroMemory(&Caps, sizeof(Caps)); Caps.dwSize = sizeof(VMR9DeinterlaceCaps); hr = pVmrDeint->GetDeinterlaceModeCaps(&(pModes[i]), &VideoDesc, &Caps); HTESTPRINT(hr); if (SUCCEEDED(hr)) { if (Caps.DeinterlaceTechnology == DeinterlaceTech9_BOBLineReplicate) slog(("VM9Deinterlacing Tech: BOBLineReplicate\r\n")); else if (Caps.DeinterlaceTechnology == DeinterlaceTech9_BOBVerticalStretch) slog(("VM9Deinterlacing Tech: BOBVerticalStretch\r\n")); else if (Caps.DeinterlaceTechnology == DeinterlaceTech9_MedianFiltering) slog(("VM9Deinterlacing Tech: MedianFiltering\r\n")); else if (Caps.DeinterlaceTechnology == DeinterlaceTech9_EdgeFiltering) slog(("VM9Deinterlacing Tech: EdgeFiltering\r\n")); else if (Caps.DeinterlaceTechnology == DeinterlaceTech9_FieldAdaptive) slog(("VM9Deinterlacing Tech: FieldAdaptive\r\n")); else if (Caps.DeinterlaceTechnology == DeinterlaceTech9_PixelAdaptive) slog(("VM9Deinterlacing Tech: PixelAdaptive\r\n")); else if (Caps.DeinterlaceTechnology == DeinterlaceTech9_MotionVectorSteered) slog(("VM9Deinterlacing Tech: MotionVectorSteered\r\n")); else slog(("VM9Deinterlacing Tech: Proprietary...\r\n")); } } if (dwNumModes) { // hr = pVmrDeint->SetDeinterlaceMode(0, pModes); // HTESTPRINT(hr); } } delete [] pModes; } } FreeMediaType(vmrConn); } GUID realDeint; hr = pVmrDeint->GetActualDeinterlaceMode(0, &realDeint); LPOLESTR psz; StringFromCLSID(realDeint, &psz); char conv[64]; WideCharToMultiByte(CP_ACP, 0, psz, -1, conv, 64, 0, 0); CoTaskMemFree(psz); slog(("Actual 
deinterlace: hr=0x%x guid=%s\r\n", hr, conv)); GUID setDeint; hr = pVmrDeint->GetDeinterlaceMode(0, &setDeint); StringFromCLSID(setDeint, &psz); WideCharToMultiByte(CP_ACP, 0, psz, -1, conv, 64, 0, 0); CoTaskMemFree(psz); slog(("deinterlace mode: hr=0x%x guid=%s\r\n", hr, conv)); if (hr == S_FALSE) { slog(("Setting deinterlace mode to actual mode...\r\n")); hr = pVmrDeint->SetDeinterlaceMode(0, &realDeint); hr = pVmrDeint->GetDeinterlaceMode(0, &setDeint); StringFromCLSID(setDeint, &psz); WideCharToMultiByte(CP_ACP, 0, psz, -1, conv, 64, 0, 0); CoTaskMemFree(psz); slog(("deinterlace mode: hr=0x%x guid=%s\r\n", hr, conv)); } */ pVmrDeint->Release(); } DWORD vmrMixMode = GetRegistryDword(HKEY_LOCAL_MACHINE, "Software\\Frey Technologies\\SageTV\\DirectX9", "YUVMixing", 1); if (vmrMixMode) { IVMRMixerControl9* vmrMix = NULL; hr = pFilter->QueryInterface(IID_IVMRMixerControl9, (void**)&vmrMix); if (SUCCEEDED(hr)) { DWORD currPrefs = 0; hr = vmrMix->GetMixingPrefs(&currPrefs); slog((env, "Curr Mix Prefs=0x%x\r\n", currPrefs)); currPrefs &= ~MixerPref9_RenderTargetMask; currPrefs |= MixerPref9_RenderTargetYUV; hr = vmrMix->SetMixingPrefs(currPrefs); vmrMixMode = currPrefs; vmrMix->GetMixingPrefs(&currPrefs); slog((env, "Set to 0x%x, hr=0x%x, New Mix Prefs=0x%x\r\n", vmrMixMode, hr, currPrefs)); vmrMix->Release(); } else HTESTPRINT(hr); } } if (FAILED(hr)) { elog((env, "Could not add specified video rendering filter to graph hr=0x%x\r\n", hr)); } }
// Builds the EVR rendering section of the filter graph:
// creates the EVR filter, limits it to one input stream, adds it to the graph,
// renders pInputPin into it, then configures the display control and mixer.
// Returns false (with SetError set) on any failure; the error path releases
// everything acquired so far via the OnError label.
bool CVideoRenderer_EVR::Initialize(IGraphBuilder *pFilterGraph,IPin *pInputPin,HWND hwndRender,HWND hwndMessageDrain)
{
#ifdef EVR_USE_VIDEO_WINDOW
	// Register our video window class once per process and create a child
	// window inside hwndRender that the EVR will draw into.
	static bool fRegistered=false;
	HINSTANCE hinst=GetWindowInstance(hwndRender);
	if (!fRegistered) {
		WNDCLASS wc;
		wc.style=CS_DBLCLKS | CS_HREDRAW | CS_VREDRAW;
		wc.lpfnWndProc=VideoWndProc;
		wc.cbClsExtra=0;
		wc.cbWndExtra=0;
		wc.hInstance=hinst;
		wc.hIcon=NULL;
		wc.hCursor=NULL;
		wc.hbrBackground=CreateSolidBrush(RGB(0,0,0)); // black background behind video
		wc.lpszMenuName=NULL;
		wc.lpszClassName=EVR_VIDEO_WINDOW_CLASS;
		if (::RegisterClass(&wc)==0) {
			// "Cannot register the EVR window class."
			SetError(TEXT("EVRウィンドウクラスを登録できません。"));
			return false;
		}
		fRegistered=true;
	}
	m_hwndVideo=::CreateWindowEx(0,EVR_VIDEO_WINDOW_CLASS,NULL,
		WS_CHILD | WS_VISIBLE | WS_CLIPSIBLINGS,0,0,0,0,
		hwndRender,NULL,hinst,this);
	if (m_hwndVideo==NULL) {
		// "Cannot create the EVR window."
		SetError(TEXT("EVRウィンドウを作成できません。"));
		return false;
	}
#endif
	HRESULT hr;
	// Apparently MFStartup does not need to be called for the EVR.
	/* m_hMFPlatLib=::LoadLibrary(TEXT("mfplat.dll")); if (m_hMFPlatLib==NULL) { SetError(TEXT("mfplat.dllをロードできません。")); return false; } MFStartupFunc pStartup=reinterpret_cast<MFStartupFunc>(::GetProcAddress(m_hMFPlatLib,"MFStartup")); if (pStartup==NULL) { SetError(TEXT("MFStartup関数のアドレスを取得できません。")); goto OnError; } hr=pStartup(MF_VERSION,MFSTARTUP_LITE); if (FAILED(hr)) { SetError(TEXT("Media Foundationの初期化ができません。")); goto OnError; } */
	// Create the EVR filter instance.
	hr=::CoCreateInstance(CLSID_EnhancedVideoRenderer,NULL,CLSCTX_INPROC_SERVER,
		IID_IBaseFilter,reinterpret_cast<LPVOID*>(&m_pRenderer));
	if (FAILED(hr)) {
		// "Cannot create an EVR instance." / "The system may not support EVR."
		SetError(hr,TEXT("EVRのインスタンスを作成できません。"),
			TEXT("システムがEVRに対応していない可能性があります。"));
		goto OnError;
	}
	// Configure the EVR for a single input stream before connecting it.
	IEVRFilterConfig *pFilterConfig;
	hr=m_pRenderer->QueryInterface(IID_IEVRFilterConfig,reinterpret_cast<LPVOID*>(&pFilterConfig));
	if (FAILED(hr)) {
		// "Cannot obtain IEVRFilterConfig."
		SetError(hr,TEXT("IEVRFilterConfigを取得できません。"));
		goto OnError;
	}
	pFilterConfig->SetNumberOfStreams(1);
	pFilterConfig->Release();
	hr=pFilterGraph->AddFilter(m_pRenderer,L"EVR");
	if (FAILED(hr)) {
		// "Cannot add the EVR to the filter graph."
		SetError(hr,TEXT("EVRをフィルタグラフに追加できません。"));
		goto OnError;
	}
	// Render the input pin into the existing EVR only (no new renderers).
	IFilterGraph2 *pFilterGraph2;
	hr=pFilterGraph->QueryInterface(IID_IFilterGraph2,
		reinterpret_cast<LPVOID*>(&pFilterGraph2));
	if (FAILED(hr)) {
		// "Cannot obtain IFilterGraph2."
		SetError(hr,TEXT("IFilterGraph2を取得できません。"));
		goto OnError;
	}
	hr=pFilterGraph2->RenderEx(pInputPin,
		AM_RENDEREX_RENDERTOEXISTINGRENDERERS,NULL);
	pFilterGraph2->Release();
	if (FAILED(hr)) {
		// "Cannot build the video renderer."
		SetError(hr,TEXT("映像レンダラを構築できません。"));
		goto OnError;
	}
	// EVR services (display control, video processor) are reached via IMFGetService.
	IMFGetService *pGetService;
	hr=m_pRenderer->QueryInterface(IID_IMFGetService,reinterpret_cast<LPVOID*>(&pGetService));
	if (FAILED(hr)) {
		// "Cannot obtain IMFGetService."
		SetError(hr,TEXT("IMFGetServiceを取得できません。"));
		goto OnError;
	}
	IMFVideoDisplayControl *pDisplayControl;
	hr=pGetService->GetService(MR_VIDEO_RENDER_SERVICE,IID_IMFVideoDisplayControl,reinterpret_cast<LPVOID*>(&pDisplayControl));
	if (FAILED(hr)) {
		pGetService->Release();
		// "Cannot obtain IMFVideoDisplayControl."
		SetError(hr,TEXT("IMFVideoDisplayControlを取得できません。"));
		goto OnError;
	}
#ifdef EVR_USE_VIDEO_WINDOW
	pDisplayControl->SetVideoWindow(m_hwndVideo);
#else
	pDisplayControl->SetVideoWindow(hwndRender);
#endif
	// Aspect-ratio correction is handled elsewhere, so let the EVR stretch freely.
	pDisplayControl->SetAspectRatioMode(MFVideoARMode_None);
	/* RECT rc; ::GetClientRect(hwndRender,&rc); pDisplayControl->SetVideoPosition(NULL,&rc); */
	pDisplayControl->SetBorderColor(RGB(0,0,0));
	pDisplayControl->Release();
	IMFVideoProcessor *pVideoProcessor;
	hr=pGetService->GetService(MR_VIDEO_MIXER_SERVICE,IID_IMFVideoProcessor,reinterpret_cast<LPVOID*>(&pVideoProcessor));
	if (FAILED(hr)) {
		pGetService->Release();
		// "Cannot obtain IMFVideoProcessor."
		SetError(hr,TEXT("IMFVideoProcessorを取得できません。"));
		goto OnError;
	}
	pVideoProcessor->SetBackgroundColor(RGB(0,0,0));
	// Disabled: experimental selection of a hardware video processor mode.
	/* UINT NumModes; GUID *pProcessingModes; if (SUCCEEDED(pVideoProcessor->GetAvailableVideoProcessorModes(&NumModes,&pProcessingModes))) { #ifdef _DEBUG for (UINT i=0;i<NumModes;i++) { DXVA2_VideoProcessorCaps Caps; if (SUCCEEDED(pVideoProcessor->GetVideoProcessorCaps(&pProcessingModes[i],&Caps))) { TRACE(TEXT("EVR Video Processor %u\n"),i); TRACE(TEXT("DeviceCaps : %s\n"), Caps.DeviceCaps==DXVA2_VPDev_EmulatedDXVA1? TEXT("DXVA2_VPDev_EmulatedDXVA1"): Caps.DeviceCaps==DXVA2_VPDev_HardwareDevice? TEXT("DXVA2_VPDev_HardwareDevice"): Caps.DeviceCaps==DXVA2_VPDev_SoftwareDevice? TEXT("DXVA2_VPDev_SoftwareDevice"):TEXT("Unknown")); } } #endif for (UINT i=0;i<NumModes;i++) { DXVA2_VideoProcessorCaps Caps; if (SUCCEEDED(pVideoProcessor->GetVideoProcessorCaps(&pProcessingModes[i],&Caps))) { if (Caps.DeviceCaps==DXVA2_VPDev_HardwareDevice) { pVideoProcessor->SetVideoProcessorMode(&pProcessingModes[i]); break; } } } ::CoTaskMemFree(pProcessingModes); } */
	pVideoProcessor->Release();
	pGetService->Release();
	// Success: remember the graph/window handles for later use.
	m_pFilterGraph=pFilterGraph;
	m_hwndRender=hwndRender;
#ifdef EVR_USE_VIDEO_WINDOW
	m_hwndMessageDrain=hwndMessageDrain;
#endif
	ClearError();
	return true;
OnError:
	// Common failure path: release the renderer and tear down the video window.
	// NOTE(review): the EVR filter is not removed from the graph here if
	// AddFilter already succeeded — presumably the graph teardown handles it;
	// confirm against the caller.
	SAFE_RELEASE(m_pRenderer);
#ifdef EVR_USE_VIDEO_WINDOW
	::DestroyWindow(m_hwndVideo);
	m_hwndVideo=NULL;
#endif
	/* if (m_hMFPlatLib) { ::FreeLibrary(m_hMFPlatLib); m_hMFPlatLib=NULL; } */
	return false;
}