/*
 * Class:     sage_DShowDVDPlayer
 * Method:    setupGraph0
 * Signature: (JLjava/lang/String;Ljava/lang/String;ZZ)V
 */
JNIEXPORT void JNICALL Java_sage_DShowDVDPlayer_setupGraph0
	(JNIEnv *env, jobject jo, jlong dataPtr, jstring jFilename, jstring jhostname, 
		jboolean renderVideo, jboolean renderAudio)
{
	NPE_RET(dataPtr);
	try
	{
		CDVDPlayerData* playerData = dynamic_cast<CDVDPlayerData*>((CPlayerData*) dataPtr);
		const jchar* wszFilename = jFilename ? env->GetStringChars(jFilename, NULL) : NULL;
		HRESULT hr = playerData->SetDVDLocation((const WCHAR*) wszFilename);
		if (wszFilename)
			env->ReleaseStringChars(jFilename, wszFilename);
		PLAYEXCEPT_RET(sage_PlaybackException_FILESYSTEM);

		if (renderAudio)
		{
			hr = playerData->RenderAudio();
			PLAYEXCEPT_RET(sage_PlaybackException_AUDIO_RENDER);
		}
		if (renderVideo)
		{
			hr = playerData->RenderVideo();
			PLAYEXCEPT_RET(sage_PlaybackException_VIDEO_RENDER);
			hr = playerData->RenderSubPicture();
			HTESTPRINT(hr); // if there's no subpicture we're OK and shouldn't fail
			hr = playerData->RenderCC();
			HTESTPRINT(hr); // if there's no CC we're OK and shouldn't fail
		}
		LogFilterGraphInfo(playerData->GetGraph());
	}
	catch (...)
	{
		throwPlaybackException(env, sage_PlaybackException_DIRECTX_INSTALL, 0);
	}
}
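NPE_RET, PLAYEXCEPT_RET and HTESTPRINT are SageTV helper macros whose definitions are not part of this listing. Purely as an assumption inferred from how they are used above (not the real definitions), their shape is roughly:

// Hypothetical reconstructions, based only on the call sites above; the real
// SageTV macros may differ. 'hr' and 'env' are expected to be in scope.
#define NPE_RET(ptr) \
	if (!(ptr)) { return; }             // assumed: bail out (likely after throwing) on a null native pointer

#define PLAYEXCEPT_RET(exceptType) \
	if (FAILED(hr)) { throwPlaybackException(env, (exceptType), hr); return; } // map a failed HRESULT to a Java PlaybackException

#define HTESTPRINT(hr) \
	if (FAILED(hr)) { slog((env, "HRESULT failure 0x%x\r\n", (int)(hr))); }    // log the failure but keep going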
Example #2
/*
 * Class:     sage_DShowMediaPlayer
 * Method:    setNotificationWindow0
 * Signature: (JJ)V
 */
JNIEXPORT void JNICALL Java_sage_DShowMediaPlayer_setNotificationWindow0
  (JNIEnv *env, jobject jo, jlong dataPtr, jlong notifyHwnd)
{
	if (!dataPtr) return;
	CPlayerData* playerData = (CPlayerData*) dataPtr;
	IGraphBuilder* pGraph = playerData->GetGraph();
	if (!pGraph) return;
	IMediaEventEx *pME = NULL;
	pGraph->QueryInterface(IID_IMediaEventEx, (void**)&pME);
	if (!pME) return;
	HRESULT hr = pME->SetNotifyWindow((OAHWND)notifyHwnd, WM_DVD_EVENT, 0);
	HTESTPRINT(hr);
	SAFE_RELEASE(pME);
}
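SetNotifyWindow only arranges for WM_DVD_EVENT to be posted to notifyHwnd; the window that receives it still has to drain the graph's event queue. A minimal sketch of that handler, assuming the window procedure keeps hold of the same IMediaEventEx pointer:

// Hypothetical helper called from the notification window's WndProc on WM_DVD_EVENT.
static void DrainGraphEvents(IMediaEventEx* pME)
{
	if (!pME) return;
	long evCode;
	LONG_PTR param1, param2;
	// A zero timeout makes GetEvent fail as soon as the queue is empty.
	while (SUCCEEDED(pME->GetEvent(&evCode, &param1, &param2, 0)))
	{
		// React to evCode here (EC_COMPLETE, EC_ERRORABORT, DVD domain changes, ...)
		pME->FreeEventParams(evCode, param1, param2);
	}
}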
Example #3
/*
 * Class:     sage_DShowMediaPlayer
 * Method:    setVideoHWND0
 * Signature: (JJ)V
 */
JNIEXPORT void JNICALL Java_sage_DShowMediaPlayer_setVideoHWND0
  (JNIEnv *env, jobject jo, jlong dataPtr, jlong vhwnd)
{
	if (!dataPtr) return;
	CPlayerData* playData = (CPlayerData*) dataPtr;
	IGraphBuilder* pGraph = playData->GetGraph();
	if (!pGraph) return;
	IVideoWindow* pVW = NULL;
	HRESULT hr = pGraph->QueryInterface(IID_IVideoWindow, (void**)&pVW);
	if (SUCCEEDED(hr))
	{
		slog((env, "DShowPlayer setVideoHWND(%d)\r\n", (int) vhwnd));
		pVW->put_AutoShow(OAFALSE);
		pVW->put_Owner((OAHWND)vhwnd);
		pVW->put_MessageDrain((OAHWND)vhwnd);
		pVW->put_WindowStyle(WS_CHILD | WS_CLIPSIBLINGS | WS_CLIPCHILDREN);
		pVW->put_Visible(OATRUE);

		// We do all of our own aspect ratio control, so don't let DShow do any for us
		// by setting the aspect ratio mode on the video rendering filter's pin
		IEnumFilters *pEnum = NULL;
		hr = pGraph->EnumFilters(&pEnum);
		if (SUCCEEDED(hr))
		{
			IBaseFilter *currFilt = NULL;
			while (pEnum->Next(1, &currFilt, NULL) == S_OK)
			{
				IPin *overlayPin = NULL;
				hr = currFilt->FindPin(L"Input0", &overlayPin);
				if (SUCCEEDED(hr))
				{
					// Right pin name, let's see if it's overlay
					IMixerPinConfig *pOverlayMix = NULL;
					hr = overlayPin->QueryInterface(IID_IMixerPinConfig, (void**)&pOverlayMix);
					if (SUCCEEDED(hr))
					{
						pOverlayMix->SetAspectRatioMode(AM_ARMODE_STRETCHED);
						SAFE_RELEASE(pOverlayMix);
					}
					SAFE_RELEASE(overlayPin);
				}
				SAFE_RELEASE(currFilt);
			}
			SAFE_RELEASE(pEnum);
			hr = S_OK;
		}
		SAFE_RELEASE(pVW);
	}
	HTESTPRINT(hr);
}
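The ownership set here has a matching teardown requirement: before the HWND is destroyed or the graph released, DirectShow expects the owner to be cleared, otherwise it can keep sending messages to a window that no longer exists. A minimal cleanup sketch (not part of this listing) would be:

// Hypothetical cleanup mirroring setVideoHWND0; pGraph is the same IGraphBuilder.
static void DetachVideoWindow(IGraphBuilder* pGraph)
{
	if (!pGraph) return;
	IVideoWindow* pVW = NULL;
	if (SUCCEEDED(pGraph->QueryInterface(IID_IVideoWindow, (void**)&pVW)) && pVW)
	{
		pVW->put_Visible(OAFALSE);           // hide the video window first
		pVW->put_MessageDrain((OAHWND)NULL); // stop forwarding input messages
		pVW->put_Owner((OAHWND)NULL);        // hand ownership back before the HWND goes away
		SAFE_RELEASE(pVW);
	}
}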
Example #4
/*
 * Class:     sage_DShowCaptureDevice
 * Method:    switchToConnector0
 * Signature: (JIILjava/lang/String;II)V
 */
JNIEXPORT void JNICALL Java_sage_DShowCaptureDevice_switchToConnector0
  (JNIEnv *env, jobject jo, jlong capInfo, jint crossType, jint crossIndex, jstring tuningMode,
  jint countryCode, jint videoFormatCode)
{
	char szTuningMode[16];
	if (!capInfo) return;
	DShowCaptureInfo* pCapInfo = (DShowCaptureInfo*) capInfo;
	pCapInfo->videoFormatCode = videoFormatCode;

	const char* pTuningMode = env->GetStringUTFChars(tuningMode, NULL);
	strncpy( szTuningMode, pTuningMode, sizeof(szTuningMode) );
	szTuningMode[sizeof(szTuningMode) - 1] = '\0'; // strncpy won't terminate if the mode name fills the buffer
	env->ReleaseStringUTFChars(tuningMode, pTuningMode);
	slog((env, "switchToConnector0 tuningMode:%s.\r\n", szTuningMode ));

	if ( String2TVType( szTuningMode ) && BDATypeNum( pCapInfo->captureConfig ) > 0 ) //ZQ REMOVE ME
	{
		TV_TYPE newBDAType = String2TVType( szTuningMode );
		if ( pCapInfo->dwBDAType != newBDAType && pCapInfo->dwBDAType > 0 )
		{
			int i, CaptureNum = pCapInfo->captureNum;
			for ( i = 0; i < CaptureNum; i++ )
				if ( pCapInfo->captures[i] && pCapInfo->captures[i]->dwBDAType == pCapInfo->dwBDAType )
					break;
			if ( i >= CaptureNum )
			{
				slog((env, "switchToConnector0 ERROR: Orignal BDA Capture :%d is not found\r\n",  pCapInfo->dwBDAType ));
				ASSERT( 0 );
				return;
			}

			//save back
			memcpy( pCapInfo->captures[i], pCapInfo,  sizeof(DShowCaptureInfo) );

			for ( i = 0; i < CaptureNum; i++ )
				if ( pCapInfo->captures[i] && pCapInfo->captures[i]->dwBDAType == newBDAType )
					break;

			if ( i >= CaptureNum )
			{
				slog((env, "switchToConnector0 ERROR: BDA Capture :%s is not found\r\n",  szTuningMode ));
				ASSERT( 0 );
				return;
			}
			memcpy( pCapInfo, pCapInfo->captures[i], sizeof(DShowCaptureInfo) );
			setChannelDev( (CHANNEL_DATA*)pCapInfo->channel, (void*)pCapInfo );
			slog((env, "switchToConnector0 BDA Capture :%s is switched.\r\n",  szTuningMode ));

		}

		//strncpy( pCapInfo->tvType, szTuningMode, sizeof(pCapInfo->tvType) );
		return;
	}

	if (!pCapInfo->pCrossbar)
		return;
	slog((env, "switchToConnector0 %d type:%d index:%d country:%d format:%d Mode:%s\r\n", 
			(int)capInfo, crossType, crossIndex, countryCode, videoFormatCode, szTuningMode ));


	strncpy( pCapInfo->TuningMode, szTuningMode, sizeof(pCapInfo->TuningMode) );
	// Setup the tuner first since it's upstream from the crossbar
	if (crossType == 1 && pCapInfo->pTVTuner)
	{
		IAMTVTuner* pTunerProps = NULL;
		HRESULT hr = pCapInfo->pTVTuner->QueryInterface(IID_IAMTVTuner, (void**)&pTunerProps);
		if (SUCCEEDED(hr))
		{
			HRESULT ccHr = S_OK;
			if (countryCode)
			{
				long currCountry = 0;
				hr = pTunerProps->get_CountryCode(&currCountry);
				if (FAILED(hr) || currCountry != countryCode)
				{
					hr = ccHr = pTunerProps->put_CountryCode(countryCode);
					HTESTPRINT(hr);
				}
				hr = pTunerProps->put_TuningSpace(countryCode);

				HTESTPRINT(hr);
			}
			AMTunerModeType currMode;
			TunerInputType currTuneType;
			HRESULT currModehr = pTunerProps->get_Mode(&currMode);
			HTESTPRINT(currModehr);
			HRESULT currTypehr = pTunerProps->get_InputType(0, &currTuneType);
			HTESTPRINT(currTypehr);
			AMTunerModeType newMode;
			TunerInputType tuneType;
			slog((env, "Tuning mode:%s; current tuning type:%d current  tuning model:%d\r\n", pCapInfo->TuningMode, currTuneType, currMode  ));
			if (!strcmp(pCapInfo->TuningMode, "Air"))
			{
				newMode = AMTUNER_MODE_TV;
				tuneType = TunerInputAntenna;
			}
			else if (!strcmp(pCapInfo->TuningMode, "FM Radio"))
			{
				newMode = AMTUNER_MODE_FM_RADIO;
				tuneType = TunerInputAntenna;
			}
			else if (!strcmp(pCapInfo->TuningMode, "HRC"))
			{
				newMode = AMTUNER_MODE_TV;
				tuneType = (TunerInputType)2;
			}
			else
			{
				newMode = AMTUNER_MODE_TV;
				tuneType = TunerInputCable;
			}
			if (FAILED(currModehr) || newMode != currMode)
			{
				hr = pTunerProps->put_Mode(newMode);
				HTESTPRINT(hr);
			}
			if (FAILED(currTypehr) || tuneType != currTuneType)
			{
				hr = pTunerProps->put_InputType(0, tuneType);
				HTESTPRINT(hr);
			}
		
			long currConnInput = 0;
			hr = pTunerProps->get_ConnectInput(&currConnInput);
			if (FAILED(hr) || currConnInput != 0)
			{
				hr = pTunerProps->put_ConnectInput(0);
				HTESTPRINT(hr);
			}
			//long tvFormat;
			//hr = pTunerProps->get_TVFormat(&tvFormat);
//ZQ test
/*
{
	IKsPropertySet *pKSProp=NULL;

	KSPROPERTY_TUNER_STANDARD_S Standard;
	hr = pCapInfo->pTVTuner->QueryInterface(IID_IKsPropertySet, (PVOID *)&pKSProp);
	if ( SUCCEEDED(hr) )
	{
		memset(&Standard,0,sizeof(KSPROPERTY_TUNER_STANDARD_S));
		Standard.Standard=videoFormatCode;
		
		HRESULT hr=pKSProp->Set(PROPSETID_TUNER,
					KSPROPERTY_TUNER_STANDARD,
					INSTANCE_DATA_OF_PROPERTY_PTR(&Standard),	
					INSTANCE_DATA_OF_PROPERTY_SIZE(Standard),
					&Standard,	sizeof(Standard));
		if(FAILED(hr))
		{
			slog(( env, "Failed set Video Format:%d on TVTuner hr=0x%x \r\n", videoFormatCode, hr  ));
		} else
		{
			slog(( env, "Force to set Video Format:%d on TVTuner hr=0x%x \r\n", videoFormatCode, hr  ));
		}
		SAFE_RELEASE( pKSProp );
	} else
	{
		slog(( env, "Failed to get IKsPropertySet to set Video Format:%d on TVTuner hr=0x%x \r\n", videoFormatCode, hr  ));
	}
}*/
			SAFE_RELEASE(pTunerProps);
		}

		if (pCapInfo->pTVAudio)
		{
			IAMTVAudio* pAudioProps = NULL;
			hr = pCapInfo->pTVAudio->QueryInterface(IID_IAMTVAudio, (void**)&pAudioProps);
			if (SUCCEEDED(hr))
			{
				// For Vista+ there are 'PRESET' flags that we want to use instead when setting
				// the TV audio selection.
				OSVERSIONINFOEX osInfo;
				ZeroMemory(&osInfo, sizeof(OSVERSIONINFOEX));
				osInfo.dwOSVersionInfoSize = sizeof(OSVERSIONINFOEX);
				DWORD vistaPlus = 0;
				if (GetVersionEx((LPOSVERSIONINFO)&osInfo))
				{
					if (osInfo.dwMajorVersion >= 6)
						vistaPlus = 1;
				}
				if (vistaPlus)
					hr = pAudioProps->put_TVAudioMode(AMTVAUDIO_PRESET_STEREO | AMTVAUDIO_PRESET_LANG_A);
				else
					hr = pAudioProps->put_TVAudioMode(AMTVAUDIO_MODE_STEREO | AMTVAUDIO_MODE_LANG_A);
				HTESTPRINT(hr);
			}
			SAFE_RELEASE(pAudioProps);
		}
	}



	// Setup the crossbar for the graph
	IAMCrossbar *pXBar1 = NULL;

	HRESULT hr = pCapInfo->pCrossbar->QueryInterface(IID_IAMCrossbar, (void**)&pXBar1);
	HTESTPRINT(hr);

	// Look through the pins on the crossbar and find the correct one for the type of
	// connector we're routing. Also find the related audio pin and route that too.
	int tempCrossIndex = crossIndex;
	long i;
	long videoOutNum = -1;
	long audioOutNum = -1;
	long numIn, numOut;
	hr = pXBar1->get_PinCounts(&numOut, &numIn);
	HTESTPRINT(hr);
	long relatedPin, pinType;
	for (i = 0; i < numOut; i++)
	{
		hr = pXBar1->get_CrossbarPinInfo(FALSE, i, &relatedPin, &pinType);
		HTESTPRINT(hr);
		if (pinType == PhysConn_Video_VideoDecoder)
			videoOutNum = i;
		else if (pinType == PhysConn_Audio_AudioDecoder)
			audioOutNum = i;
	}
	for (i = 0; i < numIn; i++)
	{
		hr = pXBar1->get_CrossbarPinInfo(TRUE, i, &relatedPin, &pinType);
		HTESTPRINT(hr);
		if (pinType == crossType || (pinType == PhysConn_Video_YRYBY && crossType == 90)) // 90 is Component+SPDIF
		{
			if ((crossType != 1 && tempCrossIndex > 0) ||
				tempCrossIndex == 1)
			{
				tempCrossIndex--;
				continue;
			}
			// Route the video
			long currRoute = -1;
//			hr = pXBar1->get_IsRoutedTo(videoOutNum, &currRoute);
//			if (FAILED(hr) || currRoute != i)
			{
				hr = pXBar1->Route(videoOutNum, i);
				HTESTPRINT(hr);
			}
			
			if (audioOutNum >= 0)
			{
				if (crossType == PhysConn_Video_YRYBY || crossType == 90)
				{
					long relatedPinType = 0;
					long junk = 0;
					pXBar1->get_CrossbarPinInfo(TRUE, relatedPin, &junk, &relatedPinType);
					if ((relatedPinType != PhysConn_Audio_SPDIFDigital && crossType == 90) ||
						(relatedPinType == PhysConn_Audio_SPDIFDigital && crossType == PhysConn_Video_YRYBY))
					{
						// Find the other audio input pin that's related to the component input and use that
						int j;
						long otherRelatedPin = 0;
						for (j = 0; j < numIn; j++)
						{
							if (j == relatedPin) continue;
							otherRelatedPin = 0;
							pXBar1->get_CrossbarPinInfo(TRUE, j, &otherRelatedPin, &junk);
							if (otherRelatedPin == i)
							{
								slog(( env, "Crossbar swapping related audio pins on component video input old:%d new:%d\r\n", relatedPin, j));
								relatedPin = j;
								break;
							}
						}
					}

				}
				// Route any related audio
//				hr = pXBar1->get_IsRoutedTo(audioOutNum, &currRoute);
//				if (FAILED(hr) || currRoute != relatedPin)
				{
					hr = pXBar1->Route(audioOutNum, relatedPin);
					HTESTPRINT(hr);
				}
			}
			slog(( env, "Crossbar route: video:%d, auido:%d\r\n", i, relatedPin ));
			break;
		}
	}

	SAFE_RELEASE(pXBar1);

	if (audioOutNum == -1)
	{
		// It may have 2 crossbars, like ATI. Search for the second one.
		hr = pCapInfo->pBuilder->FindInterface(&LOOK_UPSTREAM_ONLY, NULL, pCapInfo->pCrossbar,
			IID_IAMCrossbar, (void**)&pXBar1);
		if (SUCCEEDED(hr))
		{
			slog((env, "Found secondary audio crossbar, routing it\r\n"));
			tempCrossIndex = crossIndex;
			hr = pXBar1->get_PinCounts(&numOut, &numIn);
			HTESTPRINT(hr);
			for (i = 0; i < numOut; i++)
			{
				hr = pXBar1->get_CrossbarPinInfo(FALSE, i, &relatedPin, &pinType);
				HTESTPRINT(hr);
				if (pinType == PhysConn_Audio_AudioDecoder)
				{
					audioOutNum = i;
					break;
				}
			}
			for (i = 0; i < numIn && audioOutNum >= 0; i++)
			{
				hr = pXBar1->get_CrossbarPinInfo(TRUE, i, &relatedPin, &pinType);
				HTESTPRINT(hr);
				if (pinType == crossType)
				{
					if ((crossType != 1 && tempCrossIndex > 0) ||
						tempCrossIndex == 1)
					{
						tempCrossIndex--;
						continue;
					}
					// Route any related audio
					hr = pXBar1->Route(audioOutNum, relatedPin);
					HTESTPRINT(hr);
					break;
				}
			}
			SAFE_RELEASE(pXBar1);
		}
	}

	IAMAnalogVideoDecoder *vidDec = NULL;
	hr = pCapInfo->pVideoCaptureFilter->QueryInterface(IID_IAMAnalogVideoDecoder, (void**)&vidDec);
	if (SUCCEEDED(hr))
	{
		/*if (FAILED(ccHr) && countryCode == 54) 
		{
			tvFormat = AnalogVideo_PAL_N;
		}*/
		hr = vidDec->put_TVFormat(videoFormatCode);
		HTESTPRINT(hr);
		/*if (FAILED(hr) && tvFormat == AnalogVideo_PAL_N) 
		{ 
			hr = vidDec->put_TVFormat(AnalogVideo_PAL_B);
		} */
		SAFE_RELEASE(vidDec);
	}
	slog((env, "DONE: switchToConnector0 %d type=%d index=%d\r\n", (int)capInfo, crossType, crossIndex));
}
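Because the routing above hinges on matching PhysConn_* pin types, it is often useful to log every pin the crossbar exposes when a connector fails to route. A small diagnostic sketch using only the IAMCrossbar calls already seen in this function (the helper name is an assumption):

// Hypothetical diagnostic: dumps the physical type and related pin index of
// every crossbar pin, using the same calls as switchToConnector0.
static void DumpCrossbarPins(JNIEnv* env, IAMCrossbar* pXBar)
{
	if (!pXBar) return;
	long numIn = 0, numOut = 0;
	if (FAILED(pXBar->get_PinCounts(&numOut, &numIn))) return;
	long relatedPin, pinType;
	for (long i = 0; i < numOut; i++)
	{
		if (SUCCEEDED(pXBar->get_CrossbarPinInfo(FALSE, i, &relatedPin, &pinType)))
			slog((env, "Crossbar out pin %d type:%d related:%d\r\n", (int)i, (int)pinType, (int)relatedPin));
	}
	for (long i = 0; i < numIn; i++)
	{
		if (SUCCEEDED(pXBar->get_CrossbarPinInfo(TRUE, i, &relatedPin, &pinType)))
			slog((env, "Crossbar in pin %d type:%d related:%d\r\n", (int)i, (int)pinType, (int)relatedPin));
	}
}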
Example #5
/*
 * Class:     sage_DShowMediaPlayer
 * Method:    setVideoRendererFilter0
 * Signature: (JLjava/lang/String;Ljava/util/Map;)V
 */
JNIEXPORT void JNICALL Java_sage_DShowMediaPlayer_setVideoRendererFilter0
  (JNIEnv *env, jobject jo, jlong dataPtr, jstring jfilterName, jobject jfilterOptions)
{
	if (jfilterName == NULL || env->GetStringLength(jfilterName) == 0 || !dataPtr){return;}
	CPlayerData* pData = (CPlayerData*) dataPtr;
	const char* cName = env->GetStringUTFChars(jfilterName, NULL);
	slog((env, "DShowPlayer setVideoRendererFilter0(%s) called\r\n", cName));
	CComPtr<IBaseFilter> pFilter = NULL;
	HRESULT hr = FindFilterByName(&pFilter, CLSID_LegacyAmFilterCategory, cName);
	env->ReleaseStringUTFChars(jfilterName, cName);
	BOOL vmr9Config = FALSE;
	BOOL evrConfig = FALSE;
	if (SUCCEEDED(hr) && jfilterOptions)
	{
		jint dxvaMode = 0;
		jint dxvaDeinterlace = 0;
		GetMapIntValue(env, jfilterOptions, "dxva_mpeg_mode", &dxvaMode);
		GetMapIntValue(env, jfilterOptions, "force_deinterlace", &dxvaDeinterlace);
		pData->SetDXVAParameters(dxvaMode, dxvaDeinterlace);

		jboolean ccOK = JNI_TRUE;
		GetMapBoolValue(env, jfilterOptions, "enable_cc", &ccOK);
		if (!ccOK)
			pData->DisableCC();
		// Get the DX9 device pointers, if they don't exist we can't use our custom VMR9 renderer
		jlong jD3D = 0;
		jlong jD3DDevice = 0;
		if (GetMapLongValue(env, jfilterOptions, "d3d_object_ptr", &jD3D) &&
			GetMapLongValue(env, jfilterOptions, "d3d_device_ptr", &jD3DDevice))
		{
			IDirect3D9* pD3D = (IDirect3D9*) jD3D;
			IDirect3DDevice9* pD3DDevice = (IDirect3DDevice9*) jD3DDevice;
			// Set the rendering mode and number of streams.
			CComPtr<IVMRFilterConfig9> pConfig = NULL;
			// See if it's EVR or VMR
			hr = pFilter->QueryInterface(IID_IVMRFilterConfig9, (void**)&(pConfig.p));
			if (SUCCEEDED(hr))
			{
				slog((env, "Using VMR9 for video rendering\r\n"));
				hr = pConfig->SetRenderingMode(VMR9Mode_Renderless);
				PLAYEXCEPT_RET(sage_PlaybackException_DIRECTX_INSTALL);

				/*
				 * NOTE: If we don't set the number of streams then we don't get the optimal
				 * format types as choices and end up using a private texture when we don't need to.
				 * I think this is because certain features of the
				 * VMR are not available in mixing mode or something like that.
				 * Update: 10/12/2004 - I have now learned that when you put the VMR9
				 * into mixing mode that it will then use the D3DRenderTarget itself
				 * to do the mixing.  I saw a usenet post of the exact VMR9 corruption
				 * problem I was having where the OSD was showing up on the video frame surface.
				 * By not setting the number of streams I keep the VMR9 in Renderless non-mixing mode.
				 * BUT this has the downside of breaking CC support for the VMR9 so we have a registry
				 * setting to allow this.
				 * 10/13/04 - The first problem came back where the format types are wrong. No idea
				 * why this was working fine yesterday.
				 */
				if (GetRegistryDword(HKEY_LOCAL_MACHINE,
					"Software\\Frey Technologies\\SageTV\\DirectX9", "AllowCCForVMR9", 1) &&
					ccOK)
				{
					// NOTE: We changed this from 2 to 3 because on Vista you need another input
					// to handle subpicture blending for DVD playback. And I don't believe there's any
					// negative to having 3 instead of 2; the big difference is between 1 and 2.
					hr = pConfig->SetNumberOfStreams(3); // video + CC + subpicture
					PLAYEXCEPT_RET(sage_PlaybackException_DIRECTX_INSTALL);
				}
				else
				{
					hr = pConfig->SetNumberOfStreams(1);
					PLAYEXCEPT_RET(sage_PlaybackException_DIRECTX_INSTALL);
				}

				CComPtr<IVMRSurfaceAllocatorNotify9> lpIVMRSurfAllocNotify = NULL;
				pFilter->QueryInterface(IID_IVMRSurfaceAllocatorNotify9,
					(void**)&(lpIVMRSurfAllocNotify.p));

				// create our surface allocator
				CVMRAllocator* myVMRAllocator = new CVMRAllocator(hr, pD3D, pD3DDevice);
				PLAYEXCEPT_RET(sage_PlaybackException_SAGETV_INSTALL);
				pData->SetVMR9Allocator(myVMRAllocator);

				// let the allocator and the notify know about each other
				hr = lpIVMRSurfAllocNotify->AdviseSurfaceAllocator(0xCAFEBABE, myVMRAllocator);
				HTESTPRINT(hr);
				hr = myVMRAllocator->AdviseNotify(lpIVMRSurfAllocNotify);
				HTESTPRINT(hr);

				hr = S_OK;

				vmr9Config = TRUE;
			}
			else
			{
				slog((env, "Using EVR for video render\r\n"));
				evrConfig = TRUE;
				hr = S_OK;
			}
		}
	}
	if (SUCCEEDED(hr))
		hr = pData->SetVideoRenderer(pFilter);
	if (SUCCEEDED(hr) && evrConfig)
	{
		// Configure the EVR presenter after we add the EVR to the filter graph
		jlong jD3DDevMgr = 0;
		if (GetMapLongValue(env, jfilterOptions, "d3d_device_mgr", &jD3DDevMgr))
		{
			CComPtr<IMFVideoRenderer> lpIMFVideoRenderer = NULL;
			hr = pFilter->QueryInterface(IID_IMFVideoRenderer, (void**)&(lpIMFVideoRenderer.p));
			HTESTPRINT(hr);
			if (SUCCEEDED(hr))
			{
				// Configure EVR to use our custom presenter
				CComPtr<IMFVideoPresenter> lpIMFVideoPresenter = NULL;
				hr = CoCreateInstance(CLSID_CustomEVRPresenter, NULL, CLSCTX_INPROC_SERVER,
					IID_IMFVideoPresenter, (void**)&(lpIMFVideoPresenter.p));
				HTESTPRINT(hr);
				if (SUCCEEDED(hr))
				{
					// Set the Direct3D device pointer
					IDirect3DDeviceManager9* pD3DDevMgr = (IDirect3DDeviceManager9*) jD3DDevMgr;
					ISTVEVRPrstr* pMyEvr = NULL;
					hr = lpIMFVideoPresenter->QueryInterface(IID_ISTVEVRPrstr, (void**)&pMyEvr);
					HTESTPRINT(hr);
					if (SUCCEEDED(hr) && pMyEvr)
						pMyEvr->set_D3DDeviceMgr(pD3DDevMgr);
					hr = lpIMFVideoRenderer->InitializeRenderer(NULL, lpIMFVideoPresenter);
					HTESTPRINT(hr);
					SAFE_RELEASE(pMyEvr);

					IEVRFilterConfig* pEvrConfig = NULL;
					hr = pFilter->QueryInterface(IID_IEVRFilterConfig, (void**)&pEvrConfig);
					HTESTPRINT(hr);
					// Try three inputs for now; one for video, one for CC and one for subpicture
					// But only use 3 on Vista by default since we've seen issues w/ it on XP
					OSVERSIONINFOEX osInfo;
					ZeroMemory(&osInfo, sizeof(OSVERSIONINFOEX));
					osInfo.dwOSVersionInfoSize = sizeof(OSVERSIONINFOEX);
					DWORD evrInputsDefault = 1;
					if (GetVersionEx((LPOSVERSIONINFO)&osInfo))
					{
						if (osInfo.dwMajorVersion >= 6)
							evrInputsDefault = 3;
					}
					DWORD evrInputs = GetRegistryDword(HKEY_LOCAL_MACHINE,
						"Software\\Frey Technologies\\SageTV\\DirectX9", "EVRInputPins", evrInputsDefault);
					slog((env, "Using %d input pins on the EVR\r\n", (int)evrInputs));
					if (pEvrConfig)
						pEvrConfig->SetNumberOfStreams(evrInputs < 1 ? 1 : (evrInputs > 3 ? 3 : evrInputs));
					SAFE_RELEASE(pEvrConfig);
					slog((env, "Finished with EVR configuration OK\r\n"));
				}
			}
		}
	}
	else if (SUCCEEDED(hr) && vmr9Config)
	{
		IVMRDeinterlaceControl9* pVmrDeint = NULL;
		hr = pFilter->QueryInterface(IID_IVMRDeinterlaceControl9, (void**)&pVmrDeint);
		if (SUCCEEDED(hr))
		{
			slog(("Setting up VMR9 deinterlacing\r\n"));
			hr = pVmrDeint->SetDeinterlacePrefs(DeinterlacePref9_NextBest);
			HTESTPRINT(hr);
/*			VMR9VideoDesc VideoDesc;
			DWORD dwNumModes = 0;
			AM_MEDIA_TYPE vmrConn;
			hr = renderInput->ConnectionMediaType(&vmrConn);
			HTESTPRINT(hr);
			if (vmrConn.formattype == FORMAT_VideoInfo2)
			{
				VIDEOINFOHEADER2* vih2 = (VIDEOINFOHEADER2*) vmrConn.pbFormat;

				// Fill in the VideoDesc structure
				VideoDesc.dwSize = sizeof(VMR9VideoDesc);
				VideoDesc.dwSampleWidth = vih2->bmiHeader.biWidth;
				VideoDesc.dwSampleHeight = vih2->bmiHeader.biHeight;
				VideoDesc.SampleFormat = ConvertInterlaceFlags(vih2->dwInterlaceFlags);
				VideoDesc.dwFourCC = vih2->bmiHeader.biCompression;
				VideoDesc.InputSampleFreq.dwNumerator = 30000;
				VideoDesc.InputSampleFreq.dwDenominator = 1001;
				VideoDesc.OutputFrameFreq.dwNumerator = 60000;
				VideoDesc.OutputFrameFreq.dwDenominator = 1001;
				hr = pVmrDeint->GetNumberOfDeinterlaceModes(&VideoDesc,
					&dwNumModes, NULL);
				HTESTPRINT(hr);
				if (SUCCEEDED(hr) && (dwNumModes != 0))
				{
					// Allocate an array for the GUIDs that identify the modes.
					GUID *pModes = new GUID[dwNumModes];
					if (pModes)
					{
						// Fill the array.
						hr = pVmrDeint->GetNumberOfDeinterlaceModes(&VideoDesc,
							&dwNumModes, pModes);
						if (SUCCEEDED(hr))
						{
							HTESTPRINT(hr);
							// Loop through each item and get the capabilities.
							for (DWORD i = 0; i < dwNumModes; i++)
							{
								VMR9DeinterlaceCaps Caps;
								ZeroMemory(&Caps, sizeof(Caps));
								Caps.dwSize = sizeof(VMR9DeinterlaceCaps);
								hr = pVmrDeint->GetDeinterlaceModeCaps(&(pModes[i]),
									&VideoDesc, &Caps);
								HTESTPRINT(hr);
								if (SUCCEEDED(hr))
								{
									if (Caps.DeinterlaceTechnology == DeinterlaceTech9_BOBLineReplicate)
										slog(("VM9Deinterlacing Tech: BOBLineReplicate\r\n"));
									else if (Caps.DeinterlaceTechnology == DeinterlaceTech9_BOBVerticalStretch)
										slog(("VM9Deinterlacing Tech: BOBVerticalStretch\r\n"));
									else if (Caps.DeinterlaceTechnology == DeinterlaceTech9_MedianFiltering)
										slog(("VM9Deinterlacing Tech: MedianFiltering\r\n"));
									else if (Caps.DeinterlaceTechnology == DeinterlaceTech9_EdgeFiltering)
										slog(("VM9Deinterlacing Tech: EdgeFiltering\r\n"));
									else if (Caps.DeinterlaceTechnology == DeinterlaceTech9_FieldAdaptive)
										slog(("VM9Deinterlacing Tech: FieldAdaptive\r\n"));
									else if (Caps.DeinterlaceTechnology == DeinterlaceTech9_PixelAdaptive)
										slog(("VM9Deinterlacing Tech: PixelAdaptive\r\n"));
									else if (Caps.DeinterlaceTechnology == DeinterlaceTech9_MotionVectorSteered)
										slog(("VM9Deinterlacing Tech: MotionVectorSteered\r\n"));
									else
										slog(("VM9Deinterlacing Tech: Proprietary...\r\n"));
								}
							}
							if (dwNumModes)
							{
//								hr = pVmrDeint->SetDeinterlaceMode(0, pModes);
//								HTESTPRINT(hr);
							}
						}
						delete [] pModes;
					}
				}
				FreeMediaType(vmrConn);
			}
			GUID realDeint;
			hr = pVmrDeint->GetActualDeinterlaceMode(0, &realDeint);
			LPOLESTR psz;
			StringFromCLSID(realDeint, &psz);
			char conv[64];
			WideCharToMultiByte(CP_ACP, 0, psz, -1, conv, 64, 0, 0);
			CoTaskMemFree(psz);
			slog(("Actual deinterlace: hr=0x%x guid=%s\r\n", hr, conv));
			GUID setDeint;
			hr = pVmrDeint->GetDeinterlaceMode(0, &setDeint);
			StringFromCLSID(setDeint, &psz);
			WideCharToMultiByte(CP_ACP, 0, psz, -1, conv, 64, 0, 0);
			CoTaskMemFree(psz);
			slog(("deinterlace mode: hr=0x%x guid=%s\r\n", hr, conv));
			if (hr == S_FALSE)
			{
				slog(("Setting deinterlace mode to actual mode...\r\n"));
				hr = pVmrDeint->SetDeinterlaceMode(0, &realDeint);
				hr = pVmrDeint->GetDeinterlaceMode(0, &setDeint);
				StringFromCLSID(setDeint, &psz);
				WideCharToMultiByte(CP_ACP, 0, psz, -1, conv, 64, 0, 0);
				CoTaskMemFree(psz);
				slog(("deinterlace mode: hr=0x%x guid=%s\r\n", hr, conv));
			}
*/
			pVmrDeint->Release();
		}
		DWORD vmrMixMode = GetRegistryDword(HKEY_LOCAL_MACHINE,
			"Software\\Frey Technologies\\SageTV\\DirectX9", "YUVMixing", 1);
		if (vmrMixMode)
		{
			IVMRMixerControl9* vmrMix = NULL;
			hr = pFilter->QueryInterface(IID_IVMRMixerControl9, (void**)&vmrMix);
			if (SUCCEEDED(hr))
			{
				DWORD currPrefs = 0;
				hr = vmrMix->GetMixingPrefs(&currPrefs);
				slog((env, "Curr Mix Prefs=0x%x\r\n", currPrefs));
				currPrefs &= ~MixerPref9_RenderTargetMask;
				currPrefs |= MixerPref9_RenderTargetYUV;
				hr = vmrMix->SetMixingPrefs(currPrefs);
				vmrMixMode = currPrefs;
				vmrMix->GetMixingPrefs(&currPrefs);
				slog((env, "Set to 0x%x, hr=0x%x, New Mix Prefs=0x%x\r\n", vmrMixMode, hr, currPrefs));
				vmrMix->Release();
			}
			else
				HTESTPRINT(hr);
		}
	}
	if (FAILED(hr))
	{
		elog((env, "Could not add specified video rendering filter to graph hr=0x%x\r\n", hr));
	}
}
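GetRegistryDword is another SageTV helper whose source is not shown here. Judging only from how it is called above (root key, subkey, value name, default), a plausible shape, offered as an assumption rather than the real implementation, is:

// Hypothetical reconstruction of the GetRegistryDword helper used above.
// Returns defaultValue when the key or value cannot be read as a REG_DWORD.
static DWORD GetRegistryDword(HKEY rootKey, const char* subKey, const char* valueName, DWORD defaultValue)
{
	DWORD result = defaultValue;
	HKEY hKey = NULL;
	if (RegOpenKeyExA(rootKey, subKey, 0, KEY_QUERY_VALUE, &hKey) == ERROR_SUCCESS)
	{
		DWORD data = 0;
		DWORD dataSize = sizeof(data);
		DWORD type = 0;
		if (RegQueryValueExA(hKey, valueName, NULL, &type, (LPBYTE)&data, &dataSize) == ERROR_SUCCESS &&
			type == REG_DWORD)
			result = data;
		RegCloseKey(hKey);
	}
	return result;
}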