Example #1
/*
 * Class:     sage_DShowCaptureDevice
 * Method:    switchToConnector0
 * Signature: (JIILjava/lang/String;II)V
 */
JNIEXPORT void JNICALL Java_sage_DShowCaptureDevice_switchToConnector0
  (JNIEnv *env, jobject jo, jlong capInfo, jint crossType, jint crossIndex, jstring tuningMode,
  jint countryCode, jint videoFormatCode)
{
	char szTuningMode[16];
	if (!capInfo) return;
	DShowCaptureInfo* pCapInfo = (DShowCaptureInfo*) capInfo;
	pCapInfo->videoFormatCode = videoFormatCode;

	const char* pTuningMode = env->GetStringUTFChars(tuningMode, NULL);
	if (!pTuningMode) return;
	// strncpy does not null-terminate on truncation, so terminate explicitly.
	strncpy( szTuningMode, pTuningMode, sizeof(szTuningMode) - 1 );
	szTuningMode[sizeof(szTuningMode) - 1] = '\0';
	env->ReleaseStringUTFChars(tuningMode, pTuningMode);
	slog((env, "switchToConnector0 tuningMode:%s.\r\n", szTuningMode ));

	if ( String2TVType( szTuningMode ) && BDATypeNum( pCapInfo->captureConfig ) > 0 ) //ZQ REMOVE ME
	{
		TV_TYPE newBDAType = String2TVType( szTuningMode );
		if ( pCapInfo->dwBDAType != newBDAType && pCapInfo->dwBDAType > 0 )
		{
			int i, CaptureNum = pCapInfo->captureNum;
			for ( i = 0; i < CaptureNum; i++ )
				if ( pCapInfo->captures[i] && pCapInfo->captures[i]->dwBDAType == pCapInfo->dwBDAType )
					break;
			if ( i >= CaptureNum )
			{
				slog((env, "switchToConnector0 ERROR: Orignal BDA Capture :%d is not found\r\n",  pCapInfo->dwBDAType ));
				ASSERT( 0 );
				return;
			}

			// save the current capture state back into the slot for the previous BDA type
			memcpy( pCapInfo->captures[i], pCapInfo,  sizeof(DShowCaptureInfo) );

			for ( i = 0; i < CaptureNum; i++ )
				if ( pCapInfo->captures[i] && pCapInfo->captures[i]->dwBDAType == newBDAType )
					break;

			if ( i >= CaptureNum )
			{
				slog((env, "switchToConnector0 ERROR: BDA Capture :%s is not found\r\n",  szTuningMode ));
				ASSERT( 0 );
				return;
			}
			memcpy( pCapInfo, pCapInfo->captures[i], sizeof(DShowCaptureInfo) );
			setChannelDev( (CHANNEL_DATA*)pCapInfo->channel, (void*)pCapInfo );
			slog((env, "switchToConnector0 BDA Capture :%s is switched.\r\n",  szTuningMode ));

		}

		//strncpy( pCapInfo->tvType, szTuningMode, sizeof(pCapInfo->tvType) );
		return;
	}

	if (!pCapInfo->pCrossbar)
		return;
	slog((env, "switchToConnector0 %d type:%d index:%d country:%d format:%d Mode:%s\r\n", 
			(int)capInfo, crossType, crossIndex, countryCode, videoFormatCode, szTuningMode ));


	strncpy( pCapInfo->TuningMode, szTuningMode, sizeof(pCapInfo->TuningMode) );
	// Setup the tuner first since it's upstream from the crossbar
	if (crossType == 1 && pCapInfo->pTVTuner)
	{
		IAMTVTuner* pTunerProps = NULL;
		HRESULT hr = pCapInfo->pTVTuner->QueryInterface(IID_IAMTVTuner, (void**)&pTunerProps);
		if (SUCCEEDED(hr))
		{
			HRESULT ccHr = S_OK;
			if (countryCode)
			{
				long currCountry = 0;
				hr = pTunerProps->get_CountryCode(&currCountry);
				if (FAILED(hr) || currCountry != countryCode)
				{
					hr = ccHr = pTunerProps->put_CountryCode(countryCode);
					HTESTPRINT(hr);
				}
				hr = pTunerProps->put_TuningSpace(countryCode);

				HTESTPRINT(hr);
			}
			AMTunerModeType currMode;
			TunerInputType currTuneType;
			HRESULT currModehr = pTunerProps->get_Mode(&currMode);
			HTESTPRINT(currModehr);
			HRESULT currTypehr = pTunerProps->get_InputType(0, &currTuneType);
			HTESTPRINT(currTypehr);
			AMTunerModeType newMode;
			TunerInputType tuneType;
			slog((env, "Tuning mode:%s; current tuning type:%d current  tuning model:%d\r\n", pCapInfo->TuningMode, currTuneType, currMode  ));
			if (!strcmp(pCapInfo->TuningMode, "Air"))
			{
				newMode = AMTUNER_MODE_TV;
				tuneType = TunerInputAntenna;
			}
			else if (!strcmp(pCapInfo->TuningMode, "FM Radio"))
			{
				newMode = AMTUNER_MODE_FM_RADIO;
				tuneType = TunerInputAntenna;
			}
			else
			if (!strcmp(pCapInfo->TuningMode, "HRC"))
			{
				newMode = AMTUNER_MODE_TV;
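				// (TunerInputType)2 is not a named value in the TunerInputType enum
				// (which only defines TunerInputCable=0 and TunerInputAntenna=1);
				// presumably the tuner driver interprets it as HRC cable numbering.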
				tuneType = (TunerInputType)2;
			} else
			{
				newMode = AMTUNER_MODE_TV;
				tuneType = TunerInputCable;
			}
			if (FAILED(currModehr) || newMode != currMode)
			{
				hr = pTunerProps->put_Mode(newMode);
				HTESTPRINT(hr);
			}
			if (FAILED(currTypehr) || tuneType != currTuneType)
			{
				hr = pTunerProps->put_InputType(0, tuneType);
				HTESTPRINT(hr);
			}
		
			long currConnInput = 0;
			hr = pTunerProps->get_ConnectInput(&currConnInput);
			if (FAILED(hr) || currConnInput != 0)
			{
				hr = pTunerProps->put_ConnectInput(0);
				HTESTPRINT(hr);
			}
			//long tvFormat;
			//hr = pTunerProps->get_TVFormat(&tvFormat);
//ZQ test
/*
{
	IKsPropertySet *pKSProp=NULL;

	KSPROPERTY_TUNER_STANDARD_S Standard;
	hr = pCapInfo->pTVTuner->QueryInterface(IID_IKsPropertySet, (PVOID *)&pKSProp);
	if ( SUCCEEDED(hr) )
	{
		memset(&Standard,0,sizeof(KSPROPERTY_TUNER_STANDARD_S));
		Standard.Standard=videoFormatCode;
		
		HRESULT hr=pKSProp->Set(PROPSETID_TUNER,
					KSPROPERTY_TUNER_STANDARD,
					INSTANCE_DATA_OF_PROPERTY_PTR(&Standard),	
					INSTANCE_DATA_OF_PROPERTY_SIZE(Standard),
					&Standard,	sizeof(Standard));
		if(FAILED(hr))
		{
			slog(( env, "Failed set Video Format:%d on TVTuner hr=0x%x \r\n", videoFormatCode, hr  ));
		} else
		{
			slog(( env, "Force to set Video Format:%d on TVTuner hr=0x%x \r\n", videoFormatCode, hr  ));
		}
		SAFE_RELEASE( pKSProp );
	} else
	{
		slog(( env, "Failed to get IKsPropertySet to set Video Format:%d on TVTuner hr=0x%x \r\n", videoFormatCode, hr  ));
	}
}*/
			SAFE_RELEASE(pTunerProps);
		}

		if (pCapInfo->pTVAudio)
		{
			IAMTVAudio* pAudioProps = NULL;
			hr = pCapInfo->pTVAudio->QueryInterface(IID_IAMTVAudio, (void**)&pAudioProps);
			if (SUCCEEDED(hr))
			{
				// On Vista and later, use the 'PRESET' flags instead when setting the TV
				// audio selections.
				OSVERSIONINFOEX osInfo;
				ZeroMemory(&osInfo, sizeof(OSVERSIONINFOEX));
				osInfo.dwOSVersionInfoSize = sizeof(OSVERSIONINFOEX);
				DWORD vistaPlus = 0;
				if (GetVersionEx((LPOSVERSIONINFO)&osInfo))
				{
					if (osInfo.dwMajorVersion >= 6)
						vistaPlus = 1;
				}
				if (vistaPlus)
					hr = pAudioProps->put_TVAudioMode(AMTVAUDIO_PRESET_STEREO | AMTVAUDIO_PRESET_LANG_A);
				else
					hr = pAudioProps->put_TVAudioMode(AMTVAUDIO_MODE_STEREO | AMTVAUDIO_MODE_LANG_A);
				HTESTPRINT(hr);
			}
			SAFE_RELEASE(pAudioProps);
		}
	}



	// Setup the crossbar for the graph
	IAMCrossbar *pXBar1 = NULL;

	HRESULT hr = pCapInfo->pCrossbar->QueryInterface(IID_IAMCrossbar, (void**)&pXBar1);
	HTESTPRINT(hr);
	if (FAILED(hr) || !pXBar1)
		return;

	// Look through the pins on the crossbar and find the correct one for the type of
	// connector we're routing. Also find the related audio pin and route that too.
	int tempCrossIndex = crossIndex;
	long i;
	long videoOutNum = -1;
	long audioOutNum = -1;
	long numIn, numOut;
	hr = pXBar1->get_PinCounts(&numOut, &numIn);
	HTESTPRINT(hr);
	long relatedPin, pinType;
	for (i = 0; i < numOut; i++)
	{
		hr = pXBar1->get_CrossbarPinInfo(FALSE, i, &relatedPin, &pinType);
		HTESTPRINT(hr);
		if (pinType == PhysConn_Video_VideoDecoder)
			videoOutNum = i;
		else if (pinType == PhysConn_Audio_AudioDecoder)
			audioOutNum = i;
	}
	for (i = 0; i < numIn; i++)
	{
		hr = pXBar1->get_CrossbarPinInfo(TRUE, i, &relatedPin, &pinType);
		HTESTPRINT(hr);
		if (pinType == crossType || (pinType == PhysConn_Video_YRYBY && crossType == 90)) // 90 is Component+SPDIF
		{
			if ((crossType != 1 && tempCrossIndex > 0) ||
				tempCrossIndex == 1)
			{
				tempCrossIndex--;
				continue;
			}
			// Route the video
			long currRoute = -1;
//			hr = pXBar1->get_IsRoutedTo(videoOutNum, &currRoute);
//			if (FAILED(hr) || currRoute != i)
			{
				hr = pXBar1->Route(videoOutNum, i);
				HTESTPRINT(hr);
			}
			
			if (audioOutNum >= 0)
			{
				if (crossType == PhysConn_Video_YRYBY || crossType == 90)
				{
					long relatedPinType = 0;
					long junk = 0;
					pXBar1->get_CrossbarPinInfo(TRUE, relatedPin, &junk, &relatedPinType);
					if ((relatedPinType != PhysConn_Audio_SPDIFDigital && crossType == 90) ||
						(relatedPinType == PhysConn_Audio_SPDIFDigital && crossType == PhysConn_Video_YRYBY))
					{
						// Find the other audio input pin that's related to the component input and use that
						int j;
						long otherRelatedPin = 0;
						for (j = 0; j < numIn; j++)
						{
							if (j == relatedPin) continue;
							otherRelatedPin = 0;
							pXBar1->get_CrossbarPinInfo(TRUE, j, &otherRelatedPin, &junk);
							if (otherRelatedPin == i)
							{
								slog(( env, "Crossbar swapping related audio pins on component video input old:%d new:%d\r\n", relatedPin, j));
								relatedPin = j;
								break;
							}
						}
					}

				}
				// Route any related audio
//				hr = pXBar1->get_IsRoutedTo(audioOutNum, &currRoute);
//				if (FAILED(hr) || currRoute != relatedPin)
				{
					hr = pXBar1->Route(audioOutNum, relatedPin);
					HTESTPRINT(hr);
				}
			}
			slog(( env, "Crossbar route: video:%d, auido:%d\r\n", i, relatedPin ));
			break;
		}
	}

	SAFE_RELEASE(pXBar1);

	if (audioOutNum == -1)
	{
		// It may have 2 crossbars, like ATI. Search for the second one.
		hr = pCapInfo->pBuilder->FindInterface(&LOOK_UPSTREAM_ONLY, NULL, pCapInfo->pCrossbar,
			IID_IAMCrossbar, (void**)&pXBar1);
		if (SUCCEEDED(hr))
		{
			slog((env, "Found secondary audio crossbar, routing it\r\n"));
			tempCrossIndex = crossIndex;
			hr = pXBar1->get_PinCounts(&numOut, &numIn);
			HTESTPRINT(hr);
			for (i = 0; i < numOut; i++)
			{
				hr = pXBar1->get_CrossbarPinInfo(FALSE, i, &relatedPin, &pinType);
				HTESTPRINT(hr);
				if (pinType == PhysConn_Audio_AudioDecoder)
				{
					audioOutNum = i;
					break;
				}
			}
			for (i = 0; i < numIn && audioOutNum >= 0; i++)
			{
				hr = pXBar1->get_CrossbarPinInfo(TRUE, i, &relatedPin, &pinType);
				HTESTPRINT(hr);
				if (pinType == crossType)
				{
					if ((crossType != 1 && tempCrossIndex > 0) ||
						tempCrossIndex == 1)
					{
						tempCrossIndex--;
						continue;
					}
					// Route any related audio
					hr = pXBar1->Route(audioOutNum, relatedPin);
					HTESTPRINT(hr);
					break;
				}
			}
			SAFE_RELEASE(pXBar1);
		}
	}

	IAMAnalogVideoDecoder *vidDec = NULL;
	hr = pCapInfo->pVideoCaptureFilter->QueryInterface(IID_IAMAnalogVideoDecoder, (void**)&vidDec);
	if (SUCCEEDED(hr))
	{
		/*if (FAILED(ccHr) && countryCode == 54) 
		{
			tvFormat = AnalogVideo_PAL_N;
		}*/
		hr = vidDec->put_TVFormat(videoFormatCode);
		HTESTPRINT(hr);
		/*if (FAILED(hr) && tvFormat == AnalogVideo_PAL_N) 
		{ 
			hr = vidDec->put_TVFormat(AnalogVideo_PAL_B);
		} */
		SAFE_RELEASE(vidDec);
	}
	slog((env, "DONE: switchToConnector0 %d type=%d index=%d\r\n", (int)capInfo, crossType, crossIndex));
}
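
Note: the slog logging helper and the HTESTPRINT macro used throughout this example are defined elsewhere in the SageTV native sources and are not shown here. As a rough sketch only (an assumption about its behavior, not the actual SageTV definition), HTESTPRINT can be read as "log any failed HRESULT and keep going":

/*
 * Hypothetical stand-in for the HTESTPRINT macro used above: log the HRESULT
 * (and the source line) whenever a DirectShow call fails, but continue with
 * the connector switch rather than aborting.
 */
#define HTESTPRINT(hr) \
	do { \
		if (FAILED(hr)) \
			slog((env, "HRESULT failure 0x%x at line %d\r\n", (hr), __LINE__)); \
	} while (0)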
Example #2
bool directx_camera_server::open_and_find_parameters(const int which, unsigned width, unsigned height)
{
  HRESULT hr;

  //-------------------------------------------------------------------
  // Create COM and DirectX objects needed to access a video stream.

  // Initialize COM.  This must have a matching uninitialize somewhere before
  // the object is destroyed.
#ifdef	DEBUG
  printf("directx_camera_server::open_and_find_parameters(): Before CoInitialize\n");
#endif
  CoInitialize(NULL);

  // Create the filter graph manager
#ifdef	DEBUG
  printf("directx_camera_server::open_and_find_parameters(): Before CoCreateInstance FilterGraph\n");
#endif
  CoCreateInstance(CLSID_FilterGraph, NULL, CLSCTX_INPROC_SERVER, 
		      IID_IGraphBuilder, (void **)&_pGraph);
  if (_pGraph == NULL) {
    fprintf(stderr, "directx_camera_server::open_and_find_parameters(): Can't create graph manager\n");
    return false;
  }
  _pGraph->QueryInterface(IID_IMediaControl, (void **)&_pMediaControl);
  _pGraph->QueryInterface(IID_IMediaEvent, (void **)&_pEvent);

  // Create the Capture Graph Builder.
#ifdef	DEBUG
  printf("directx_camera_server::open_and_find_parameters(): Before CoCreateInstance CaptureGraphBuilder2\n");
#endif
  CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC, 
      IID_ICaptureGraphBuilder2, (void **)&_pBuilder);
  if (_pBuilder == NULL) {
    fprintf(stderr, "directx_camera_server::open_and_find_parameters(): Can't create graph builder\n");
    return false;
  }

  // Associate the graph with the builder.
#ifdef	DEBUG
  printf("directx_camera_server::open_and_find_parameters(): Before SetFilterGraph\n");
#endif
  _pBuilder->SetFiltergraph(_pGraph);

  //-------------------------------------------------------------------
  // Go find a video device to use: in this case, we are using the Nth
  // one we find, where the number N is the "which" parameter.

  // Create the system device enumerator.
#ifdef	DEBUG
  printf("directx_camera_server::open_and_find_parameters(): Before CoCreateInstance SystemDeviceEnum\n");
#endif
  ICreateDevEnum *pDevEnum = NULL;
  CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC, 
      IID_ICreateDevEnum, (void **)&pDevEnum);
  if (pDevEnum == NULL) {
    fprintf(stderr, "directx_camera_server::open_and_find_parameters(): Can't create device enumerator\n");
    return false;
  }

  // Create an enumerator for video capture devices.
#ifdef	DEBUG
  printf("directx_camera_server::open_and_find_parameters(): Before CreateClassEnumerator\n");
#endif
  IEnumMoniker *pClassEnum = NULL;
  pDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pClassEnum, 0);
  if (pClassEnum == NULL) {
    fprintf(stderr, "directx_camera_server::open_and_find_parameters(): Can't create video enumerator (no cameras?)\n");
    pDevEnum->Release();
    return false;
  }

#ifdef	DEBUG
  printf("directx_camera_server::open_and_find_parameters(): Before Loop over enumerators\n");
#endif
  ULONG cFetched;
  IMoniker *pMoniker = NULL;
  IBaseFilter *pSrc = NULL;
  // Skip (which - 1) cameras
  int i;
  for (i = 0; i < which-1 ; i++) {
    if (pClassEnum->Next(1, &pMoniker, &cFetched) != S_OK) {
      fprintf(stderr,"directx_camera_server::open_and_find_parameters(): Can't open camera (not enough cameras)\n");
      pClassEnum->Release();
      pDevEnum->Release();
      return false;
    }
    // Release the moniker for each camera we skip over.
    pMoniker->Release();
  }
  // Take the next camera and bind it
  if (pClassEnum->Next(1, &pMoniker, &cFetched) == S_OK) {
    // Bind this moniker to a filter object.
    pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**)&pSrc);
    pMoniker->Release();
  } else {
    fprintf(stderr,"directx_camera_server::open_and_find_parameters(): Can't open camera (not enough cameras)\n");
    pClassEnum->Release();
    pDevEnum->Release();
    return false;
  }
  if (pSrc == NULL) {
    fprintf(stderr,"directx_camera_server::open_and_find_parameters(): Can't bind camera moniker to a filter\n");
    pClassEnum->Release();
    pDevEnum->Release();
    return false;
  }

  pClassEnum->Release();
  pDevEnum->Release();

  //-------------------------------------------------------------------
  // Construct the sample grabber callback handler that will be used
  // to receive image data from the sample grabber.
  if ( (_pCallback = new directx_samplegrabber_callback()) == NULL) {
    fprintf(stderr,"directx_camera_server::open_and_find_parameters(): Can't create sample grabber callback handler (out of memory?)\n");
    return false;
  }

  //-------------------------------------------------------------------
  // Construct the sample grabber that will be used to snatch images from
  // the video stream as they go by.  Set its media type and callback.

  // Create the Sample Grabber.
#ifdef	DEBUG
  printf("directx_camera_server::open_and_find_parameters(): Before CoCreateInstance SampleGrabber\n");
#endif
  CoCreateInstance(CLSID_SampleGrabber, NULL, CLSCTX_INPROC_SERVER,
      IID_IBaseFilter, reinterpret_cast<void**>(&_pSampleGrabberFilter));
  if (_pSampleGrabberFilter == NULL) {
    fprintf(stderr,"directx_camera_server::open_and_find_parameters(): Can't get SampleGrabber filter (not DirectX 8.1+?)\n");
    return false;
  }
#ifdef	DEBUG
  printf("directx_camera_server::open_and_find_parameters(): Before QueryInterface\n");
#endif
  _pSampleGrabberFilter->QueryInterface(IID_ISampleGrabber,
      reinterpret_cast<void**>(&_pGrabber));
  if (_pGrabber == NULL) {
    fprintf(stderr,"directx_camera_server::open_and_find_parameters(): Can't get ISampleGrabber interface\n");
    return false;
  }

  // Set the media type to video
#ifdef	DEBUG
  printf("directx_camera_server::open_and_find_parameters(): Before SetMediaType\n");
#endif
  AM_MEDIA_TYPE mt;
  // Ask for video media producers that produce 24-bit RGB
  ZeroMemory(&mt, sizeof(AM_MEDIA_TYPE));
  mt.majortype = MEDIATYPE_Video;	  // Ask for video media producers
  mt.subtype = MEDIASUBTYPE_RGB24;	  // Ask for 24-bit RGB (8 bits per channel)
  _pGrabber->SetMediaType(&mt);

  //-------------------------------------------------------------------
  // Ask for the video resolution that has been passed in.
  // This code is based on the assumption that the SetFormat call on the
  // IAMStreamConfig interface is the right way to do this; the interface
  // is described in the help pages.
  // If the width and height are specified as 0, they are not set
  // in the header and the device's default format is used.
  if ( (width != 0) && (height != 0) ) {
    _pBuilder->FindInterface(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video, pSrc,
			      IID_IAMStreamConfig, (void **)&_pStreamConfig);
    if (_pStreamConfig == NULL) {
      fprintf(stderr,"directx_camera_server::open_and_find_parameters(): Can't get StreamConfig interface\n");
      return false;
    }

    ZeroMemory(&mt, sizeof(AM_MEDIA_TYPE));
    mt.majortype = MEDIATYPE_Video;	  // Ask for video media producers
    mt.subtype = MEDIASUBTYPE_RGB24;	  // Ask for 24-bit RGB (8 bits per channel)
    mt.pbFormat = (BYTE*)CoTaskMemAlloc(sizeof(VIDEOINFOHEADER));
    VIDEOINFOHEADER *pVideoHeader = (VIDEOINFOHEADER*)mt.pbFormat;
    ZeroMemory(pVideoHeader, sizeof(VIDEOINFOHEADER));
    pVideoHeader->bmiHeader.biBitCount = 24;
    pVideoHeader->bmiHeader.biWidth = width;
    pVideoHeader->bmiHeader.biHeight = height;
    pVideoHeader->bmiHeader.biPlanes = 1;
    pVideoHeader->bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
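    // DIBSIZE (from the DirectShow amvideo.h headers) computes the DWORD-aligned
    // image size in bytes from the bitmap header.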
    pVideoHeader->bmiHeader.biSizeImage = DIBSIZE(pVideoHeader->bmiHeader);

    // Set the format type and size.
    mt.formattype = FORMAT_VideoInfo;
    mt.cbFormat = sizeof(VIDEOINFOHEADER);

    // Set the sample size.
    mt.bFixedSizeSamples = TRUE;
    mt.lSampleSize = DIBSIZE(pVideoHeader->bmiHeader);

    // Make the call to actually set the video type to what we want.
    if (_pStreamConfig->SetFormat(&mt) != S_OK) {
      fprintf(stderr,"directx_camera_server::open_and_find_parameters(): Can't set resolution to %dx%d\n",
	pVideoHeader->bmiHeader.biWidth, pVideoHeader->bmiHeader.biHeight);
      CoTaskMemFree(mt.pbFormat);	// don't leak the format block on the error path
      return false;
    }

    // Clean up the pbFormat header memory we allocated above.
    CoTaskMemFree(mt.pbFormat);
  }

  //-------------------------------------------------------------------
  // Create a NULL renderer that will be used to discard the video frames
  // on the output pin of the sample grabber

#ifdef	DEBUG
  printf("directx_camera_server::open_and_find_parameters(): Before CoCreateInstance NullRenderer\n");
#endif
  IBaseFilter *pNull = NULL;
  CoCreateInstance(CLSID_NullRenderer, NULL, CLSCTX_INPROC_SERVER,
      IID_IBaseFilter, reinterpret_cast<void**>(&pNull));

  //-------------------------------------------------------------------
  // Build the filter graph.  First add the filters and then connect them.

  // pSrc is the capture filter for the video device we found above.
  _pGraph->AddFilter(pSrc, L"Video Capture");

  // Add the sample grabber filter
  _pGraph->AddFilter(_pSampleGrabberFilter, L"SampleGrabber");

  // Add the null renderer filter
  _pGraph->AddFilter(pNull, L"NullRenderer");

  // Connect the output of the video reader to the sample grabber input
  ConnectTwoFilters(_pGraph, pSrc, _pSampleGrabberFilter);

  // Connect the output of the sample grabber to the NULL renderer input
  ConnectTwoFilters(_pGraph, _pSampleGrabberFilter, pNull);

  //-------------------------------------------------------------------
  // XXX See if this is a video tuner card by querying for that interface.
  // Set it to read the video channel if it is one.
  IAMTVTuner  *pTuner = NULL;
  hr = _pBuilder->FindInterface(NULL, NULL, pSrc, IID_IAMTVTuner, (void**)&pTuner);
  if (pTuner != NULL) {
#ifdef	DEBUG
    printf("directx_camera_server::open_and_find_parameters(): Found a TV Tuner!\n");
#endif

    //XXX Put code here.
    // Set the first input pin to use the cable as input
    hr = pTuner->put_InputType(0, TunerInputCable);
    if (FAILED(hr)) {
      fprintf(stderr,"directx_camera_server::open_and_find_parameters(): Can't set input to cable\n");
    }

    // Set the channel on the video to be baseband (is this channel zero?)
    hr = pTuner->put_Channel(0, -1, -1);
    if (FAILED(hr)) {
      fprintf(stderr,"directx_camera_server::open_and_find_parameters(): Can't set channel\n");
    }

    pTuner->Release();
  }
  

  //-------------------------------------------------------------------
  // Find _num_rows and _num_columns in the video stream.
  _pGrabber->GetConnectedMediaType(&mt);
  VIDEOINFOHEADER *pVih;
  if (mt.formattype == FORMAT_VideoInfo) {
      pVih = reinterpret_cast<VIDEOINFOHEADER*>(mt.pbFormat);
  } else {
    fprintf(stderr,"directx_camera_server::open_and_find_parameters(): Can't get video header type\n");
    return false;
  }

  // Number of rows and columns.  This is different if we are using a target
  // rectangle (rcTarget) than if we are not.
  if (IsRectEmpty(&pVih->rcTarget)) {
    _num_columns = pVih->bmiHeader.biWidth;
    _num_rows = pVih->bmiHeader.biHeight;
  } else {
    _num_columns = pVih->rcTarget.right;
    _num_rows = pVih->bmiHeader.biHeight;
    printf("XXX directx_camera_server::open_and_find_parameters(): Warning: may not work correctly with target rectangle\n");
  }
  _minX = 0;
  _maxX = _num_columns - 1;
  _minY = 0;
  _maxY = _num_rows - 1;
#ifdef DEBUG
  printf("Got %dx%d video\n", _num_columns, _num_rows);
#endif

  // Make sure that the image is not compressed and that we have 24 bits
  // per pixel (3 bytes).
  if (pVih->bmiHeader.biCompression != BI_RGB) {
    fprintf(stderr,"directx_camera_server::open_and_find_parameters(): Compression not RGB\n");
    switch (pVih->bmiHeader.biCompression) {
      case BI_RLE8:
	fprintf(stderr,"  (It is BI_RLE8)\n");
	break;
      case BI_RLE4:
	fprintf(stderr,"  (It is BI_RLE4)\n");
	break;
      case BI_BITFIELDS:
	fprintf(stderr,"  (It is BI_BITFIELDS)\n");
	break;
      default:
	fprintf(stderr,"  (Unknown compression type)\n");
    }
    return false;
  }
  int BytesPerPixel = pVih->bmiHeader.biBitCount / 8;
  if (BytesPerPixel != 3) {
    fprintf(stderr,"directx_camera_server::open_and_find_parameters(): Not 3 bytes per pixel (%d)\n",
      pVih->bmiHeader.biBitCount);
    return false;
  }

  // A negative height indicates that the images are stored non-inverted in Y
  // Not sure what to do with images that have negative height -- need to
  // read the book some more to find out.
  if (_num_rows < 0) {
    fprintf(stderr,"directx_camera_server::open_and_find_parameters(): Num Rows is negative (internal error)\n");
    return false;
  }

  // Find the stride to take when moving from one row of video to the
  // next.  This is rounded up to the nearest DWORD.
  _stride = (_num_columns * BytesPerPixel + 3) & ~3;
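  // Example: 322 columns * 3 bytes = 966 bytes; (966 + 3) & ~3 = 968, the DWORD-aligned stride.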

  // Set the callback, where '0' means 'use the SampleCB callback'
  _pGrabber->SetCallback(_pCallback, 0);

  //-------------------------------------------------------------------
  // Release resources that won't be used later and return
  pSrc->Release();
  pNull->Release();
  return true;
}
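
Note: the ConnectTwoFilters helper called when building the graph above is not part of this listing. A minimal sketch of such a helper, following the standard DirectShow pattern of joining the first unconnected output pin of one filter to the first unconnected input pin of the next (the GetUnconnectedPin helper below is part of this sketch, not the original source), might look like this:

// Sketch: find the first unconnected pin of the given direction on a filter.
static HRESULT GetUnconnectedPin(IBaseFilter *pFilter, PIN_DIRECTION PinDir, IPin **ppPin)
{
  *ppPin = NULL;
  IEnumPins *pEnum = NULL;
  IPin *pPin = NULL;
  HRESULT hr = pFilter->EnumPins(&pEnum);
  if (FAILED(hr)) { return hr; }
  while (pEnum->Next(1, &pPin, NULL) == S_OK) {
    PIN_DIRECTION thisDir;
    pPin->QueryDirection(&thisDir);
    if (thisDir == PinDir) {
      IPin *pTmp = NULL;
      if (SUCCEEDED(pPin->ConnectedTo(&pTmp))) {
        pTmp->Release();          // already connected; keep looking
      } else {
        pEnum->Release();         // unconnected pin of the right direction
        *ppPin = pPin;
        return S_OK;
      }
    }
    pPin->Release();
  }
  pEnum->Release();
  return E_FAIL;                  // no unconnected pin of that direction
}

// Sketch: connect the first unconnected output pin of pFirst to the first
// unconnected input pin of pSecond, letting the graph builder insert any
// intermediate filters it needs.
static HRESULT ConnectTwoFilters(IGraphBuilder *pGraph, IBaseFilter *pFirst, IBaseFilter *pSecond)
{
  IPin *pOut = NULL, *pIn = NULL;
  HRESULT hr = GetUnconnectedPin(pFirst, PINDIR_OUTPUT, &pOut);
  if (FAILED(hr)) { return hr; }
  hr = GetUnconnectedPin(pSecond, PINDIR_INPUT, &pIn);
  if (FAILED(hr)) { pOut->Release(); return hr; }
  hr = pGraph->Connect(pOut, pIn);
  pOut->Release();
  pIn->Release();
  return hr;
}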