int CaptureClass::getProperty(int aProperty, float &aValue, int &aAuto)
{
    HRESULT hr;
    IAMVideoProcAmp *procAmp = NULL;
    IAMCameraControl *control = NULL;

    aAuto = 0;
    aValue = -1;

    int prop = escapiPropToMFProp(aProperty);

    if (aProperty < CAPTURE_PAN)
    {
        // Image-quality properties (brightness, contrast, ...) go through IAMVideoProcAmp.
        hr = mSource->QueryInterface(IID_PPV_ARGS(&procAmp));
        if (SUCCEEDED(hr))
        {
            long min, max, step, def, caps;
            hr = procAmp->GetRange(prop, &min, &max, &step, &def, &caps);
            if (SUCCEEDED(hr))
            {
                long v = 0, f = 0;
                hr = procAmp->Get(prop, &v, &f);
                if (SUCCEEDED(hr))
                {
                    // Report the value normalized to [0, 1].
                    aValue = (v - min) / (float)(max - min);
                    aAuto = !!(f & VideoProcAmp_Flags_Auto);
                }
            }
            procAmp->Release();
            return 0;
        }
    }
    else
    {
        // Camera properties (pan, tilt, zoom, exposure, ...) go through IAMCameraControl.
        hr = mSource->QueryInterface(IID_PPV_ARGS(&control));
        if (SUCCEEDED(hr))
        {
            long min, max, step, def, caps;
            hr = control->GetRange(prop, &min, &max, &step, &def, &caps);
            if (SUCCEEDED(hr))
            {
                long v = 0, f = 0;
                hr = control->Get(prop, &v, &f);
                if (SUCCEEDED(hr))
                {
                    aValue = (v - min) / (float)(max - min);
                    aAuto = !!(f & CameraControl_Flags_Auto);
                }
            }
            control->Release();
            return 0;
        }
    }
    return 1;
}
QVariantList CaptureDShow::imageControls(IBaseFilter *filter) const
{
    if (!filter)
        return QVariantList();

    qint32 min;
    qint32 max;
    qint32 step;
    qint32 defaultValue;
    qint32 flags;
    qint32 value;

    QVariantList controls;
    IAMVideoProcAmp *pProcAmp = NULL;

    if (SUCCEEDED(filter->QueryInterface(IID_IAMVideoProcAmp,
                                         reinterpret_cast<void **>(&pProcAmp)))) {
        for (const VideoProcAmpProperty &property: vpapToStr->keys()) {
            if (SUCCEEDED(pProcAmp->GetRange(property,
                                             reinterpret_cast<LONG *>(&min),
                                             reinterpret_cast<LONG *>(&max),
                                             reinterpret_cast<LONG *>(&step),
                                             reinterpret_cast<LONG *>(&defaultValue),
                                             reinterpret_cast<LONG *>(&flags))))
                if (SUCCEEDED(pProcAmp->Get(property,
                                            reinterpret_cast<LONG *>(&value),
                                            reinterpret_cast<LONG *>(&flags)))) {
                    QVariantList control;
                    QString type;

                    if (property == VideoProcAmp_ColorEnable
                        || property == VideoProcAmp_BacklightCompensation)
                        type = "boolean";
                    else
                        type = "integer";

                    if (value == defaultValue)
                        defaultValue = (min + max) / 2;

                    control << vpapToStr->value(property)
                            << type
                            << min
                            << max
                            << step
                            << defaultValue
                            << value
                            << QStringList();

                    controls << QVariant(control);
                }
        }

        pProcAmp->Release();
    }

    return controls;
}
bool videoInputCamera::setVideoSettingValue(long prop, long value, long flag)
{
    IAMVideoProcAmp *lpAMVideoProcAmp = NULL;
    HRESULT hr = pInputFilter->QueryInterface(IID_IAMVideoProcAmp, (void **)&lpAMVideoProcAmp);
    if (FAILED(hr))
        return false;

    hr = lpAMVideoProcAmp->Set(prop, value, flag);
    lpAMVideoProcAmp->Release();

    return SUCCEEDED(hr);
}
bool videoInputCamera::getVideoSettingRange(long prop, long &min, long &max, long &step, long &flag, long &dflt)
{
    IAMVideoProcAmp *lpAMVideoProcAmp = NULL;
    HRESULT hr = pInputFilter->QueryInterface(IID_IAMVideoProcAmp, (void **)&lpAMVideoProcAmp);
    if (FAILED(hr))
        return false;

    hr = lpAMVideoProcAmp->GetRange(prop, &min, &max, &step, &dflt, &flag);
    lpAMVideoProcAmp->Release();

    return SUCCEEDED(hr);
}
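// Usage sketch (not part of videoInputCamera): shows how the two helpers above can be
// combined to drive a property from a normalized 0..1 value. Assumes `cam` is an
// already-opened videoInputCamera whose methods are callable from here;
// setBrightnessNormalized is a hypothetical free function added only for illustration.
bool setBrightnessNormalized(videoInputCamera &cam, float normalized)
{
    long min = 0, max = 0, step = 0, flag = 0, dflt = 0;
    if (!cam.getVideoSettingRange(VideoProcAmp_Brightness, min, max, step, flag, dflt))
        return false;

    // Map [0,1] onto the device range and snap to the reported step size.
    long value = min + (long)((max - min) * normalized);
    if (step > 0)
        value = min + ((value - min) / step) * step;

    return cam.setVideoSettingValue(VideoProcAmp_Brightness, value, VideoProcAmp_Flags_Manual);
}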
CaptureDeviceParameters MediaFoundationVideoDevice::GetParameters()
{
    CaptureDeviceParameters out;
    if (!this->IsSetup)
    {
        LOG_ERROR("MediaFoundationVideoDevice::GetParameters failed: device is not set up");
        return out;
    }
    if (this->Source == NULL)
    {
        LOG_ERROR("MediaFoundationVideoDevice::GetParameters failed: invalid source");
        return out;
    }

    IAMVideoProcAmp *pProcAmp = NULL;
    HRESULT hr = this->Source->QueryInterface(IID_PPV_ARGS(&pProcAmp));
    if (SUCCEEDED(hr))
    {
        for (unsigned int i = 0; i < CaptureDeviceParameters::NUMBER_OF_VIDEO_PROC_PARAMETERS; i++)
        {
            Parameter temp;
            hr = pProcAmp->GetRange(VideoProcAmp_Brightness + i, &temp.Min, &temp.Max, &temp.Step, &temp.Default, &temp.Flag);
            if (SUCCEEDED(hr))
            {
                temp.CurrentValue = temp.Default;
                out.VideoProcParameters[i] = temp;
            }
        }
        pProcAmp->Release();
    }

    IAMCameraControl *pProcControl = NULL;
    hr = this->Source->QueryInterface(IID_PPV_ARGS(&pProcControl));
    if (SUCCEEDED(hr))
    {
        for (unsigned int i = 0; i < CaptureDeviceParameters::NUMBER_OF_CAMERA_CONTROL_PARAMETERS; i++)
        {
            Parameter temp;
            hr = pProcControl->GetRange(CameraControl_Pan + i, &temp.Min, &temp.Max, &temp.Step, &temp.Default, &temp.Flag);
            if (SUCCEEDED(hr))
            {
                temp.CurrentValue = temp.Default;
                out.CameraControlParameters[i] = temp;
            }
        }
        pProcControl->Release();
    }

    return out;
}
int CaptureClass::setProperty(int aProperty, float aValue, int aAuto)
{
    HRESULT hr;
    IAMVideoProcAmp *procAmp = NULL;
    IAMCameraControl *control = NULL;

    int prop = escapiPropToMFProp(aProperty);

    if (aProperty < CAPTURE_PAN)
    {
        hr = mSource->QueryInterface(IID_PPV_ARGS(&procAmp));
        if (SUCCEEDED(hr))
        {
            long min, max, step, def, caps;
            hr = procAmp->GetRange(prop, &min, &max, &step, &def, &caps);
            if (SUCCEEDED(hr))
            {
                // Map the normalized value onto the device range; in auto mode use the default.
                LONG val = (long)floor(min + (max - min) * aValue);
                if (aAuto)
                    val = def;
                hr = procAmp->Set(prop, val, aAuto ? VideoProcAmp_Flags_Auto : VideoProcAmp_Flags_Manual);
            }
            procAmp->Release();
            return !!SUCCEEDED(hr);
        }
    }
    else
    {
        hr = mSource->QueryInterface(IID_PPV_ARGS(&control));
        if (SUCCEEDED(hr))
        {
            long min, max, step, def, caps;
            hr = control->GetRange(prop, &min, &max, &step, &def, &caps);
            if (SUCCEEDED(hr))
            {
                LONG val = (long)floor(min + (max - min) * aValue);
                if (aAuto)
                    val = def;
                hr = control->Set(prop, val, aAuto ? CameraControl_Flags_Auto : CameraControl_Flags_Manual);
            }
            control->Release();
            return !!SUCCEEDED(hr);
        }
    }
    return 1;
}
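// Usage sketch (not part of ESCAPI's CaptureClass): reads a property back and then sets
// it in manual mode through the two methods above. Assumes `cap` is an already-initialized
// CaptureClass and that a CAPTURE_BRIGHTNESS constant exists alongside CAPTURE_PAN.
void demoBrightness(CaptureClass &cap)
{
    float value = 0.0f;
    int isAuto = 0;

    // getProperty reports the current value normalized to [0, 1] plus the auto flag.
    if (cap.getProperty(CAPTURE_BRIGHTNESS, value, isAuto) == 0)
    {
        // Push brightness to 75% of the device range, manual mode (aAuto = 0).
        cap.setProperty(CAPTURE_BRIGHTNESS, 0.75f, 0);
    }
}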
//----------------------------------------------------------------------------
void MediaFoundationVideoDevice::SetParameters(CaptureDeviceParameters newParameters)
{
    if (!this->IsSetup)
    {
        LOG_ERROR("MediaFoundationVideoDevice::SetParameters failed: device is not set up");
        return;
    }
    if (this->Source == NULL)
    {
        LOG_ERROR("MediaFoundationVideoDevice::SetParameters failed: invalid source");
        return;
    }

    IAMVideoProcAmp *pProcAmp = NULL;
    HRESULT hr = this->Source->QueryInterface(IID_PPV_ARGS(&pProcAmp));
    if (SUCCEEDED(hr))
    {
        for (unsigned int i = 0; i < CaptureDeviceParameters::NUMBER_OF_VIDEO_PROC_PARAMETERS; i++)
        {
            if (this->PreviousParameters.VideoProcParameters[i].CurrentValue != newParameters.VideoProcParameters[i].CurrentValue
                || this->PreviousParameters.VideoProcParameters[i].Flag != newParameters.VideoProcParameters[i].Flag)
            {
                hr = pProcAmp->Set(VideoProcAmp_Brightness + i,
                                   newParameters.VideoProcParameters[i].CurrentValue,
                                   newParameters.VideoProcParameters[i].Flag);
            }
        }
        pProcAmp->Release();
    }

    IAMCameraControl *pProcControl = NULL;
    hr = this->Source->QueryInterface(IID_PPV_ARGS(&pProcControl));
    if (SUCCEEDED(hr))
    {
        for (unsigned int i = 0; i < CaptureDeviceParameters::NUMBER_OF_CAMERA_CONTROL_PARAMETERS; i++)
        {
            if (this->PreviousParameters.CameraControlParameters[i].CurrentValue != newParameters.CameraControlParameters[i].CurrentValue
                || this->PreviousParameters.CameraControlParameters[i].Flag != newParameters.CameraControlParameters[i].Flag)
            {
                hr = pProcControl->Set(CameraControl_Pan + i,
                                       newParameters.CameraControlParameters[i].CurrentValue,
                                       newParameters.CameraControlParameters[i].Flag);
            }
        }
        pProcControl->Release();
    }

    this->PreviousParameters = newParameters;
}
void ReadDemo()
{
    HRESULT hr;
    IAMVideoProcAmp *pVideoProc = NULL;

    /* [out] */ long Min;
    /* [out] */ long Max;
    /* [out] */ long SteppingDelta;
    /* [out] */ long Default;
    /* [out] */ long CapsFlags = 0;

    long val_brightness, flag;
    long val_whitebalance;
    long val_gain;

    hr = cpVideoCapture->QueryInterface(__uuidof(IAMVideoProcAmp), (void **)&pVideoProc);
    if (FAILED(hr))
    {
        printf("Querying failed\n");
        return;
    }

    // The KSPROPERTY_VIDEOPROCAMP_* values match the VideoProcAmp_* constants from strmif.h.
    pVideoProc->GetRange(KSPROPERTY_VIDEOPROCAMP_BRIGHTNESS, &Min, &Max, &SteppingDelta, &Default, &CapsFlags);
    pVideoProc->Get(KSPROPERTY_VIDEOPROCAMP_BRIGHTNESS, &val_brightness, &flag);
    pVideoProc->Get(KSPROPERTY_VIDEOPROCAMP_WHITEBALANCE, &val_whitebalance, &flag);
    pVideoProc->Get(KSPROPERTY_VIDEOPROCAMP_GAIN, &val_gain, &flag);

    pVideoProc->Release();
}
bool CaptureDShow::setImageControls(IBaseFilter *filter, const QVariantMap &imageControls) const
{
    if (!filter)
        return false;

    IAMVideoProcAmp *pProcAmp = NULL;

    if (SUCCEEDED(filter->QueryInterface(IID_IAMVideoProcAmp,
                                         reinterpret_cast<void **>(&pProcAmp)))) {
        for (const VideoProcAmpProperty &property: vpapToStr->keys()) {
            QString propertyStr = vpapToStr->value(property);

            if (imageControls.contains(propertyStr))
                pProcAmp->Set(property,
                              imageControls[propertyStr].toInt(),
                              VideoProcAmp_Flags_Manual);
        }

        pProcAmp->Release();
    }

    return true;
}
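// Usage sketch (hypothetical caller): builds a QVariantMap keyed by the same strings that
// vpapToStr maps VideoProcAmp properties to, then applies it through setImageControls above.
// The key names "Brightness" and "Contrast" are assumptions about that table, and
// `capture` / `filter` stand in for an existing CaptureDShow instance and its device filter.
void applyManualControls(CaptureDShow *capture, IBaseFilter *filter)
{
    QVariantMap controls;
    controls["Brightness"] = 128;
    controls["Contrast"]   = 32;

    // Each matching property is written with VideoProcAmp_Flags_Manual.
    capture->setImageControls(filter, controls);
}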
int main()
{
    // for playing
    IGraphBuilder *pGraphBuilder;
    ICaptureGraphBuilder2 *pCaptureGraphBuilder2;
    IMediaControl *pMediaControl;
    IBaseFilter *pDeviceFilter = NULL;

    // to select a video input device
    ICreateDevEnum *pCreateDevEnum = NULL;
    IEnumMoniker *pEnumMoniker = NULL;
    IMoniker *pMoniker = NULL;
    ULONG nFetched = 0;

    // initialize COM
    CoInitialize(NULL);

    //
    // selecting a device
    //

    // Create CreateDevEnum to list devices
    CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER,
                     IID_ICreateDevEnum, (PVOID *)&pCreateDevEnum);

    // Create EnumMoniker to list video input devices
    pCreateDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pEnumMoniker, 0);
    if (pEnumMoniker == NULL)
    {
        // this will be shown if there is no capture device
        printf("no device\n");
        return 0;
    }

    // reset EnumMoniker
    pEnumMoniker->Reset();

    // get each moniker
    while (pEnumMoniker->Next(1, &pMoniker, &nFetched) == S_OK)
    {
        IPropertyBag *pPropertyBag;
        TCHAR devname[256];

        // bind to IPropertyBag
        pMoniker->BindToStorage(0, 0, IID_IPropertyBag, (void **)&pPropertyBag);

        VARIANT var;

        // get FriendlyName
        var.vt = VT_BSTR;
        pPropertyBag->Read(L"FriendlyName", &var, 0);
        WideCharToMultiByte(CP_ACP, 0, var.bstrVal, -1, devname, sizeof(devname), 0, 0);
        VariantClear(&var);

        printf("%s\r\n", devname);
        printf(" select this device ? [y] or [n]\r\n");

        int ch = getchar();

        // you can start playing by 'y' + return key
        // if you press any other key, it will not be played
        if (ch == 'y')
        {
            // bind the moniker to a filter
            pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void **)&pDeviceFilter);
        }
        else
        {
            getchar();
        }

        // release
        pMoniker->Release();
        pPropertyBag->Release();

        if (pDeviceFilter != NULL)
        {
            // go out of the loop once a device has been selected
            break;
        }
    }

    if (pDeviceFilter != NULL)
    {
        //
        // PLAY
        //

        // create FilterGraph
        CoCreateInstance(CLSID_FilterGraph, NULL, CLSCTX_INPROC,
                         IID_IGraphBuilder, (LPVOID *)&pGraphBuilder);

        // create CaptureGraphBuilder2
        CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC,
                         IID_ICaptureGraphBuilder2, (LPVOID *)&pCaptureGraphBuilder2);

        //=============================================================
        //=========== MY CODE =========================================
        //=============================================================
        // COM is already initialized above, so no second CoInitialize is needed here.
        HRESULT hr = S_OK;

        IAMStreamConfig *pConfig = NULL;
        hr = pCaptureGraphBuilder2->FindInterface(&PIN_CATEGORY_CAPTURE, 0, pDeviceFilter,
                                                  IID_IAMStreamConfig, (void **)&pConfig);

        int iCount = 0, iSize = 0;
        hr = pConfig->GetNumberOfCapabilities(&iCount, &iSize);

        // Check the size to make sure we pass in the correct structure.
        if (iSize == sizeof(VIDEO_STREAM_CONFIG_CAPS))
        {
            // Use the video capabilities structure.
            for (int iFormat = 0; iFormat < iCount; iFormat++)
            {
                VIDEO_STREAM_CONFIG_CAPS scc;
                AM_MEDIA_TYPE *pmtConfig;
                hr = pConfig->GetStreamCaps(iFormat, &pmtConfig, (BYTE *)&scc);
                if (SUCCEEDED(hr))
                {
                    /* Examine the format, and possibly use it. */
                    if ((pmtConfig->majortype == MEDIATYPE_Video) &&
                        (pmtConfig->subtype == MEDIASUBTYPE_RGB24) &&
                        (pmtConfig->formattype == FORMAT_VideoInfo) &&
                        (pmtConfig->cbFormat >= sizeof(VIDEOINFOHEADER)) &&
                        (pmtConfig->pbFormat != NULL))
                    {
                        VIDEOINFOHEADER *pVih = (VIDEOINFOHEADER *)pmtConfig->pbFormat;
                        // pVih contains the detailed format information.
                        LONG lWidth = pVih->bmiHeader.biWidth;
                        LONG lHeight = pVih->bmiHeader.biHeight;
                        if (lWidth == 1280)
                        // if (iFormat == 26)
                        {
                            // 2 = '1280x720YUV' YUV, 22 = '1280x800YUV', 26 = '1280x720RGB'
                            hr = pConfig->SetFormat(pmtConfig);
                        }
                    }
                    // Delete the media type when you are done.
                    DeleteMediaType(pmtConfig);
                }
            }
        }
        pConfig->Release();

        // Query the capture filter for the IAMCameraControl interface.
        IAMCameraControl *pCameraControl = 0;
        hr = pDeviceFilter->QueryInterface(IID_IAMCameraControl, (void **)&pCameraControl);
        if (FAILED(hr))
        {
            // The device does not support IAMCameraControl.
        }
        else
        {
            long Min, Max, Step, Default, Flags;

            // Get the range and default values
            hr = pCameraControl->GetRange(CameraControl_Exposure, &Min, &Max, &Step, &Default, &Flags);
            hr = pCameraControl->GetRange(CameraControl_Focus, &Min, &Max, &Step, &Default, &Flags);
            if (SUCCEEDED(hr))
            {
                hr = pCameraControl->Set(CameraControl_Exposure, -11, CameraControl_Flags_Manual); // Min = -11, Max = 1, Step = 1
                hr = pCameraControl->Set(CameraControl_Focus, 12, CameraControl_Flags_Manual);
            }
            pCameraControl->Release();
        }

        // Query the capture filter for the IAMVideoProcAmp interface.
        IAMVideoProcAmp *pProcAmp = 0;
        hr = pDeviceFilter->QueryInterface(IID_IAMVideoProcAmp, (void **)&pProcAmp);
        if (FAILED(hr))
        {
            // The device does not support IAMVideoProcAmp.
        }
        else
        {
            long Min, Max, Step, Default, Flags;

            // Get the range and default values
            hr = pProcAmp->GetRange(VideoProcAmp_Brightness, &Min, &Max, &Step, &Default, &Flags);
            hr = pProcAmp->GetRange(VideoProcAmp_BacklightCompensation, &Min, &Max, &Step, &Default, &Flags);
            hr = pProcAmp->GetRange(VideoProcAmp_Contrast, &Min, &Max, &Step, &Default, &Flags);
            hr = pProcAmp->GetRange(VideoProcAmp_Saturation, &Min, &Max, &Step, &Default, &Flags);
            hr = pProcAmp->GetRange(VideoProcAmp_Sharpness, &Min, &Max, &Step, &Default, &Flags);
            hr = pProcAmp->GetRange(VideoProcAmp_WhiteBalance, &Min, &Max, &Step, &Default, &Flags);
            if (SUCCEEDED(hr))
            {
                hr = pProcAmp->Set(VideoProcAmp_Brightness, 142, VideoProcAmp_Flags_Manual);
                hr = pProcAmp->Set(VideoProcAmp_BacklightCompensation, 0, VideoProcAmp_Flags_Manual);
                hr = pProcAmp->Set(VideoProcAmp_Contrast, 4, VideoProcAmp_Flags_Manual);
                hr = pProcAmp->Set(VideoProcAmp_Saturation, 100, VideoProcAmp_Flags_Manual);
                hr = pProcAmp->Set(VideoProcAmp_Sharpness, 0, VideoProcAmp_Flags_Manual);
                hr = pProcAmp->Set(VideoProcAmp_WhiteBalance, 2800, VideoProcAmp_Flags_Manual);
            }
            pProcAmp->Release();
        }
        //=============================================================
        //=========== END MY CODE =====================================
        //=============================================================

        hr = S_OK;
        CTransformer *trans = new CTransformer("Dif trans", 0, CLSID_DIFFilter, &hr);
        IBaseFilter *ttt = 0;
        trans->QueryInterface(IID_IBaseFilter, (LPVOID *)&ttt);

        // set FilterGraph
        hr = pCaptureGraphBuilder2->SetFiltergraph(pGraphBuilder);

        // get MediaControl interface
        hr = pGraphBuilder->QueryInterface(IID_IMediaControl, (LPVOID *)&pMediaControl);

        // add the filters to the FilterGraph
        hr = pGraphBuilder->AddFilter(ttt, L"Dif trans");
        hr = pGraphBuilder->AddFilter(pDeviceFilter, L"Device Filter");

        // create the graph
        hr = pCaptureGraphBuilder2->RenderStream(&PIN_CATEGORY_CAPTURE, NULL,
                                                 pDeviceFilter, NULL, NULL);

        // start playing
        hr = pMediaControl->Run();

        // to block execution
        // without this message box, the graph would be stopped immediately
        MessageBox(NULL, "Block Execution", "Block", MB_OK);

        // release
        pMediaControl->Release();
        pCaptureGraphBuilder2->Release();
        pGraphBuilder->Release();
    }

    // release
    pEnumMoniker->Release();
    pCreateDevEnum->Release();

    // finalize COM
    CoUninitialize();

    return 0;
}
// use cameraID 1 for the first camera and so on
HRESULT VideoTexture::init(int cameraID)
{
    if (cameraID <= 0)
        return S_FALSE;

    glEnable(GL_TEXTURE_2D);

    // Texture -> This will be put into the camera module
    glGenTextures(1, textures); // Create The Texture

    // Typical Texture Generation Using Data From The Bitmap
    for (int i = 0; i < 1; i++)
    {
        //glActiveTexture(GL_TEXTURE0 + i);
        glBindTexture(GL_TEXTURE_2D, textures[i]);

        // Generate The Texture (640x480... make changeable!)
        //glTexImage2D(GL_TEXTURE_2D, 0, 3, 640, 480, 0, GL_RGB, GL_UNSIGNED_BYTE, ...THe data111!!!);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); // Linear Filtering
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); // Linear Filtering

        // Enable Texture Mapping
        glTexImage2D(GL_TEXTURE_2D, 0, 3, TEXTURE_WIDTH, TEXTURE_HEIGHT, 0, GL_RGB, GL_UNSIGNED_BYTE, NULL);
    }

    // Video stuff:
    // Create capture graph builder:
    HRESULT hr = InitCaptureGraphBuilder(&pGraph, &pBuild);
    if (FAILED(hr))
        return hr;

    IEnumMoniker *enumerator;
    hr = EnumerateDevices(CLSID_VideoInputDeviceCategory, &enumerator);
    //DisplayDeviceInformation(enumerator);

    // Take the cameraID-th camera:
    IMoniker *pMoniker = NULL;
    for (int i = 0; i < cameraID; i++)
    {
        enumerator->Next(1, &pMoniker, NULL);
    }
    enumerator->Release();
    if (pMoniker == NULL)
        return S_FALSE; // fewer devices than cameraID

    IBaseFilter *pCap = NULL;
    hr = pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void **)&pCap);
    pMoniker->Release();
    if (SUCCEEDED(hr))
    {
        hr = pGraph->AddFilter(pCap, L"Capture Filter");
        if (FAILED(hr))
            return hr;
    }
    else
        return hr;

    // Create the Sample Grabber which we will use
    // to take each frame for texture generation
    hr = CoCreateInstance(CLSID_SampleGrabber, NULL, CLSCTX_INPROC_SERVER,
                          IID_ISampleGrabber, (void **)&pGrabber);
    if (FAILED(hr))
        return hr;
    hr = pGrabber->QueryInterface(IID_IBaseFilter, (void **)&pGrabberBase);

    // We have to state the 24-bit RGB desire here
    // so that the proper conversion filters
    // are added automatically.
    AM_MEDIA_TYPE desiredType;
    memset(&desiredType, 0, sizeof(desiredType));
    desiredType.majortype = MEDIATYPE_Video;
    desiredType.subtype = MEDIASUBTYPE_RGB24;
    desiredType.formattype = FORMAT_VideoInfo;

    pGrabber->SetMediaType(&desiredType);
    pGrabber->SetBufferSamples(TRUE);

    // add to graph
    pGraph->AddFilter(pGrabberBase, L"Grabber");

    /* Null renderer filter */
    hr = CoCreateInstance(CLSID_NullRenderer, NULL, CLSCTX_INPROC_SERVER,
                          IID_IBaseFilter, (void **)&pNullRender);
    if (FAILED(hr))
        return hr;
    pGraph->AddFilter(pNullRender, L"Render");

    // Connect the graph
    hr = ConnectFilters(pGraph, pCap, pGrabberBase);
    if (FAILED(hr))
        return hr;
    hr = ConnectFilters(pGraph, pGrabberBase, pNullRender);

    // Set output format of capture:
    IAMStreamConfig *pConfig = NULL;
    hr = pBuild->FindInterface(
        &PIN_CATEGORY_CAPTURE, // Capture pin.
        0,                     // Any media type.
        pCap,                  // Pointer to the capture filter.
        IID_IAMStreamConfig,
        (void **)&pConfig);
    if (FAILED(hr))
        return hr;

    AM_MEDIA_TYPE *pmtConfig;
    hr = pConfig->GetFormat(&pmtConfig);
    if (FAILED(hr))
        return hr;

    // Try to find a good video format
    int iCount = 0, iSize = 0;
    hr = pConfig->GetNumberOfCapabilities(&iCount, &iSize);
    // Check the size to make sure we pass in the correct structure.
    if (iSize == sizeof(VIDEO_STREAM_CONFIG_CAPS))
    {
        // Use the video capabilities structure.
        for (int iFormat = 0; iFormat < iCount; iFormat++)
        {
            VIDEO_STREAM_CONFIG_CAPS scc;
            AM_MEDIA_TYPE *pmtConfig;
            hr = pConfig->GetStreamCaps(iFormat, &pmtConfig, (BYTE *)&scc);
            if (SUCCEEDED(hr))
            {
                VIDEOINFOHEADER *hdr = (VIDEOINFOHEADER *)pmtConfig->pbFormat;
                if (hdr->bmiHeader.biWidth == CAM_WIDTH &&
                    hdr->bmiHeader.biHeight == CAM_HEIGHT &&
                    hdr->bmiHeader.biBitCount == 24)
                {
                    pConfig->SetFormat(pmtConfig);
                }
            }
        }
    }
    pConfig->Release();

    // Set camera stuff
    IAMCameraControl *pCamControl = NULL;
    hr = pCap->QueryInterface(IID_IAMCameraControl, (void **)&pCamControl);
    if (FAILED(hr))
        return hr;

    // Get the range and default value.
    long Min, Max, Step, Default, Flags;
    // For getting:
    long Val;

    hr = pCamControl->GetRange(CameraControl_Focus, &Min, &Max, &Step, &Default, &Flags);
    if (SUCCEEDED(hr))
        pCamControl->Set(CameraControl_Focus, 0, CameraControl_Flags_Manual);
#if 0
    hr = pCamControl->GetRange(CameraControl_Exposure, &Min, &Max, &Step, &Default, &Flags);
    if (SUCCEEDED(hr))
        pCamControl->Set(CameraControl_Exposure, -4, CameraControl_Flags_Manual);
#endif
    pCamControl->Release();

    IAMVideoProcAmp *pProcAmp = 0;
    hr = pCap->QueryInterface(IID_IAMVideoProcAmp, (void **)&pProcAmp);
    if (FAILED(hr))
        return hr;
#if 0
    hr = pProcAmp->GetRange(VideoProcAmp_Brightness, &Min, &Max, &Step, &Default, &Flags);
    if (SUCCEEDED(hr))
        pProcAmp->Set(VideoProcAmp_Brightness, 30, VideoProcAmp_Flags_Manual);

    hr = pProcAmp->GetRange(VideoProcAmp_Gain, &Min, &Max, &Step, &Default, &Flags);
    if (SUCCEEDED(hr))
        pProcAmp->Set(VideoProcAmp_Gain, 30, VideoProcAmp_Flags_Manual);

    hr = pProcAmp->GetRange(VideoProcAmp_WhiteBalance, &Min, &Max, &Step, &Default, &Flags);
    if (SUCCEEDED(hr))
        pProcAmp->Set(VideoProcAmp_WhiteBalance, 4500, VideoProcAmp_Flags_Manual);

    hr = pProcAmp->GetRange(VideoProcAmp_Saturation, &Min, &Max, &Step, &Default, &Flags);
    if (SUCCEEDED(hr))
        pProcAmp->Set(VideoProcAmp_Saturation, 100, VideoProcAmp_Flags_Manual);

    hr = pProcAmp->GetRange(VideoProcAmp_Contrast, &Min, &Max, &Step, &Default, &Flags);
    if (SUCCEEDED(hr))
        pProcAmp->Set(VideoProcAmp_Contrast, 6, VideoProcAmp_Flags_Manual);
#endif
    pProcAmp->Release();

    hr = pMediaControl->Run();

    return hr;
}