int CaptureClass::getProperty(int aProperty, float &aValue, int &aAuto)
{
    HRESULT hr;
    IAMVideoProcAmp *procAmp = NULL;
    IAMCameraControl *control = NULL;

    aAuto = 0;
    aValue = -1;

    int prop = escapiPropToMFProp(aProperty);

    if (aProperty < CAPTURE_PAN)
    {
        // Video-processing properties (brightness, contrast, ...) go through IAMVideoProcAmp.
        hr = mSource->QueryInterface(IID_PPV_ARGS(&procAmp));
        if (SUCCEEDED(hr))
        {
            long min, max, step, def, caps;
            hr = procAmp->GetRange(prop, &min, &max, &step, &def, &caps);
            if (SUCCEEDED(hr))
            {
                long v = 0, f = 0;
                hr = procAmp->Get(prop, &v, &f);
                if (SUCCEEDED(hr))
                {
                    // Report the value normalized to [0,1] over the driver's range.
                    aValue = (v - min) / (float)(max - min);
                    aAuto = !!(f & VideoProcAmp_Flags_Auto);
                }
            }
            procAmp->Release();
            return 0;
        }
    }
    else
    {
        // Camera-control properties (pan, tilt, zoom, ...) go through IAMCameraControl.
        hr = mSource->QueryInterface(IID_PPV_ARGS(&control));
        if (SUCCEEDED(hr))
        {
            long min, max, step, def, caps;
            hr = control->GetRange(prop, &min, &max, &step, &def, &caps);
            if (SUCCEEDED(hr))
            {
                long v = 0, f = 0;
                hr = control->Get(prop, &v, &f);
                if (SUCCEEDED(hr))
                {
                    aValue = (v - min) / (float)(max - min);
                    aAuto = !!(f & CameraControl_Flags_Auto);
                }
            }
            control->Release();
            return 0;
        }
    }
    return 1;
}
void ReadDemo()
{
    HRESULT hr;
    IAMVideoProcAmp *pVideoProc;

    /* [out] */ long Min;
    /* [out] */ long Max;
    /* [out] */ long SteppingDelta;
    /* [out] */ long Default;
    /* [out] */ long CapsFlags = 0;

    long val_brightness, flag;
    long val_whitebalance;
    long val_gain;

    hr = cpVideoCapture->QueryInterface(__uuidof(IAMVideoProcAmp), (void **)&pVideoProc);
    if (FAILED(hr))
    {
        printf("Querying failed\n");
        return; // without the interface the calls below would use an invalid pointer
    }

    // Query the brightness range, then read the current brightness, white balance and gain.
    pVideoProc->GetRange(KSPROPERTY_VIDEOPROCAMP_BRIGHTNESS, &Min, &Max, &SteppingDelta, &Default, &CapsFlags);
    pVideoProc->Get(KSPROPERTY_VIDEOPROCAMP_BRIGHTNESS, &val_brightness, &flag);
    pVideoProc->Get(KSPROPERTY_VIDEOPROCAMP_WHITEBALANCE, &val_whitebalance, &flag);
    pVideoProc->Get(KSPROPERTY_VIDEOPROCAMP_GAIN, &val_gain, &flag);

    pVideoProc->Release();
}
QVariantList CaptureDShow::imageControls(IBaseFilter *filter) const
{
    if (!filter)
        return QVariantList();

    qint32 min;
    qint32 max;
    qint32 step;
    qint32 defaultValue;
    qint32 flags;
    qint32 value;

    QVariantList controls;
    IAMVideoProcAmp *pProcAmp = NULL;

    if (SUCCEEDED(filter->QueryInterface(IID_IAMVideoProcAmp,
                                         reinterpret_cast<void **>(&pProcAmp)))) {
        for (const VideoProcAmpProperty &property: vpapToStr->keys()) {
            if (SUCCEEDED(pProcAmp->GetRange(property,
                                             reinterpret_cast<LONG *>(&min),
                                             reinterpret_cast<LONG *>(&max),
                                             reinterpret_cast<LONG *>(&step),
                                             reinterpret_cast<LONG *>(&defaultValue),
                                             reinterpret_cast<LONG *>(&flags))))
                if (SUCCEEDED(pProcAmp->Get(property,
                                            reinterpret_cast<LONG *>(&value),
                                            reinterpret_cast<LONG *>(&flags)))) {
                    QVariantList control;
                    QString type;

                    if (property == VideoProcAmp_ColorEnable
                        || property == VideoProcAmp_BacklightCompensation)
                        type = "boolean";
                    else
                        type = "integer";

                    if (value == defaultValue)
                        defaultValue = (min + max) / 2;

                    control << vpapToStr->value(property)
                            << type
                            << min
                            << max
                            << step
                            << defaultValue
                            << value
                            << QStringList();

                    controls << QVariant(control);
                }
        }

        pProcAmp->Release();
    }

    return controls;
}
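A minimal usage sketch for the function above (not part of the original source), assuming imageControls() is reachable from the caller and that `capture` and `filter` are a valid CaptureDShow and device filter. Each returned entry is packed as [name, type, min, max, step, default, value, menu], matching the packing order in the loop.

// Hedged sketch: dump the control descriptors returned by imageControls().
#include <QDebug>

void dumpImageControls(const CaptureDShow &capture, IBaseFilter *filter)
{
    for (const QVariant &entry: capture.imageControls(filter)) {
        QVariantList control = entry.toList();
        qDebug() << control[0].toString()   // property name
                 << control[1].toString()   // "integer" or "boolean"
                 << "range:" << control[2].toInt() << "to" << control[3].toInt()
                 << "current:" << control[6].toInt();
    }
}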
bool videoInputCamera::setVideoSettingValue(long prop, long value, long flag)
{
    IAMVideoProcAmp *lpAMVideoProcAmp = NULL;
    HRESULT hr = pInputFilter->QueryInterface(IID_IAMVideoProcAmp, (void **)&lpAMVideoProcAmp);
    if (FAILED(hr))
        return false;

    hr = lpAMVideoProcAmp->Set(prop, value, flag);
    lpAMVideoProcAmp->Release();

    if (FAILED(hr))
        return false;
    else
        return true;
}
bool videoInputCamera::getVideoSettingRange(long prop, long &min, long &max, long &step, long &flag, long &dflt)
{
    IAMVideoProcAmp *lpAMVideoProcAmp = NULL;
    HRESULT hr = pInputFilter->QueryInterface(IID_IAMVideoProcAmp, (void **)&lpAMVideoProcAmp);
    if (FAILED(hr))
        return false;

    hr = lpAMVideoProcAmp->GetRange(prop, &min, &max, &step, &dflt, &flag);
    lpAMVideoProcAmp->Release();

    if (FAILED(hr))
        return false;
    else
        return true;
}
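The two videoInputCamera helpers above pair naturally: query the range first, then apply a manual value inside it. A minimal sketch (not from the original source), assuming an initialized videoInputCamera named `cam` (hypothetical variable) whose pInputFilter is already bound:

// Hedged sketch: set brightness to the middle of its reported range, in manual mode.
long min = 0, max = 0, step = 0, flag = 0, def = 0;
if (cam.getVideoSettingRange(VideoProcAmp_Brightness, min, max, step, flag, def)) {
    long mid = min + (max - min) / 2;   // mid-point of the reported range
    cam.setVideoSettingValue(VideoProcAmp_Brightness, mid, VideoProcAmp_Flags_Manual);
}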
CaptureDeviceParameters MediaFoundationVideoDevice::GetParameters()
{
    CaptureDeviceParameters out;
    if (!this->IsSetup)
    {
        LOG_ERROR("MediaFoundationVideoDevice::GetParameters failed: device is not set up");
        return out;
    }
    if (this->Source == NULL)
    {
        LOG_ERROR("MediaFoundationVideoDevice::GetParameters failed: invalid source");
        return out;
    }

    // Video-processing properties (brightness, contrast, ...) via IAMVideoProcAmp.
    IAMVideoProcAmp *pProcAmp = NULL;
    HRESULT hr = this->Source->QueryInterface(IID_PPV_ARGS(&pProcAmp));
    if (SUCCEEDED(hr))
    {
        for (unsigned int i = 0; i < CaptureDeviceParameters::NUMBER_OF_VIDEO_PROC_PARAMETERS; i++)
        {
            Parameter temp;
            hr = pProcAmp->GetRange(VideoProcAmp_Brightness + i, &temp.Min, &temp.Max, &temp.Step, &temp.Default, &temp.Flag);
            if (SUCCEEDED(hr))
            {
                temp.CurrentValue = temp.Default;
                out.VideoProcParameters[i] = temp;
            }
        }
        pProcAmp->Release();
    }

    // Camera-control properties (pan, tilt, zoom, ...) via IAMCameraControl.
    IAMCameraControl *pProcControl = NULL;
    hr = this->Source->QueryInterface(IID_PPV_ARGS(&pProcControl));
    if (SUCCEEDED(hr))
    {
        for (unsigned int i = 0; i < CaptureDeviceParameters::NUMBER_OF_CAMERA_CONTROL_PARAMETERS; i++)
        {
            Parameter temp;
            hr = pProcControl->GetRange(CameraControl_Pan + i, &temp.Min, &temp.Max, &temp.Step, &temp.Default, &temp.Flag);
            if (SUCCEEDED(hr))
            {
                temp.CurrentValue = temp.Default;
                out.CameraControlParameters[i] = temp;
            }
        }
        pProcControl->Release();
    }

    return out;
}
int CaptureClass::setProperty(int aProperty, float aValue, int aAuto)
{
    HRESULT hr;
    IAMVideoProcAmp *procAmp = NULL;
    IAMCameraControl *control = NULL;

    int prop = escapiPropToMFProp(aProperty);

    if (aProperty < CAPTURE_PAN)
    {
        // Video-processing properties go through IAMVideoProcAmp.
        hr = mSource->QueryInterface(IID_PPV_ARGS(&procAmp));
        if (SUCCEEDED(hr))
        {
            long min, max, step, def, caps;
            hr = procAmp->GetRange(prop, &min, &max, &step, &def, &caps);
            if (SUCCEEDED(hr))
            {
                // Map the normalized [0,1] input onto the driver's range.
                LONG val = (long)floor(min + (max - min) * aValue);
                if (aAuto)
                    val = def;
                hr = procAmp->Set(prop, val, aAuto ? VideoProcAmp_Flags_Auto : VideoProcAmp_Flags_Manual);
            }
            procAmp->Release();
            return !!SUCCEEDED(hr);
        }
    }
    else
    {
        // Camera-control properties (pan, tilt, zoom, ...) go through IAMCameraControl.
        hr = mSource->QueryInterface(IID_PPV_ARGS(&control));
        if (SUCCEEDED(hr))
        {
            long min, max, step, def, caps;
            hr = control->GetRange(prop, &min, &max, &step, &def, &caps);
            if (SUCCEEDED(hr))
            {
                LONG val = (long)floor(min + (max - min) * aValue);
                if (aAuto)
                    val = def;
                hr = control->Set(prop, val, aAuto ? CameraControl_Flags_Auto : CameraControl_Flags_Manual);
            }
            control->Release();
            return !!SUCCEEDED(hr);
        }
    }
    return 1;
}
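A minimal usage sketch for the getProperty/setProperty pair above (not from the original source), assuming an initialized CaptureClass instance named `cap` (hypothetical) and that CAPTURE_BRIGHTNESS is one of the ESCAPI property IDs handled by escapiPropToMFProp. Both calls work on values normalized to the property's driver range.

// Hedged sketch: set brightness to 75% of its range manually, then read it back.
cap.setProperty(CAPTURE_BRIGHTNESS, 0.75f, 0);

float value = 0.0f;
int isAuto = 0;
if (cap.getProperty(CAPTURE_BRIGHTNESS, value, isAuto) == 0)   // 0 means the interface was reachable
    printf("brightness = %.2f (auto = %d)\n", value, isAuto);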
//----------------------------------------------------------------------------
void MediaFoundationVideoDevice::SetParameters(CaptureDeviceParameters newParameters)
{
    if (!this->IsSetup)
    {
        LOG_ERROR("MediaFoundationVideoDevice::SetParameters failed: device is not set up");
        return;
    }
    if (this->Source == NULL)
    {
        LOG_ERROR("MediaFoundationVideoDevice::SetParameters failed: invalid source");
        return;
    }

    // Push only the video-processing values that changed since the last call.
    IAMVideoProcAmp *pProcAmp = NULL;
    HRESULT hr = this->Source->QueryInterface(IID_PPV_ARGS(&pProcAmp));
    if (SUCCEEDED(hr))
    {
        for (unsigned int i = 0; i < CaptureDeviceParameters::NUMBER_OF_VIDEO_PROC_PARAMETERS; i++)
        {
            if (this->PreviousParameters.VideoProcParameters[i].CurrentValue != newParameters.VideoProcParameters[i].CurrentValue
                || this->PreviousParameters.VideoProcParameters[i].Flag != newParameters.VideoProcParameters[i].Flag)
            {
                hr = pProcAmp->Set(VideoProcAmp_Brightness + i, newParameters.VideoProcParameters[i].CurrentValue, newParameters.VideoProcParameters[i].Flag);
            }
        }
        pProcAmp->Release();
    }

    // Same for the camera-control values.
    IAMCameraControl *pProcControl = NULL;
    hr = this->Source->QueryInterface(IID_PPV_ARGS(&pProcControl));
    if (SUCCEEDED(hr))
    {
        for (unsigned int i = 0; i < CaptureDeviceParameters::NUMBER_OF_CAMERA_CONTROL_PARAMETERS; i++)
        {
            if (this->PreviousParameters.CameraControlParameters[i].CurrentValue != newParameters.CameraControlParameters[i].CurrentValue
                || this->PreviousParameters.CameraControlParameters[i].Flag != newParameters.CameraControlParameters[i].Flag)
            {
                hr = pProcControl->Set(CameraControl_Pan + i, newParameters.CameraControlParameters[i].CurrentValue, newParameters.CameraControlParameters[i].Flag);
            }
        }
        pProcControl->Release();
    }

    this->PreviousParameters = newParameters;
}
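A minimal round-trip sketch for the two MediaFoundationVideoDevice methods above (not from the original source), assuming `device` is a set-up MediaFoundationVideoDevice and that index 0 of VideoProcParameters corresponds to VideoProcAmp_Brightness, as the GetParameters loop implies:

// Hedged sketch: read the ranges, then force a manual brightness value.
CaptureDeviceParameters params = device.GetParameters();
Parameter &brightness = params.VideoProcParameters[0];              // VideoProcAmp_Brightness + 0
brightness.CurrentValue = brightness.Min + (brightness.Max - brightness.Min) / 2;
brightness.Flag = VideoProcAmp_Flags_Manual;
device.SetParameters(params);                                       // only changed entries are pushed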
bool CaptureDShow::setImageControls(IBaseFilter *filter, const QVariantMap &imageControls) const
{
    if (!filter)
        return false;

    IAMVideoProcAmp *pProcAmp = NULL;

    if (SUCCEEDED(filter->QueryInterface(IID_IAMVideoProcAmp,
                                         reinterpret_cast<void **>(&pProcAmp)))) {
        for (const VideoProcAmpProperty &property: vpapToStr->keys()) {
            QString propertyStr = vpapToStr->value(property);

            if (imageControls.contains(propertyStr))
                pProcAmp->Set(property,
                              imageControls[propertyStr].toInt(),
                              VideoProcAmp_Flags_Manual);
        }

        pProcAmp->Release();
    }

    return true;
}
// Use cameraID 1 for the first camera, and so on.
HRESULT VideoTexture::init(int cameraID)
{
    if (cameraID <= 0)
        return S_FALSE;

    glEnable(GL_TEXTURE_2D);

    // Texture -> This will be put into the camera module
    glGenTextures(1, textures);   // Create the texture

    // Typical texture generation using data from the bitmap
    for (int i = 0; i < 1; i++)
    {
        //glActiveTexture(GL_TEXTURE0 + i);
        glBindTexture(GL_TEXTURE_2D, textures[i]);
        // Generate the texture (640x480... make changeable!)
        //glTexImage2D(GL_TEXTURE_2D, 0, 3, 640, 480, 0, GL_RGB, GL_UNSIGNED_BYTE, <frame data>);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);   // Linear filtering
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);   // Linear filtering
        // Enable texture mapping
        glTexImage2D(GL_TEXTURE_2D, 0, 3, TEXTURE_WIDTH, TEXTURE_HEIGHT, 0, GL_RGB, GL_UNSIGNED_BYTE, NULL);
    }

    // Video stuff:
    // Create the capture graph builder:
    HRESULT hr = InitCaptureGraphBuilder(&pGraph, &pBuild);
    if (FAILED(hr))
        return hr;

    IEnumMoniker *enumerator;
    hr = EnumerateDevices(CLSID_VideoInputDeviceCategory, &enumerator);
    //DisplayDeviceInformation(enumerator);

    // Take the camera selected by cameraID (1 = first):
    IMoniker *pMoniker = NULL;
    for (int i = 0; i < cameraID; i++)
    {
        enumerator->Next(1, &pMoniker, NULL);
    }

    IBaseFilter *pCap = NULL;
    hr = pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void **)&pCap);
    if (SUCCEEDED(hr))
    {
        hr = pGraph->AddFilter(pCap, L"Capture Filter");
        if (FAILED(hr))
            return hr;
    }
    else
        return hr;

    // Create the Sample Grabber which we will use
    // to take each frame for texture generation.
    hr = CoCreateInstance(CLSID_SampleGrabber, NULL, CLSCTX_INPROC_SERVER,
                          IID_ISampleGrabber, (void **)&pGrabber);
    if (FAILED(hr))
        return hr;
    hr = pGrabber->QueryInterface(IID_IBaseFilter, (void **)&pGrabberBase);

    // Request 24-bit RGB here so that the proper
    // conversion filters are added automatically.
    AM_MEDIA_TYPE desiredType;
    memset(&desiredType, 0, sizeof(desiredType));
    desiredType.majortype = MEDIATYPE_Video;
    desiredType.subtype = MEDIASUBTYPE_RGB24;
    desiredType.formattype = FORMAT_VideoInfo;
    pGrabber->SetMediaType(&desiredType);
    pGrabber->SetBufferSamples(TRUE);

    // Add to graph
    pGraph->AddFilter(pGrabberBase, L"Grabber");

    /* Null render filter */
    hr = CoCreateInstance(CLSID_NullRenderer, NULL, CLSCTX_INPROC_SERVER, IID_IBaseFilter, (void **)&pNullRender);
    if (FAILED(hr))
        return hr;
    pGraph->AddFilter(pNullRender, L"Render");

    // Connect the graph
    hr = ConnectFilters(pGraph, pCap, pGrabberBase);
    if (FAILED(hr))
        return hr;
    hr = ConnectFilters(pGraph, pGrabberBase, pNullRender);

    // Set output format of capture:
    IAMStreamConfig *pConfig = NULL;
    hr = pBuild->FindInterface(
        &PIN_CATEGORY_CAPTURE, // Capture pin.
        0,                     // Any media type.
        pCap,                  // Pointer to the capture filter.
        IID_IAMStreamConfig,
        (void **)&pConfig);
    if (FAILED(hr))
        return hr;

    AM_MEDIA_TYPE *pmtConfig;
    hr = pConfig->GetFormat(&pmtConfig);
    if (FAILED(hr))
        return hr;

    // Try to find a good video format
    int iCount = 0, iSize = 0;
    hr = pConfig->GetNumberOfCapabilities(&iCount, &iSize);
    // Check the size to make sure we pass in the correct structure.
    if (iSize == sizeof(VIDEO_STREAM_CONFIG_CAPS))
    {
        // Use the video capabilities structure.
        for (int iFormat = 0; iFormat < iCount; iFormat++)
        {
            VIDEO_STREAM_CONFIG_CAPS scc;
            AM_MEDIA_TYPE *pmtConfig;
            hr = pConfig->GetStreamCaps(iFormat, &pmtConfig, (BYTE *)&scc);
            if (SUCCEEDED(hr))
            {
                VIDEOINFOHEADER *hdr = (VIDEOINFOHEADER *)pmtConfig->pbFormat;
                if (hdr->bmiHeader.biWidth == CAM_WIDTH &&
                    hdr->bmiHeader.biHeight == CAM_HEIGHT &&
                    hdr->bmiHeader.biBitCount == 24)
                {
                    pConfig->SetFormat(pmtConfig);
                }
            }
        }
    }
    pConfig->Release();

    // Set camera stuff
    IAMCameraControl *pCamControl = NULL;
    hr = pCap->QueryInterface(IID_IAMCameraControl, (void **)&pCamControl);
    if (FAILED(hr))
        return hr;

    // Get the range and default value.
    long Min, Max, Step, Default, Flags;
    // For getting:
    long Val;

    hr = pCamControl->GetRange(CameraControl_Focus, &Min, &Max, &Step, &Default, &Flags);
    if (SUCCEEDED(hr))
        pCamControl->Set(CameraControl_Focus, 0, CameraControl_Flags_Manual);
#if 0
    hr = pCamControl->GetRange(CameraControl_Exposure, &Min, &Max, &Step, &Default, &Flags);
    if (SUCCEEDED(hr))
        pCamControl->Set(CameraControl_Exposure, -4, CameraControl_Flags_Manual);
#endif
    pCamControl->Release();

    IAMVideoProcAmp *pProcAmp = 0;
    hr = pCap->QueryInterface(IID_IAMVideoProcAmp, (void **)&pProcAmp);
    if (FAILED(hr))
        return hr;
#if 0
    hr = pProcAmp->GetRange(VideoProcAmp_Brightness, &Min, &Max, &Step, &Default, &Flags);
    if (SUCCEEDED(hr))
        pProcAmp->Set(VideoProcAmp_Brightness, 30, VideoProcAmp_Flags_Manual);

    hr = pProcAmp->GetRange(VideoProcAmp_Gain, &Min, &Max, &Step, &Default, &Flags);
    if (SUCCEEDED(hr))
        pProcAmp->Set(VideoProcAmp_Gain, 30, VideoProcAmp_Flags_Manual);

    hr = pProcAmp->GetRange(VideoProcAmp_WhiteBalance, &Min, &Max, &Step, &Default, &Flags);
    if (SUCCEEDED(hr))
        pProcAmp->Set(VideoProcAmp_WhiteBalance, 4500, VideoProcAmp_Flags_Manual);

    hr = pProcAmp->GetRange(VideoProcAmp_Saturation, &Min, &Max, &Step, &Default, &Flags);
    if (SUCCEEDED(hr))
        pProcAmp->Set(VideoProcAmp_Saturation, 100, VideoProcAmp_Flags_Manual);

    hr = pProcAmp->GetRange(VideoProcAmp_Contrast, &Min, &Max, &Step, &Default, &Flags);
    if (SUCCEEDED(hr))
        pProcAmp->Set(VideoProcAmp_Contrast, 6, VideoProcAmp_Flags_Manual);
#endif
    pProcAmp->Release();

    hr = pMediaControl->Run();
    return hr;
}
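A minimal call-site sketch for the function above (not from the original source), assuming COM and an OpenGL context are already initialized and that the VideoTexture members used above (pGraph, pBuild, pGrabber, pMediaControl, textures) are declared by the class; `tex` is a hypothetical instance name.

// Hedged sketch: start streaming from the first camera.
VideoTexture tex;
HRESULT hr = tex.init(1);   // camera IDs start at 1, per the comment above
if (FAILED(hr))
    fprintf(stderr, "VideoTexture::init failed: 0x%08lx\n", (unsigned long)hr);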