void VideoGrabber::Init() { mGrabNextFrame = FALSE; mPreviousFrameExists = FALSE; // Setup capture window and connect webcam driver camhwnd = capCreateCaptureWindow (_T("Ahtung!"), 0 , 0, 0, FRAME_WIDTH, FRAME_HEIGHT, 0, 0); SendMessage(camhwnd, WM_CAP_DRIVER_CONNECT, 0, 0); capSetCallbackOnFrame(camhwnd, FrameCallbackProc); capSetCallbackOnVideoStream(camhwnd, FrameCallbackProc); // Use same callback function, consider mGrabNextFrame flag! capSetUserData(camhwnd, this); // Callback functions may use pointer to this VideoGrabber if (mPreviousFrame != NULL) { delete[] mPreviousFrame; mPreviousFrame = NULL; } mMotionDetectedDuringLastSecond = FALSE; // TODO: Use MPEGLAYER3WAVEFORMAT instead this // Setup audio params WAVEFORMATEX wfex; wfex.wFormatTag = WAVE_FORMAT_PCM; wfex.nChannels = 1; // Use mono wfex.nSamplesPerSec = 8000; wfex.nAvgBytesPerSec = 8000; wfex.nBlockAlign = 1; wfex.wBitsPerSample = 8; wfex.cbSize = 0; capSetAudioFormat(camhwnd, &wfex, sizeof(WAVEFORMATEX)); // Setup video capturing and streaming CAPTUREPARMS parms; capCaptureGetSetup(camhwnd, &parms, sizeof(CAPTUREPARMS)); parms.fAbortLeftMouse = FALSE; parms.wPercentDropForError = 100; // Never abort capturing in case of dropped frames parms.fAbortRightMouse = FALSE; //parms.fLimitEnabled = TRUE; //parms.wTimeLimit = 0; // TODO! parms.fYield = TRUE; // TODO! capCaptureSetSetup(camhwnd, &parms, sizeof(parms)); // !!! capSetCallbackOnError(camhwnd, capErrorCallback); // Resume thread for motion detection mListenerHandle = CreateThread(0, 0, ListeningRoutine, this, CREATE_SUSPENDED, &mThreadID); SetEnabled(TRUE); ResumeThread(mListenerHandle); }
// Initialize camera input bool CvCaptureCAM_VFW::open( int wIndex ) { char szDeviceName[80]; char szDeviceVersion[80]; HWND hWndC = 0; close(); if( (unsigned)wIndex >= 10 ) wIndex = 0; for( ; wIndex < 10; wIndex++ ) { if( capGetDriverDescription( wIndex, szDeviceName, sizeof (szDeviceName), szDeviceVersion, sizeof (szDeviceVersion))) { hWndC = capCreateCaptureWindow ( "My Own Capture Window", WS_POPUP | WS_CHILD, 0, 0, 320, 240, 0, 0); if( capDriverConnect (hWndC, wIndex)) break; DestroyWindow( hWndC ); hWndC = 0; } } if( hWndC ) { capWnd = hWndC; hdr = 0; hic = 0; fourcc = (DWORD)-1; memset( &caps, 0, sizeof(caps)); capDriverGetCaps( hWndC, &caps, sizeof(&caps)); ::MoveWindow( hWndC, 0, 0, 320, 240, TRUE ); capSetUserData( hWndC, (size_t)this ); capSetCallbackOnFrame( hWndC, frameCallback ); CAPTUREPARMS p; capCaptureGetSetup(hWndC,&p,sizeof(CAPTUREPARMS)); p.dwRequestMicroSecPerFrame = 66667/2; capCaptureSetSetup(hWndC,&p,sizeof(CAPTUREPARMS)); //capPreview( hWndC, 1 ); capPreviewScale(hWndC,FALSE); capPreviewRate(hWndC,1); } return capWnd != 0; }
// Creates the capture window, connects driver/audio, installs all capture
// callbacks, and (optionally) starts the audio player thread. Any failure
// inside the try block triggers captureCleanup() and rethrows.
//
// FIX: "&param" had been mangled into "¶m" (U+00B6 pilcrow) by HTML-entity
// decoding in both capCaptureGetSetup/capCaptureSetSetup calls — the code
// did not compile. Restored the address-of operator.
void MMCapture::captureInit(UINT framesPerSecond, UINT audioBufferSize) {
  DWORD style = WS_CHILD;
  m_captureWindow = capCreateCaptureWindow(_T("my capture window"), style,0,0,640,480,m_receiver.getWindow(),1);
  if(m_captureWindow == NULL) {
    throwException(_T("%s:Cannot create CaptureWindow:%s"),__TFUNCTION__,getLastErrorText().cstr());
  }
  try {
    // Make this MMCapture reachable from the static capture callbacks.
    CHECKRESULT(capSetUserData( m_captureWindow, this));
    if(captureVideo()) {
      CHECKRESULT(capDriverConnect(m_captureWindow, 0 ));
      m_webCamConnected = true;
    }

    CAPTUREPARMS param;
    CHECKRESULT(capCaptureGetSetup(m_captureWindow,&param,sizeof(param)));
    param.dwRequestMicroSecPerFrame = 1000000 / framesPerSecond;
    param.fYield          = TRUE;   // capture on a background thread
    param.AVStreamMaster  = AVSTREAMMASTER_AUDIO; // AVSTREAMMASTER_NONE;
    param.dwAudioBufferSize = audioBufferSize;
    CHECKRESULT(capCaptureSetSetup(m_captureWindow,&param,sizeof(param)));

    if(captureAudio()) {
      int audioFormatSize = capGetAudioFormat(m_captureWindow,&m_audioFormat, sizeof(m_audioFormat));
      CHECKRESULT(capSetCallbackOnWaveStream( m_captureWindow, captureWaveStreamCallback));
    }
    if(captureVideo()) {
      int videoFormatSize = capGetVideoFormat(m_captureWindow,&m_videoFormat, sizeof(m_videoFormat));
      CHECKRESULT(capSetCallbackOnVideoStream(m_captureWindow, captureVideoStreamCallback));
      CHECKRESULT(capSetCallbackOnFrame(      m_captureWindow, captureFrameCallback));
    }
    CHECKRESULT(capSetCallbackOnStatus(    m_captureWindow, captureStatusCallback));
    CHECKRESULT(capSetCallbackOnCapControl(m_captureWindow, captureControlCallback));
    CHECKRESULT(capSetCallbackOnError(     m_captureWindow, captureErrorCallback));

    if(captureAudio() && m_playAudio) {
      m_audioThread = new AudioPlayerThread(*this);
      TRACE_NEW(m_audioThread);
      m_audioThread->start();
    }
  } catch(...) {
    // Leave no half-initialized capture state behind.
    captureCleanup();
    throw;
  }
}
// Initializes webcam capture: installs callbacks, connects the first
// available driver (0..9), reads the current video format, and — when a
// size is requested — pushes that size back to the driver.
// NOTE: the function body continues past this fragment.
bool CVideoCap::Initialize(int nWidth, int nHeight)
{
	// CAPTUREPARMS gCapTureParms ; // capabilities of the video driver
	CAPDRIVERCAPS gCapDriverCaps;
	DWORD dwSize;

	if (!IsWebCam())
		return false;

	// NOTE(review): user data and callbacks are installed BEFORE the driver
	// is connected below — confirm no callback can fire in between.
	capSetUserData(m_hWndCap, this);
	capSetCallbackOnError(m_hWndCap, capErrorCallback);
	if (!capSetCallbackOnFrame(m_hWndCap, FrameCallbackProc))
	{
		return false;
	}

	// Connect the capture window to the first driver (0..9) that accepts.
	int i;
	for (i = 0; i < 10; i++)
	{
		if (capDriverConnect(m_hWndCap, i))
			break;
	}
	if (i == 10)
		return false;

	dwSize = capGetVideoFormatSize(m_hWndCap);
	// NOTE(review): dwSize can exceed sizeof(BITMAPINFO) (formats may carry
	// palette/FOURCC data after the header); letting capGetVideoFormat write
	// dwSize bytes into a single BITMAPINFO would overflow — verify, or
	// allocate dwSize bytes instead.
	m_lpbmi = new BITMAPINFO;

	// M263 only supports 176*144 and 352*288 (at 352*288, 24-bit color
	// experiments only worked with biPlanes = 1)
	capGetVideoFormat(m_hWndCap, m_lpbmi, dwSize);

	// Apply the caller-requested frame size, if any.
	if (nWidth && nHeight)
	{
		m_lpbmi->bmiHeader.biWidth = nWidth;
		m_lpbmi->bmiHeader.biHeight = nHeight;
		m_lpbmi->bmiHeader.biPlanes = 1;
		// Row stride rounded up to a 32-bit boundary, times height.
		m_lpbmi->bmiHeader.biSizeImage = (((m_lpbmi->bmiHeader.biWidth * m_lpbmi->bmiHeader.biBitCount + 31) & ~31) >> 3) * m_lpbmi->bmiHeader.biHeight;

		// Experience shows some cameras reject the requested resolution.
		if (!capSetVideoFormat(m_hWndCap, m_lpbmi, sizeof(BITMAPINFO)))
			return false;
	}
/* Probes the VFW capture window: caches the current BITMAPINFOHEADER,
 * enumerates which of vfw_formats the driver accepts, and wires up the
 * zbar video interface vtable.  Returns 0 on success.
 *
 * Fixes: (1) the user-data pointer was cast to LONG, which truncates a
 * 64-bit pointer on Win64 — use LONG_PTR; (2) the realloc/calloc results
 * were used unchecked (the old "FIXME check OOM"). */
static int vfw_probe (zbar_video_t *vdo)
{
    video_state_t *state = vdo->state;
    state->bi_size = capGetVideoFormatSize(state->hwnd);
    BITMAPINFOHEADER *bih = realloc(state->bih, state->bi_size);
    if(bih)
        state->bih = bih;   /* on failure the old block stays valid in state->bih */
    if(!capSetUserData(state->hwnd, (LONG_PTR)vdo) ||
       !state->bi_size || !bih ||
       !capGetVideoFormat(state->hwnd, bih, state->bi_size))
        return(err_capture(vdo, SEV_ERROR, ZBAR_ERR_INVALID, __func__,
                           "setting up video capture"));

    zprintf(3, "initial format: " BIH_FMT " (bisz=%x)\n",
            BIH_FIELDS(bih), state->bi_size);

    /* Adopt the driver's dimensions unless the caller fixed them already. */
    if(!vdo->width || !vdo->height) {
        vdo->width = bih->biWidth;
        vdo->height = bih->biHeight;
    }
    vdo->datalen = bih->biSizeImage;

    zprintf(2, "probing supported formats:\n");
    vdo->formats = calloc(VFW_NUM_FORMATS, sizeof(uint32_t));
    if(!vdo->formats)
        return(err_capture(vdo, SEV_ERROR, ZBAR_ERR_NOMEM, __func__,
                           "setting up video capture"));

    int n = 0;
    const uint32_t *fmt;
    for(fmt = vfw_formats; *fmt; fmt++)
        if(vfw_probe_format(vdo, *fmt))
            vdo->formats[n++] = *fmt;

    /* Shrink to the accepted formats plus the zero terminator; keep the
     * old (larger, still valid) buffer if the shrink fails. */
    uint32_t *formats = realloc(vdo->formats, (n + 1) * sizeof(uint32_t));
    if(formats)
        vdo->formats = formats;

    vdo->width = bih->biWidth;
    vdo->height = bih->biHeight;
    vdo->intf = VIDEO_VFW;
    vdo->init = vfw_init;
    vdo->start = vfw_start;
    vdo->stop = vfw_stop;
    vdo->cleanup = vfw_cleanup;
    vdo->nq = vfw_nq;
    vdo->dq = vfw_dq;
    return(0);
}
/* Configures VFW capture parameters for the engine: ~30 fps, no audio,
 * no user prompts, capture loop detached from the application message
 * loop.  Returns 0 on success, -1 if the driver rejects the setup.
 *
 * Cleanup: fAbortLeftMouse/fAbortRightMouse were each assigned twice in
 * the original; the duplicates are removed (same resulting values). */
static int _vfw_engine_setup(VfwEngine *obj){
	CAPTUREPARMS capparam ;
	capCaptureGetSetup(obj->capvideo,&capparam,sizeof(capparam)) ;
	capparam.dwRequestMicroSecPerFrame = 33000 ; /*makes around 30fps*/
	// detach capture from application
	capparam.fYield = TRUE ;
	capparam.fMakeUserHitOKToCapture = FALSE;
	/* never let a stray mouse click abort the capture */
	capparam.fAbortLeftMouse = FALSE;
	capparam.fAbortRightMouse = FALSE;
	capparam.wPercentDropForError = 90 ; /* tolerate up to 90% dropped frames */
	capparam.fCaptureAudio = FALSE ;
	capparam.AVStreamMaster = AVSTREAMMASTER_NONE ;
	if (!capCaptureSetSetup(obj->capvideo,&capparam,sizeof(capparam))){
		ms_error("capCaptureSetSetup failed.");
		return -1;
	}
	/* make the engine reachable from capture callbacks */
	capSetUserData(obj->capvideo, obj);
	return 0;
}
CameraDevice* camera_device_open(const char* name, int inp_channel) { WndCameraDevice* wcd; /* Allocate descriptor and initialize windows-specific fields. */ wcd = _camera_device_alloc(); if (wcd == NULL) { E("%s: Unable to allocate WndCameraDevice instance", __FUNCTION__); return NULL; } wcd->window_name = (name != NULL) ? ASTRDUP(name) : ASTRDUP(_default_window_name); if (wcd->window_name == NULL) { E("%s: Unable to save window name", __FUNCTION__); _camera_device_free(wcd); return NULL; } wcd->input_channel = inp_channel; /* Create capture window that is a child of HWND_MESSAGE window. * We make it invisible, so it doesn't mess with the UI. Also * note that we supply standard HWND_MESSAGE window handle as * the parent window, since we don't want video capturing * machinery to be dependent on the details of our UI. */ wcd->cap_window = capCreateCaptureWindow(wcd->window_name, WS_CHILD, 0, 0, 0, 0, HWND_MESSAGE, 1); if (wcd->cap_window == NULL) { E("%s: Unable to create video capturing window '%s': %d", __FUNCTION__, wcd->window_name, GetLastError()); _camera_device_free(wcd); return NULL; } /* Save capture window descriptor as window's user data. */ capSetUserData(wcd->cap_window, wcd); return &wcd->header; }
/* Resets the camera device after capturing.
 * A new capture request may need different frame dimensions, and the only
 * way to drop the frame info cached inside the capture window is to
 * destroy the window and create it again with the same parameters. */
static void
_camera_device_reset(WndCameraDevice* cd)
{
    if (cd == NULL || cd->cap_window == NULL)
        return;

    capDriverDisconnect(cd->cap_window);

    if (cd->dc != NULL) {
        ReleaseDC(cd->cap_window, cd->dc);
        cd->dc = NULL;
    }

    /* Release every cached buffer; free(NULL) is a no-op. */
    free(cd->gdi_bitmap);
    cd->gdi_bitmap = NULL;
    free(cd->frame_bitmap);
    cd->frame_bitmap = NULL;
    free(cd->framebuffer);
    cd->framebuffer = NULL;
    free(cd->last_frame);
    cd->last_frame = NULL;
    cd->last_frame_size = 0;

    /* Recreate the capturing window. */
    DestroyWindow(cd->cap_window);
    cd->cap_window = capCreateCaptureWindow(cd->window_name, WS_CHILD,
                                            0, 0, 0, 0, HWND_MESSAGE, 1);
    if (cd->cap_window != NULL) {
        /* Save capture window descriptor as window's user data. */
        capSetUserData(cd->cap_window, cd);
    }
}
// Initialize camera input bool CvCaptureCAM_VFW::open( int wIndex ) { char szDeviceName[80]; char szDeviceVersion[80]; HWND hWndC = 0; close(); if( (unsigned)wIndex >= 10 ) wIndex = 0; for( ; wIndex < 10; wIndex++ ) { if( capGetDriverDescription( wIndex, szDeviceName, sizeof (szDeviceName), szDeviceVersion, sizeof (szDeviceVersion))) { hWndC = capCreateCaptureWindow ( "My Own Capture Window", WS_POPUP | WS_CHILD, 0, 0, 320, 240, 0, 0); if( capDriverConnect (hWndC, wIndex)) break; DestroyWindow( hWndC ); hWndC = 0; } } if( hWndC ) { capWnd = hWndC; hdr = 0; hic = 0; fourcc = (DWORD)-1; memset( &caps, 0, sizeof(caps)); capDriverGetCaps( hWndC, &caps, sizeof(caps)); CAPSTATUS status = {}; capGetStatus(hWndC, &status, sizeof(status)); ::SetWindowPos(hWndC, NULL, 0, 0, status.uiImageWidth, status.uiImageHeight, SWP_NOZORDER|SWP_NOMOVE); capSetUserData( hWndC, (size_t)this ); capSetCallbackOnFrame( hWndC, frameCallback ); CAPTUREPARMS p; capCaptureGetSetup(hWndC,&p,sizeof(CAPTUREPARMS)); p.dwRequestMicroSecPerFrame = 66667/2; // 30 FPS capCaptureSetSetup(hWndC,&p,sizeof(CAPTUREPARMS)); //capPreview( hWndC, 1 ); capPreviewScale(hWndC,FALSE); capPreviewRate(hWndC,1); // Get frame initial parameters. const DWORD size = capGetVideoFormatSize(capWnd); if( size > 0 ) { unsigned char *pbi = new unsigned char[size]; if( pbi ) { if( capGetVideoFormat(capWnd, pbi, size) == size ) { BITMAPINFOHEADER& vfmt = ((BITMAPINFO*)pbi)->bmiHeader; widthSet = vfmt.biWidth; heightSet = vfmt.biHeight; fourcc = vfmt.biCompression; } delete []pbi; } } // And alternative way in case of failure. if( widthSet == 0 || heightSet == 0 ) { widthSet = status.uiImageWidth; heightSet = status.uiImageHeight; } } return capWnd != 0; }