void CSoCProjectView::OnDraw(CDC* pDC)
{
    CSoCProjectDoc* pDoc = GetDocument();
    ASSERT_VALID(pDoc);
    if (!pDoc)
        return;

    ClearRect(pDC, RGB(255,255,255));

    // TODO: add draw code for native data here.
    CAPTUREPARMS cp;
    capCaptureGetSetup(pDoc->m_hCamWnd, &cp, sizeof(cp));   // get the current defaults

    cp.dwRequestMicroSecPerFrame = 1;           // Set desired frame rate
    cp.fMakeUserHitOKToCapture   = FALSE;
    cp.fYield                    = TRUE;        // we want capture on a background thread.
    cp.wNumVideoRequested        = (WORD) 1;    // we may get less than this - no problem
    cp.fCaptureAudio             = FALSE;
    cp.vKeyAbort                 = 0;           // If no key is provided, it won't stop...
    cp.fAbortLeftMouse           = FALSE;
    cp.fAbortRightMouse          = FALSE;
    cp.fLimitEnabled             = FALSE;       // we want to stop
    cp.fMCIControl               = FALSE;

    capCaptureSetSetup(pDoc->m_hCamWnd, &cp, sizeof(cp));
}
/* Start capturing / previewing video. */
void CMainFrame::OnVfwPreviewvideo()
{
    // TODO: add your command handler code here.
    CAPTUREPARMS CapParms;

    bPreview = !bPreview;
    if (bPreview) {
        capCaptureGetSetup(m_hWndCap, &CapParms, sizeof(CAPTUREPARMS));
        CapParms.dwIndexSize = 324000;
        CapParms.fMakeUserHitOKToCapture = !CapParms.fMCIControl;
        CapParms.wPercentDropForError = 100;
        CapParms.wNumVideoRequested = 5;
        CapParms.wChunkGranularity = 0;
        CapParms.fYield = TRUE;
        CapParms.fCaptureAudio = FALSE;
        CapParms.vKeyAbort = 0;
        CapParms.fAbortLeftMouse = CapParms.fAbortRightMouse = FALSE;
        CapParms.dwRequestMicroSecPerFrame = 1000000 / 25;   // 25 fps
        capSetCallbackOnYield(m_hWndCap, NULL);
        capCaptureSetSetup(m_hWndCap, &CapParms, sizeof(CAPTUREPARMS));
        capCaptureSequenceNoFile(m_hWndCap);
        m_vfwState = PREVIEW;
    } else {
        capCaptureAbort(m_hWndCap);
    }
}
int cap_configure(cap_cx *cx, char *err)
{
    CAPTUREPARMS params;

    if (capCaptureGetSetup(cx->hwnd, &params, sizeof(params))) {
        params.fYield = TRUE;
        params.fCaptureAudio = FALSE;
        params.wPercentDropForError = 100;
        params.fMakeUserHitOKToCapture = FALSE;
        params.fAbortLeftMouse = FALSE;
        params.fAbortRightMouse = FALSE;
        params.vKeyAbort = 0;

        if (capCaptureSetSetup(cx->hwnd, &params, sizeof(params))) {
            if (capSetCallbackOnVideoStream(cx->hwnd, cap_stream_cb)) {
                return 1;
            }
        }
    }

    set_err("Can't set capture settings");
    return 0;
}
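Several of these examples register a video-stream callback (cap_stream_cb above, vfw_stream_cb, VideoCallbackProc) without showing its shape. A minimal sketch of such a callback, assuming the standard VFW CAPVIDEOCALLBACK signature; the capGetUserData usage and the process_frame consumer are illustrative placeholders, not taken from the source:

/* Hypothetical per-frame callback: the capture window calls it once per streamed
 * frame while capCaptureSequence / capCaptureSequenceNoFile is running. */
static LRESULT CALLBACK cap_stream_cb(HWND hwnd, LPVIDEOHDR lpVHdr)
{
    /* recover the context stored earlier with capSetUserData(hwnd, cx) */
    cap_cx *cx = (cap_cx*)capGetUserData(hwnd);
    if (cx != NULL && lpVHdr != NULL && lpVHdr->dwBytesUsed > 0) {
        /* lpData points at the raw frame bytes in the format reported by
         * capGetVideoFormat; process_frame is a placeholder for the consumer. */
        process_frame(cx, lpVHdr->lpData, lpVHdr->dwBytesUsed);
    }
    return (LRESULT)TRUE;
}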
//
// Setup required capture parameters.
// Do it at once!
BOOL CAviCap::SetCapSetup(CAPTUREPARMS *parms)
{
    CHECKWIN();
    CHECKCNCT();

    if (!parms)
        return FALSE;

    if (!capCaptureSetSetup(GetSafeHwnd(), parms, sizeof(CAPTUREPARMS)) ||
        !_getSetup() || !_getFormat())
        return FALSE;

    return TRUE;
}
void VideoGrabber::Init()
{
    mGrabNextFrame = FALSE;
    mPreviousFrameExists = FALSE;

    // Setup capture window and connect webcam driver
    camhwnd = capCreateCaptureWindow(_T("Ahtung!"), 0, 0, 0, FRAME_WIDTH, FRAME_HEIGHT, 0, 0);
    SendMessage(camhwnd, WM_CAP_DRIVER_CONNECT, 0, 0);
    capSetCallbackOnFrame(camhwnd, FrameCallbackProc);
    capSetCallbackOnVideoStream(camhwnd, FrameCallbackProc);  // Use same callback function, consider mGrabNextFrame flag!
    capSetUserData(camhwnd, this);                             // Callback functions may use pointer to this VideoGrabber

    if (mPreviousFrame != NULL) {
        delete[] mPreviousFrame;
        mPreviousFrame = NULL;
    }
    mMotionDetectedDuringLastSecond = FALSE;

    // TODO: Use MPEGLAYER3WAVEFORMAT instead of this
    // Setup audio params
    WAVEFORMATEX wfex;
    wfex.wFormatTag = WAVE_FORMAT_PCM;
    wfex.nChannels = 1;                  // Use mono
    wfex.nSamplesPerSec = 8000;
    wfex.nAvgBytesPerSec = 8000;
    wfex.nBlockAlign = 1;
    wfex.wBitsPerSample = 8;
    wfex.cbSize = 0;
    capSetAudioFormat(camhwnd, &wfex, sizeof(WAVEFORMATEX));

    // Setup video capturing and streaming
    CAPTUREPARMS parms;
    capCaptureGetSetup(camhwnd, &parms, sizeof(CAPTUREPARMS));
    parms.fAbortLeftMouse = FALSE;
    parms.wPercentDropForError = 100;    // Never abort capturing in case of dropped frames
    parms.fAbortRightMouse = FALSE;
    //parms.fLimitEnabled = TRUE;
    //parms.wTimeLimit = 0;              // TODO!
    parms.fYield = TRUE;                 // TODO!
    capCaptureSetSetup(camhwnd, &parms, sizeof(parms));
    // !!!
    capSetCallbackOnError(camhwnd, capErrorCallback);

    // Resume thread for motion detection
    mListenerHandle = CreateThread(0, 0, ListeningRoutine, this, CREATE_SUSPENDED, &mThreadID);
    SetEnabled(TRUE);
    ResumeThread(mListenerHandle);
}
static int vfw_init (zbar_video_t *vdo, uint32_t fmt)
{
    if(vfw_set_format(vdo, fmt))
        return(-1);

    HWND hwnd = vdo->state->hwnd;
    CAPTUREPARMS cp;
    if(!capCaptureGetSetup(hwnd, &cp, sizeof(cp)))
        return(err_capture(vdo, SEV_ERROR, ZBAR_ERR_WINAPI, __func__,
                           "retrieving capture parameters"));

    cp.dwRequestMicroSecPerFrame = 33333;
    cp.fMakeUserHitOKToCapture = 0;
    cp.wPercentDropForError = 90;
    cp.fYield = 1;
    cp.wNumVideoRequested = vdo->num_images;
    cp.fCaptureAudio = 0;
    cp.vKeyAbort = 0;
    cp.fAbortLeftMouse = 0;
    cp.fAbortRightMouse = 0;
    cp.fLimitEnabled = 0;

    if(!capCaptureSetSetup(hwnd, &cp, sizeof(cp)))
        return(err_capture(vdo, SEV_ERROR, ZBAR_ERR_WINAPI, __func__,
                           "setting capture parameters"));

    if(!capCaptureGetSetup(hwnd, &cp, sizeof(cp)))
        return(err_capture(vdo, SEV_ERROR, ZBAR_ERR_WINAPI, __func__,
                           "checking capture parameters"));

    /* ignore errors since we skipped checking fHasOverlay */
    capOverlay(hwnd, 0);

    if(!capPreview(hwnd, 0) || !capPreviewScale(hwnd, 0))
        err_capture(vdo, SEV_WARNING, ZBAR_ERR_WINAPI, __func__,
                    "disabling preview");

    if(!capSetCallbackOnVideoStream(hwnd, vfw_stream_cb) ||
       !capSetCallbackOnError(hwnd, vfw_error_cb))
        return(err_capture(vdo, SEV_ERROR, ZBAR_ERR_BUSY, __func__,
                           "setting capture callbacks"));

    vdo->num_images = cp.wNumVideoRequested;
    vdo->iomode = VIDEO_MMAP; /* driver provides "locked" buffers */

    zprintf(3, "initialized video capture: %d buffers %ldms/frame\n",
            vdo->num_images, cp.dwRequestMicroSecPerFrame);
    return(0);
}
void CWinVideoGrabber::SetAudioCapture(Bool p_value)
{
    if (!inited)
        return;

    CAPTUREPARMS t_parameters;
    capCaptureGetSetup(videowindow, &t_parameters, sizeof(CAPTUREPARMS));

    t_parameters.fCaptureAudio = p_value;

    BOOL t_result;
    t_result = capCaptureSetSetup(videowindow, &t_parameters, sizeof(CAPTUREPARMS));
}
void CWinVideoGrabber::StartRecording(char *filename)
{
    if (!inited)
        return;

    StopRecording();
    //StopPreviewing();

    CAPTUREPARMS CaptureParms;
    capCaptureGetSetup(videowindow, &CaptureParms, sizeof(CAPTUREPARMS));
    CaptureParms.fYield = True;
    CaptureParms.fAbortLeftMouse = False;
    CaptureParms.fAbortRightMouse = False;
    capCaptureSetSetup(videowindow, &CaptureParms, sizeof(CAPTUREPARMS));

    capFileSetCaptureFile(videowindow, filename);
    capCaptureSequence(videowindow);

    videomode = VIDEOGRABBERMODE_RECORDING;
}
// Initialize camera input
bool CvCaptureCAM_VFW::open( int wIndex )
{
    char szDeviceName[80];
    char szDeviceVersion[80];
    HWND hWndC = 0;

    close();

    if( (unsigned)wIndex >= 10 )
        wIndex = 0;

    for( ; wIndex < 10; wIndex++ )
    {
        if( capGetDriverDescription( wIndex, szDeviceName, sizeof(szDeviceName),
                                     szDeviceVersion, sizeof(szDeviceVersion)))
        {
            hWndC = capCreateCaptureWindow ( "My Own Capture Window",
                                             WS_POPUP | WS_CHILD, 0, 0, 320, 240, 0, 0);
            if( capDriverConnect (hWndC, wIndex))
                break;
            DestroyWindow( hWndC );
            hWndC = 0;
        }
    }

    if( hWndC )
    {
        capWnd = hWndC;
        hdr = 0;
        hic = 0;
        fourcc = (DWORD)-1;

        memset( &caps, 0, sizeof(caps));
        capDriverGetCaps( hWndC, &caps, sizeof(caps));
        ::MoveWindow( hWndC, 0, 0, 320, 240, TRUE );
        capSetUserData( hWndC, (size_t)this );
        capSetCallbackOnFrame( hWndC, frameCallback );

        CAPTUREPARMS p;
        capCaptureGetSetup(hWndC, &p, sizeof(CAPTUREPARMS));
        p.dwRequestMicroSecPerFrame = 66667/2;   // ~30 fps
        capCaptureSetSetup(hWndC, &p, sizeof(CAPTUREPARMS));
        //capPreview( hWndC, 1 );
        capPreviewScale(hWndC, FALSE);
        capPreviewRate(hWndC, 1);
    }
    return capWnd != 0;
}
void MMCapture::captureInit(UINT framesPerSecond, UINT audioBufferSize)
{
    DWORD style = WS_CHILD;
    m_captureWindow = capCreateCaptureWindow(_T("my capture window"), style, 0, 0, 640, 480, m_receiver.getWindow(), 1);
    if(m_captureWindow == NULL) {
        throwException(_T("%s:Cannot create CaptureWindow:%s"), __TFUNCTION__, getLastErrorText().cstr());
    }
    try {
        CHECKRESULT(capSetUserData(m_captureWindow, this));
        if(captureVideo()) {
            CHECKRESULT(capDriverConnect(m_captureWindow, 0));
            m_webCamConnected = true;
        }

        CAPTUREPARMS param;
        CHECKRESULT(capCaptureGetSetup(m_captureWindow, &param, sizeof(param)));
        param.dwRequestMicroSecPerFrame = 1000000 / framesPerSecond;
        param.fYield = TRUE;
        param.AVStreamMaster = AVSTREAMMASTER_AUDIO; // AVSTREAMMASTER_NONE;
        param.dwAudioBufferSize = audioBufferSize;
        CHECKRESULT(capCaptureSetSetup(m_captureWindow, &param, sizeof(param)));

        if(captureAudio()) {
            int audioFormatSize = capGetAudioFormat(m_captureWindow, &m_audioFormat, sizeof(m_audioFormat));
            CHECKRESULT(capSetCallbackOnWaveStream(m_captureWindow, captureWaveStreamCallback));
        }
        if(captureVideo()) {
            int videoFormatSize = capGetVideoFormat(m_captureWindow, &m_videoFormat, sizeof(m_videoFormat));
            CHECKRESULT(capSetCallbackOnVideoStream(m_captureWindow, captureVideoStreamCallback));
            CHECKRESULT(capSetCallbackOnFrame(m_captureWindow, captureFrameCallback));
        }
        CHECKRESULT(capSetCallbackOnStatus(m_captureWindow, captureStatusCallback));
        CHECKRESULT(capSetCallbackOnCapControl(m_captureWindow, captureControlCallback));
        CHECKRESULT(capSetCallbackOnError(m_captureWindow, captureErrorCallback));

        if(captureAudio() && m_playAudio) {
            m_audioThread = new AudioPlayerThread(*this);
            TRACE_NEW(m_audioThread);
            m_audioThread->start();
        }
    } catch(...) {
        captureCleanup();
        throw;
    }
}
void CWinVideoGrabber::SetFrameRate(int p_rate)
{
    if (!inited)
        return;

    CAPTUREPARMS t_parameters;
    capCaptureGetSetup(videowindow, &t_parameters, sizeof(CAPTUREPARMS));

    // p_rate is assumed to be in frames per second. We need to convert this to
    // microseconds per frame for Windows video.
    DWORD t_microsecs_per_frame;
    t_microsecs_per_frame = 1000000 / p_rate;

    t_parameters.dwRequestMicroSecPerFrame = t_microsecs_per_frame;

    BOOL t_result;
    t_result = capCaptureSetSetup(videowindow, &t_parameters, sizeof(CAPTUREPARMS));
}
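The conversion above divides 1000000 by p_rate with no guard. A small sketch of the same frames-per-second to microseconds-per-frame conversion with a zero/negative check; the helper name and the 15 fps fallback are made up for illustration:

// Hypothetical helper: convert a frames-per-second value into the
// dwRequestMicroSecPerFrame value that CAPTUREPARMS expects.
static DWORD FrameRateToMicroSecPerFrame(int p_rate)
{
    if (p_rate <= 0)
        return 66667;                    // fall back to ~15 fps instead of dividing by zero
    return (DWORD)(1000000 / p_rate);
}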
static int _vfw_engine_setup(VfwEngine *obj)
{
    CAPTUREPARMS capparam;
    capCaptureGetSetup(obj->capvideo, &capparam, sizeof(capparam));
    capparam.dwRequestMicroSecPerFrame = 33000; /* makes around 30 fps */
    // detach capture from application
    capparam.fYield = TRUE;
    capparam.fMakeUserHitOKToCapture = FALSE;
    capparam.fAbortLeftMouse = FALSE;
    capparam.fAbortRightMouse = FALSE;
    capparam.wPercentDropForError = 90;
    capparam.fCaptureAudio = FALSE;
    capparam.AVStreamMaster = AVSTREAMMASTER_NONE;
    if (!capCaptureSetSetup(obj->capvideo, &capparam, sizeof(capparam))) {
        ms_error("capCaptureSetSetup failed.");
        return -1;
    }
    capSetUserData(obj->capvideo, obj);
    return 0;
}
//General connection method
BOOL CAviCap::_connect2Driver(int indx)
{
    CHECKWIN();

    if(indx<0 || indx>_totalDrv())
    {
        TRACE("CAviCap Window connection failed: illegal driver index\n");
        iLastError=CAP_ILLEGAL_DRIVERID;
        return FALSE;
    }

    AfxGetApp()->DoWaitCursor(1);

    //connect via VFW
    BOOL ret=capDriverConnect(GetSafeHwnd(), indx);
    checkQuit();

    if(ret)
    {
        //start gathering driver information
        _curDriver=indx;
        if(!_getCaps()|| !_getSetup()|| !_getStatus()|| !_getFormat())
        {
            iLastError=CAP_INIT_FAILED;
            ret=FALSE;
        }
        else
        {
            //try to make the window invisible before testing
            BOOL vsbl=IsWindowVisible();
            if(vsbl) ShowWindow(SW_HIDE);

            //Slow or fast connection?
            if(!m_DoQuickConnection)
            {
                //OK. We have enough time to test the driver :-)
                UP_THR();
#ifdef ON_CONNECT_TEST_BIBITSCOUNT
                _testBiBitsCount();
#endif
                checkQuit();
#ifdef ON_CONNECT_CHECK_VALIDFORMATS
                _testValidFormats();
#endif
                checkQuit();
#ifdef DEFAULT_FORCE_SINGLE_FRAME
                _testInternalBuffers();
#endif
                checkQuit();
                DN_THR();
            }
            //else skip testing

            // Setup default capture parameters
            // OPTIONAL
#ifdef DEFAULT_FORCE_SINGLE_FRAME
            capSetCallbackOnFrame(GetSafeHwnd(), (LPVOID)_1FrameCallbackProc);
            _1FrameCallBackInstalled = TRUE;
#else
            capSetCallbackOnFrame(GetSafeHwnd(), _defaultFrameCallbackProc);
            capPreviewRate(GetSafeHwnd(), _previewRate);
#endif
#ifdef DEFAULT_USED_DOSMEMORY
            _captureParms.wNumVideoRequested=1;
            _captureParms.fUsingDOSMemory=TRUE;
#endif
#ifdef DEFAULT_STEP_CAPTUREAT2X
            _captureParms.fStepCaptureAt2x=TRUE;
            _captureParms.wStepCaptureAverageFrames=3;
#endif
            _captureParms.dwRequestMicroSecPerFrame=20000;
            _captureParms.fYield = TRUE;
            capCaptureSetSetup(GetSafeHwnd(), &_captureParms, sizeof(CAPTUREPARMS));
            capCaptureGetSetup(GetSafeHwnd(), &_captureParms, sizeof(CAPTUREPARMS));

            if(vsbl) ShowWindow(SW_SHOW);   //restore
        }//end else
    }
    else
    {
        AfxGetApp()->DoWaitCursor(0);
        TRACE("Connection to Capture Driver Failed\n");
        iLastError=CAP_CONNECTION_FAILED;
        return FALSE;
    }

    if(ret)
        TRACE("Connected to <%s: %s>\n", GetDriverName(), GetDriverVer());
    else
        TRACE("Connection (phase 2) to Capture Driver Failed\n");

    if(!ret)
        Disconnect();

    return ret;
}
void CSoCProjectView::OnInitialUpdate()
{
    CScrollView::OnInitialUpdate();

    CSize sizeTotal;
    // TODO: calculate the total size of this view.
    sizeTotal.cx = 640;
    sizeTotal.cy = 480;
    SetScrollSizes(MM_TEXT, sizeTotal);

    CSoCProjectDoc* pDoc = GetDocument();

    // TODO: add construction code here.
    RECT r;
    GetClientRect(&r);

    pDoc->m_hCamWnd = capCreateCaptureWindow(
        _T("Capture Window"),
        WS_CHILD | WS_VISIBLE,
        5, 5, r.right-5, r.bottom-5,
        this->m_hWnd,
        NULL);

    // Installed devices can be addressed by camera index 0 through 9.
    if(!capDriverConnect(pDoc->m_hCamWnd, 0))
        AfxMessageBox(_T("Failed to detect the webcam"));

    // Get information about the current driver.
    capDriverGetCaps(pDoc->m_hCamWnd, &pDoc->m_psCapsInfo, sizeof(pDoc->m_psCapsInfo));

    // Check whether the driver provides a video format dialog.
    if(pDoc->m_psCapsInfo.fHasDlgVideoFormat)
    {
        // If it does, call the function below.
        // It opens a dialog where resolution, pixel format and frame buffer size can be chosen.
        // Note: selecting an unsupported video format will leave you with a black screen...
        capDlgVideoFormat(pDoc->m_hCamWnd);
    }

    // Overlay cannot be used when m_psCapsInfo.fHasOverlay is 0.
    if(pDoc->m_psCapsInfo.fHasOverlay)
    {
        // Hardware overlay reduces system load (optional).
        capOverlay(pDoc->m_hCamWnd, FALSE);
    }

    // Fill in the BITMAPINFO.
    capGetVideoFormat(pDoc->m_hCamWnd, &pDoc->m_BTMInfo, sizeof(pDoc->m_BTMInfo));

    // Preview is rendered at the given interval in milliseconds.
    capPreviewRate(pDoc->m_hCamWnd, 1);
    // Start the preview.
    capPreview(pDoc->m_hCamWnd, TRUE);

    CAPTUREPARMS cp;
    capCaptureGetSetup(pDoc->m_hCamWnd, &cp, sizeof(cp));   // get the current defaults

    cp.dwRequestMicroSecPerFrame = 1;           // Set desired frame rate
    cp.fMakeUserHitOKToCapture   = FALSE;
    cp.fYield                    = TRUE;        // we want capture on a background thread.
    cp.wNumVideoRequested        = (WORD) 1;    // we may get less than this - no problem
    cp.fCaptureAudio             = FALSE;
    cp.vKeyAbort                 = 0;           // If no key is provided, it won't stop...
    cp.fAbortLeftMouse           = FALSE;
    cp.fAbortRightMouse          = FALSE;
    cp.fLimitEnabled             = FALSE;       // we want to stop
    cp.fMCIControl               = FALSE;

    capCaptureSetSetup(pDoc->m_hCamWnd, &cp, sizeof(cp));

    capSetCallbackOnVideoStream(pDoc->m_hCamWnd, VideoCallbackProc);
    capSetCallbackOnFrame(pDoc->m_hCamWnd, VideoCallbackProc);
}
int camera_device_start_capturing(CameraDevice* cd,
                                  uint32_t pixel_format,
                                  int frame_width,
                                  int frame_height)
{
    WndCameraDevice* wcd;
    HBITMAP bm_handle;
    BITMAP  bitmap;
    size_t  format_info_size;
    CAPTUREPARMS cap_param;

    if (cd == NULL || cd->opaque == NULL) {
        E("%s: Invalid camera device descriptor", __FUNCTION__);
        return -1;
    }
    wcd = (WndCameraDevice*)cd->opaque;

    /* wcd->dc is an indicator of capturing: !NULL - capturing, NULL - not */
    if (wcd->dc != NULL) {
        W("%s: Capturing is already on for device '%s'",
          __FUNCTION__, wcd->window_name);
        return 0;
    }

    /* Connect capture window to the video capture driver. */
    if (!capDriverConnect(wcd->cap_window, wcd->input_channel)) {
        return -1;
    }

    /* Get current frame information from the driver. */
    format_info_size = capGetVideoFormatSize(wcd->cap_window);
    if (format_info_size == 0) {
        E("%s: Unable to get video format size: %d",
          __FUNCTION__, GetLastError());
        _camera_device_reset(wcd);
        return -1;
    }
    wcd->frame_bitmap = (BITMAPINFO*)malloc(format_info_size);
    if (wcd->frame_bitmap == NULL) {
        E("%s: Unable to allocate frame bitmap info buffer", __FUNCTION__);
        _camera_device_reset(wcd);
        return -1;
    }
    if (!capGetVideoFormat(wcd->cap_window, wcd->frame_bitmap,
                           format_info_size)) {
        E("%s: Unable to obtain video format: %d", __FUNCTION__, GetLastError());
        _camera_device_reset(wcd);
        return -1;
    }

    /* Let's see if we need to set different frame dimensions. */
    if (wcd->frame_bitmap->bmiHeader.biWidth != frame_width ||
            abs(wcd->frame_bitmap->bmiHeader.biHeight) != frame_height) {
        /* Dimensions don't match. Set new frame info. */
        wcd->frame_bitmap->bmiHeader.biWidth  = frame_width;
        wcd->frame_bitmap->bmiHeader.biHeight = frame_height;
        /* We need to recalculate image size, since the capture window / driver
         * will use the image size provided by us. */
        if (wcd->frame_bitmap->bmiHeader.biBitCount == 24) {
            /* Special case that may require WORD boundary alignment. */
            uint32_t bpl = (frame_width * 3 + 1) & ~1;
            wcd->frame_bitmap->bmiHeader.biSizeImage = bpl * frame_height;
        } else {
            wcd->frame_bitmap->bmiHeader.biSizeImage =
                (frame_width * frame_height *
                 wcd->frame_bitmap->bmiHeader.biBitCount) / 8;
        }
        if (!capSetVideoFormat(wcd->cap_window, wcd->frame_bitmap,
                               format_info_size)) {
            E("%s: Unable to set video format: %d", __FUNCTION__, GetLastError());
            _camera_device_reset(wcd);
            return -1;
        }
    }

    if (wcd->frame_bitmap->bmiHeader.biCompression > BI_PNG) {
        D("%s: Video capturing driver has reported pixel format %.4s",
          __FUNCTION__, (const char*)&wcd->frame_bitmap->bmiHeader.biCompression);
    }

    /* Most of the time frame bitmaps come in "bottom-up" form, where the origin
     * is the lower-left corner. However, they could be in the normal "top-down"
     * form with the origin in the upper-left corner. So we must adjust the
     * biHeight field, since the way the "top-down" form is reported here is by
     * setting biHeight to a negative value. */
    if (wcd->frame_bitmap->bmiHeader.biHeight < 0) {
        wcd->frame_bitmap->bmiHeader.biHeight =
            -wcd->frame_bitmap->bmiHeader.biHeight;
        wcd->is_top_down = 1;
    } else {
        wcd->is_top_down = 0;
    }

    /* Get a DC for the capturing window that will be used when we deal with
     * bitmaps obtained from the camera device during frame capturing. */
    wcd->dc = GetDC(wcd->cap_window);
    if (wcd->dc == NULL) {
        E("%s: Unable to obtain DC for %s: %d",
          __FUNCTION__, wcd->window_name, GetLastError());
        _camera_device_reset(wcd);
        return -1;
    }

    /* Setup some capture parameters. */
    if (capCaptureGetSetup(wcd->cap_window, &cap_param, sizeof(cap_param))) {
        /* Use a separate thread to capture the video stream. */
        cap_param.fYield = TRUE;
        /* Don't show any dialogs. */
        cap_param.fMakeUserHitOKToCapture = FALSE;
        capCaptureSetSetup(wcd->cap_window, &cap_param, sizeof(cap_param));
    }

    /*
     * At this point we need to grab a frame to properly set up the framebuffer
     * and calculate the pixel format. The problem is that the bitmap information
     * obtained from the driver doesn't necessarily match the actual bitmap we're
     * going to obtain via capGrabFrame / capEditCopy / GetClipboardData.
     */

    /* Grab a frame, and post it to the clipboard. Not very effective, but this
     * is how the capXxx API operates. */
    if (!capGrabFrameNoStop(wcd->cap_window) ||
        !capEditCopy(wcd->cap_window) ||
        !OpenClipboard(wcd->cap_window)) {
        E("%s: Device '%s' is unable to save frame to the clipboard: %d",
          __FUNCTION__, wcd->window_name, GetLastError());
        _camera_device_reset(wcd);
        return -1;
    }

    /* Get the bitmap handle saved into the clipboard. Note that the bitmap is
     * still owned by the clipboard here! */
    bm_handle = (HBITMAP)GetClipboardData(CF_BITMAP);
    if (bm_handle == NULL) {
        E("%s: Device '%s' is unable to obtain frame from the clipboard: %d",
          __FUNCTION__, wcd->window_name, GetLastError());
        CloseClipboard();
        _camera_device_reset(wcd);
        return -1;
    }

    /* Get the bitmap object that is initialized with the actual bitmap info. */
    if (!GetObject(bm_handle, sizeof(BITMAP), &bitmap)) {
        E("%s: Device '%s' is unable to obtain frame's bitmap: %d",
          __FUNCTION__, wcd->window_name, GetLastError());
        EmptyClipboard();
        CloseClipboard();
        _camera_device_reset(wcd);
        return -1;
    }

    /* Now we have all we need in 'bitmap'. */
    EmptyClipboard();
    CloseClipboard();

    /* Make sure that dimensions match. Otherwise - fail. */
    if (wcd->frame_bitmap->bmiHeader.biWidth != bitmap.bmWidth ||
        wcd->frame_bitmap->bmiHeader.biHeight != bitmap.bmHeight) {
        E("%s: Requested dimensions %dx%d do not match the actual %dx%d",
          __FUNCTION__, frame_width, frame_height,
          wcd->frame_bitmap->bmiHeader.biWidth,
          wcd->frame_bitmap->bmiHeader.biHeight);
        _camera_device_reset(wcd);
        return -1;
    }

    /* Create the bitmap info that will be used with GetDIBits. */
    wcd->gdi_bitmap = (BITMAPINFO*)malloc(wcd->frame_bitmap->bmiHeader.biSize);
    if (wcd->gdi_bitmap == NULL) {
        E("%s: Unable to allocate gdi bitmap info", __FUNCTION__);
        _camera_device_reset(wcd);
        return -1;
    }
    memcpy(wcd->gdi_bitmap, wcd->frame_bitmap,
           wcd->frame_bitmap->bmiHeader.biSize);
    wcd->gdi_bitmap->bmiHeader.biCompression = BI_RGB;
    wcd->gdi_bitmap->bmiHeader.biBitCount = bitmap.bmBitsPixel;
    /* Image size is bytes per scan line times the number of lines. */
    wcd->gdi_bitmap->bmiHeader.biSizeImage = bitmap.bmWidthBytes * bitmap.bmHeight;
    /* Adjust GDI's bitmap biHeight for the proper frame direction ("top-down" or
     * "bottom-up"). We do this trick in order to simplify pixel format conversion
     * routines, where we always assume "top-down" frames. The trick here is to
     * have negative biHeight in 'gdi_bitmap' if the driver provides "bottom-up"
     * frames, and positive biHeight in 'gdi_bitmap' if the driver provides
     * "top-down" frames. This way GetDIBits will always return "top-down" frames. */
    if (wcd->is_top_down) {
        wcd->gdi_bitmap->bmiHeader.biHeight =
            wcd->frame_bitmap->bmiHeader.biHeight;
    } else {
        wcd->gdi_bitmap->bmiHeader.biHeight =
            -wcd->frame_bitmap->bmiHeader.biHeight;
    }

    /* Allocate the framebuffer. */
    wcd->framebuffer = (uint8_t*)malloc(wcd->gdi_bitmap->bmiHeader.biSizeImage);
    if (wcd->framebuffer == NULL) {
        E("%s: Unable to allocate %d bytes for framebuffer",
          __FUNCTION__, wcd->gdi_bitmap->bmiHeader.biSizeImage);
        _camera_device_reset(wcd);
        return -1;
    }

    /* Let's see what pixel format we will use. */
    if (wcd->gdi_bitmap->bmiHeader.biBitCount == 16) {
        wcd->pixel_format = V4L2_PIX_FMT_RGB565;
    } else if (wcd->gdi_bitmap->bmiHeader.biBitCount == 24) {
        wcd->pixel_format = V4L2_PIX_FMT_BGR24;
    } else if (wcd->gdi_bitmap->bmiHeader.biBitCount == 32) {
        wcd->pixel_format = V4L2_PIX_FMT_BGR32;
    } else {
        E("%s: Unsupported number of bits per pixel %d",
          __FUNCTION__, wcd->gdi_bitmap->bmiHeader.biBitCount);
        _camera_device_reset(wcd);
        return -1;
    }

    D("%s: Capturing device '%s': %d bits per pixel in %.4s [%dx%d] frame",
      __FUNCTION__, wcd->window_name, wcd->gdi_bitmap->bmiHeader.biBitCount,
      (const char*)&wcd->pixel_format, wcd->frame_bitmap->bmiHeader.biWidth,
      wcd->frame_bitmap->bmiHeader.biHeight);

    /* Try to set up the capture frame callback. */
    wcd->use_clipboard = 1;
    if (capSetCallbackOnFrame(wcd->cap_window, _on_captured_frame)) {
        /* Callback is set. Don't use the clipboard when capturing frames. */
        wcd->use_clipboard = 0;
    }

    return 0;
}
// Initialize camera input
bool CvCaptureCAM_VFW::open( int wIndex )
{
    char szDeviceName[80];
    char szDeviceVersion[80];
    HWND hWndC = 0;

    close();

    if( (unsigned)wIndex >= 10 )
        wIndex = 0;

    for( ; wIndex < 10; wIndex++ )
    {
        if( capGetDriverDescription( wIndex, szDeviceName, sizeof(szDeviceName),
                                     szDeviceVersion, sizeof(szDeviceVersion)))
        {
            hWndC = capCreateCaptureWindow ( "My Own Capture Window",
                                             WS_POPUP | WS_CHILD, 0, 0, 320, 240, 0, 0);
            if( capDriverConnect (hWndC, wIndex))
                break;
            DestroyWindow( hWndC );
            hWndC = 0;
        }
    }

    if( hWndC )
    {
        capWnd = hWndC;
        hdr = 0;
        hic = 0;
        fourcc = (DWORD)-1;

        memset( &caps, 0, sizeof(caps));
        capDriverGetCaps( hWndC, &caps, sizeof(caps));

        CAPSTATUS status = {};
        capGetStatus(hWndC, &status, sizeof(status));
        ::SetWindowPos(hWndC, NULL, 0, 0, status.uiImageWidth, status.uiImageHeight,
                       SWP_NOZORDER|SWP_NOMOVE);
        capSetUserData( hWndC, (size_t)this );
        capSetCallbackOnFrame( hWndC, frameCallback );

        CAPTUREPARMS p;
        capCaptureGetSetup(hWndC, &p, sizeof(CAPTUREPARMS));
        p.dwRequestMicroSecPerFrame = 66667/2; // 30 FPS
        capCaptureSetSetup(hWndC, &p, sizeof(CAPTUREPARMS));
        //capPreview( hWndC, 1 );
        capPreviewScale(hWndC, FALSE);
        capPreviewRate(hWndC, 1);

        // Get frame initial parameters.
        const DWORD size = capGetVideoFormatSize(capWnd);
        if( size > 0 )
        {
            unsigned char *pbi = new unsigned char[size];
            if( pbi )
            {
                if( capGetVideoFormat(capWnd, pbi, size) == size )
                {
                    BITMAPINFOHEADER& vfmt = ((BITMAPINFO*)pbi)->bmiHeader;
                    widthSet = vfmt.biWidth;
                    heightSet = vfmt.biHeight;
                    fourcc = vfmt.biCompression;
                }
                delete []pbi;
            }
        }
        // And an alternative way in case of failure.
        if( widthSet == 0 || heightSet == 0 )
        {
            widthSet = status.uiImageWidth;
            heightSet = status.uiImageHeight;
        }
    }
    return capWnd != 0;
}
bool CvCaptureCAM_VFW::setProperty(int property_id, double value)
{
    bool handledSize = false;

    switch( property_id )
    {
    case CV_CAP_PROP_FRAME_WIDTH:
        width = cvRound(value);
        handledSize = true;
        break;
    case CV_CAP_PROP_FRAME_HEIGHT:
        height = cvRound(value);
        handledSize = true;
        break;
    case CV_CAP_PROP_FOURCC:
        break;
    case CV_CAP_PROP_FPS:
        if( value > 0 )
        {
            CAPTUREPARMS params;
            if( capCaptureGetSetup(capWnd, &params, sizeof(params)) )
            {
                params.dwRequestMicroSecPerFrame = cvRound(1e6/value);
                return capCaptureSetSetup(capWnd, &params, sizeof(params)) == TRUE;
            }
        }
        break;
    default:
        break;
    }

    if ( handledSize )
    {
        // If both width and height are set then change frame size.
        if( width > 0 && height > 0 )
        {
            const DWORD size = capGetVideoFormatSize(capWnd);
            if( size == 0 )
                return false;

            unsigned char *pbi = new unsigned char[size];
            if( !pbi )
                return false;

            if( capGetVideoFormat(capWnd, pbi, size) != size )
            {
                delete []pbi;
                return false;
            }

            BITMAPINFOHEADER& vfmt = ((BITMAPINFO*)pbi)->bmiHeader;
            bool success = true;
            if( width != vfmt.biWidth || height != vfmt.biHeight )
            {
                // Change frame size.
                vfmt.biWidth = width;
                vfmt.biHeight = height;
                vfmt.biSizeImage = height * ((width * vfmt.biBitCount + 31) / 32) * 4;
                vfmt.biCompression = BI_RGB;
                success = capSetVideoFormat(capWnd, pbi, size) == TRUE;
            }

            if( success )
            {
                // Adjust capture window size.
                CAPSTATUS status = {};
                capGetStatus(capWnd, &status, sizeof(status));
                ::SetWindowPos(capWnd, NULL, 0, 0, status.uiImageWidth, status.uiImageHeight,
                               SWP_NOZORDER|SWP_NOMOVE);
                // Store frame size.
                widthSet = width;
                heightSet = height;
            }
            delete []pbi;
            width = height = -1;
            return success;
        }
        return true;
    }
    return false;
}
long __stdcall DlgProc( HWND hWnd, unsigned msg, unsigned wParam, long lParam )
{
    switch(msg)
    {
    case WM_INITDIALOG:
        //hEdit = GetDlgItem( hWnd, I_EDIT );
        //GetClientRect( hEdit, &rect );
        hWndCap = capCreateCaptureWindow( NULL, WS_CHILD | WS_VISIBLE, 0, 0, 320, 240, hWnd, 1235 );
        //hWndCap = capCreateCaptureWindow ( NULL, WS_CHILD | WS_VISIBLE , 0, 0, (rect.right-rect.left ), (rect.bottom-rect.top), hEdit, 1235);

        // fill in the COMPVARS structure by hand
        ZeroMemory( &CapVar, sizeof(COMPVARS) );
        CapVar.cbSize = sizeof(COMPVARS);
        CapVar.dwFlags = ICMF_COMPVARS_VALID;
        CapVar.cbState = 0;
        CapVar.fccHandler = mmioFOURCC( 'x', '2', '6', '4' );
        CapVar.fccType = ICTYPE_VIDEO;
        // open the codec for compression (this takes a while)
        CapVar.hic = ICOpen( ICTYPE_VIDEO, CapVar.fccHandler, ICMODE_COMPRESS );

        hThread = CreateThread( NULL, 0, (LPTHREAD_START_ROUTINE)SendThread, NULL, 0, 0 );
        return -1;

    case WM_COMMAND:
        switch(LOWORD(wParam))
        {
        case I_BUTTON_CONN:
            if( !capDriverConnect( hWndCap, 0 ) )
            {
                EndDialog( hWnd, 0 );
                return -1;
            }

            capCaptureGetSetup( hWndCap, &CapParms, sizeof(CAPTUREPARMS) );
            CapParms.dwRequestMicroSecPerFrame = 66000;
            CapParms.fLimitEnabled = FALSE;
            CapParms.fCaptureAudio = FALSE;
            CapParms.fMCIControl = FALSE;
            CapParms.fYield = TRUE;
            CapParms.vKeyAbort = VK_ESCAPE;
            CapParms.fAbortLeftMouse = FALSE;
            CapParms.fAbortRightMouse = FALSE;
            capCaptureSetSetup( hWndCap, &CapParms, sizeof(CAPTUREPARMS) );

            capPreviewScale( hWndCap, 1 );
            capPreviewRate( hWndCap, 66 );
            capPreviewScale( hWndCap, FALSE );
            capPreview( hWndCap, 1 );

            //added by jimmy
            // OPTIONAL STEP: Setup resolution
            capGetVideoFormat( hWndCap, &InputBmpInfo, sizeof(InputBmpInfo) );
            //InputBmpInfo.bmiHeader.biWidth  = 320; //(rect.right-rect.left );
            //InputBmpInfo.bmiHeader.biHeight = 240; //(rect.bottom-rect.top);
            //InputBmpInfo.bmiHeader.biBitCount = 24;
            capSetVideoFormat( hWndCap, &InputBmpInfo, sizeof(InputBmpInfo) );
            //capDriverDisconnect (hWndCap, 0);//Can we do better?
            //capDriverConnect (hWndCap, 0);

            capSetCallbackOnFrame( hWndCap, FrameCallBack );

            if( CapVar.hic > 0 )
            {
                // returns the size of the output format structure for the input InputBmpInfo
                OutFormatSize = ICCompressGetFormatSize( CapVar.hic, &InputBmpInfo.bmiHeader );
                // fills the output format structure OutputBmpInfo
                ICCompressGetFormat( CapVar.hic, &InputBmpInfo.bmiHeader, &OutputBmpInfo.bmiHeader );
                // maximum size of one compressed (output) frame
                OutBufferSize = ICCompressGetSize( CapVar.hic, &InputBmpInfo.bmiHeader, &OutputBmpInfo.bmiHeader );
                // start compression
                ICSeqCompressFrameStart( &CapVar, &InputBmpInfo );
            }
            break;

        case I_BUTTON_EXIT:
            ICSeqCompressFrameEnd(&CapVar);   // end compression
            ICCompressorFree(&CapVar);
            ICClose(CapVar.hic);
            capPreview( hWndCap, false );
            capDriverDisconnect( hWndCap );
            EndDialog( hWnd, 0 );
            break;
        }
        return -1;

    case WM_CLOSE:
        ICSeqCompressFrameEnd(&CapVar);   // end compression
        ICCompressorFree(&CapVar);
        ICClose(CapVar.hic);
        capPreview( hWndCap, false );
        capDriverDisconnect( hWndCap );
        EndDialog( hWnd, 0 );
        return -1;
    }
    return 0;
}
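All of the examples above share the same read-modify-write pattern around CAPTUREPARMS. A minimal sketch of that pattern on its own, assuming only a connected capture window; the function name and the particular fields set here are illustrative, not taken from any one project above:

// Hypothetical distilled version of the pattern used throughout these examples:
// read the driver's current CAPTUREPARMS, adjust a few fields, write the block back.
static BOOL ConfigureCapture(HWND hwndCap, DWORD microSecPerFrame)
{
    CAPTUREPARMS cp;
    if (!capCaptureGetSetup(hwndCap, &cp, sizeof(cp)))    // start from current defaults
        return FALSE;
    cp.dwRequestMicroSecPerFrame = microSecPerFrame;      // requested frame interval
    cp.fYield                    = TRUE;                  // capture on a background thread
    cp.fMakeUserHitOKToCapture   = FALSE;                 // no confirmation dialog
    cp.fCaptureAudio             = FALSE;                 // video only
    return capCaptureSetSetup(hwndCap, &cp, sizeof(cp));  // apply the modified block
}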