/* Resets camera device after capturing.
 * Since a new capture request may require different frame dimensions we must
 * reset frame info cached in the capture window. The only way to do that is
 * to close the capture window, and then reopen it again.
 * Param:
 *  cd - Camera device descriptor to reset. NULL is silently ignored. */
static void _camera_device_reset(WndCameraDevice* cd)
{
    if (cd != NULL && cd->cap_window != NULL) {
        /* Detach the driver before tearing the window down. */
        capDriverDisconnect(cd->cap_window);

        if (cd->dc != NULL) {
            ReleaseDC(cd->cap_window, cd->dc);
            cd->dc = NULL;
        }
        if (cd->gdi_bitmap != NULL) {
            free(cd->gdi_bitmap);
            cd->gdi_bitmap = NULL;
        }
        if (cd->frame_bitmap != NULL) {
            free(cd->frame_bitmap);
            cd->frame_bitmap = NULL;
        }
        if (cd->framebuffer != NULL) {
            free(cd->framebuffer);
            cd->framebuffer = NULL;
        }

        /* Recreate the capturing window. */
        DestroyWindow(cd->cap_window);
        cd->cap_window = capCreateCaptureWindow(cd->window_name, WS_CHILD, 0, 0,
                                                0, 0, HWND_MESSAGE, 1);
        /* BUG FIX: capCreateCaptureWindow may fail, and a freshly created
         * window has no user data: capture callbacks locate this descriptor
         * through capGetUserData, so it must be restored here exactly as
         * camera_device_open does after the initial creation. */
        if (cd->cap_window != NULL) {
            /* Save capture window descriptor as window's user data. */
            capSetUserData(cd->cap_window, cd);
        }
    }
}
bool OS::IssetWebCam() { HWND hWnd = capCreateCaptureWindow("CapWebCam", WS_CHILD, 0, 0, 0, 0, GetDesktopWindow(), 0); if(!SendMessage(hWnd, WM_CAP_DRIVER_CONNECT, 0, 0)) { DestroyWindow(hWnd); return false; } return true; }
//open sequence grabber component instance and draw to window CWinVideoGrabber::CWinVideoGrabber(HWND whichwindow) { parentwindow = whichwindow; videomode = VIDEOGRABBERMODE_NONE; videowindow = capCreateCaptureWindow("CAPTURE", WS_CHILD | WS_VISIBLE, 0, 0, 160, 120, (HWND) whichwindow, winid++); SendMessage (videowindow, WM_CAP_DRIVER_CONNECT, 0, 0L); capPreviewScale(videowindow,True); capPreviewRate(videowindow, 50); Init(); }
/* Creates the hidden popup window used for VFW capture.
 * On failure an error message is recorded via set_err() and NULL is
 * returned; on success the new window handle is returned. */
HWND cap_create_hwnd(char *err)
{
    HWND wnd = capCreateCaptureWindow("", WS_POPUP, 0, 0, 0, 0, NULL, 0);

    if (wnd == NULL) {
        set_err("Can't create capture window");
        return NULL;
    }
    return wnd;
}
// Initializes video/audio capture for motion detection.
// Sets up the VFW capture window, registers the frame callbacks, configures
// audio (8 kHz / 8-bit / mono PCM) and capture parameters, then spawns the
// listening thread (created suspended so it can be enabled first).
void VideoGrabber::Init()
{
    mGrabNextFrame = FALSE;
    mPreviousFrameExists = FALSE;

    // Setup capture window and connect webcam driver
    camhwnd = capCreateCaptureWindow (_T("Ahtung!"), 0 , 0, 0, FRAME_WIDTH, FRAME_HEIGHT, 0, 0);
    SendMessage(camhwnd, WM_CAP_DRIVER_CONNECT, 0, 0);
    capSetCallbackOnFrame(camhwnd, FrameCallbackProc);
    capSetCallbackOnVideoStream(camhwnd, FrameCallbackProc); // Use same callback function, consider mGrabNextFrame flag!
    capSetUserData(camhwnd, this); // Callback functions may use pointer to this VideoGrabber

    // Drop any frame kept from a previous session.
    if (mPreviousFrame != NULL)
    {
        delete[] mPreviousFrame;
        mPreviousFrame = NULL;
    }
    mMotionDetectedDuringLastSecond = FALSE;

    // TODO: Use MPEGLAYER3WAVEFORMAT instead this
    // Setup audio params
    WAVEFORMATEX wfex;
    wfex.wFormatTag = WAVE_FORMAT_PCM;
    wfex.nChannels = 1;                // Use mono
    wfex.nSamplesPerSec = 8000;
    wfex.nAvgBytesPerSec = 8000;       // = nSamplesPerSec * nBlockAlign
    wfex.nBlockAlign = 1;
    wfex.wBitsPerSample = 8;
    wfex.cbSize = 0;
    capSetAudioFormat(camhwnd, &wfex, sizeof(WAVEFORMATEX));

    // Setup video capturing and streaming
    CAPTUREPARMS parms;
    capCaptureGetSetup(camhwnd, &parms, sizeof(CAPTUREPARMS));
    parms.fAbortLeftMouse = FALSE;
    parms.wPercentDropForError = 100; // Never abort capturing in case of dropped frames
    parms.fAbortRightMouse = FALSE;
    //parms.fLimitEnabled = TRUE;
    //parms.wTimeLimit = 0; // TODO!
    parms.fYield = TRUE; // TODO!
    capCaptureSetSetup(camhwnd, &parms, sizeof(parms));

    // !!!
    capSetCallbackOnError(camhwnd, capErrorCallback);

    // Resume thread for motion detection.
    // Created suspended so SetEnabled(TRUE) runs before the thread starts.
    mListenerHandle = CreateThread(0, 0, ListeningRoutine, this, CREATE_SUSPENDED, &mThreadID);
    SetEnabled(TRUE);
    ResumeThread(mListenerHandle);
}
// Initialize camera input bool CvCaptureCAM_VFW::open( int wIndex ) { char szDeviceName[80]; char szDeviceVersion[80]; HWND hWndC = 0; close(); if( (unsigned)wIndex >= 10 ) wIndex = 0; for( ; wIndex < 10; wIndex++ ) { if( capGetDriverDescription( wIndex, szDeviceName, sizeof (szDeviceName), szDeviceVersion, sizeof (szDeviceVersion))) { hWndC = capCreateCaptureWindow ( "My Own Capture Window", WS_POPUP | WS_CHILD, 0, 0, 320, 240, 0, 0); if( capDriverConnect (hWndC, wIndex)) break; DestroyWindow( hWndC ); hWndC = 0; } } if( hWndC ) { capWnd = hWndC; hdr = 0; hic = 0; fourcc = (DWORD)-1; memset( &caps, 0, sizeof(caps)); capDriverGetCaps( hWndC, &caps, sizeof(&caps)); ::MoveWindow( hWndC, 0, 0, 320, 240, TRUE ); capSetUserData( hWndC, (size_t)this ); capSetCallbackOnFrame( hWndC, frameCallback ); CAPTUREPARMS p; capCaptureGetSetup(hWndC,&p,sizeof(CAPTUREPARMS)); p.dwRequestMicroSecPerFrame = 66667/2; capCaptureSetSetup(hWndC,&p,sizeof(CAPTUREPARMS)); //capPreview( hWndC, 1 ); capPreviewScale(hWndC,FALSE); capPreviewRate(hWndC,1); } return capWnd != 0; }
/* Connects the engine to its capture driver.
 * A hidden message-only capture window sized to CIF is created first, then
 * the driver at obj->devidx is attached to it.  On success the window and
 * size are stored in the engine.
 * Returns 0 on success, -1 if window creation or driver connection fails. */
static int _vfw_engine_connect(VfwEngine *obj){
	MSVideoSize vsize;
	HWND cap;

	vsize.width = MS_VIDEO_SIZE_CIF_W;
	vsize.height = MS_VIDEO_SIZE_CIF_H;

	cap = capCreateCaptureWindow("Capture Window", WS_CHILD,
	                             0, 0, vsize.width, vsize.height,
	                             HWND_MESSAGE, 0);
	if (cap == NULL)
		return -1;

	if (capDriverConnect(cap, obj->devidx) == 0) {
		ms_warning("vfw: could not connect to capture driver, no webcam connected.");
		DestroyWindow(cap);
		return -1;
	}

	obj->capvideo = cap;
	obj->vsize = vsize;
	return 0;
}
// Initializes audio/video capture.
// Creates a hidden child capture window, connects the default VFW driver
// (when video capture is requested), configures the capture parameters and
// registers the stream/status/error callbacks.  On any failure the partially
// built state is torn down via captureCleanup() and the exception rethrown.
// Params:
//   framesPerSecond - requested video frame rate.
//   audioBufferSize - size of each audio buffer handed to the driver.
void MMCapture::captureInit(UINT framesPerSecond, UINT audioBufferSize) {
  DWORD style = WS_CHILD;
  m_captureWindow = capCreateCaptureWindow(_T("my capture window"), style,0,0,640,480,m_receiver.getWindow(),1);
  if(m_captureWindow == NULL) {
    throwException(_T("%s:Cannot create CaptureWindow:%s"),__TFUNCTION__,getLastErrorText().cstr());
  }
  try {
    CHECKRESULT(capSetUserData(m_captureWindow, this));
    if(captureVideo()) {
      CHECKRESULT(capDriverConnect(m_captureWindow, 0));
      m_webCamConnected = true;
    }
    CAPTUREPARMS param;
    // BUG FIX: "&param" had been mangled into the HTML entity for a pilcrow
    // ("&para;" + "m"); the address-of expressions below are restored.
    CHECKRESULT(capCaptureGetSetup(m_captureWindow, &param, sizeof(param)));
    param.dwRequestMicroSecPerFrame = 1000000 / framesPerSecond;
    param.fYield            = TRUE;
    param.AVStreamMaster    = AVSTREAMMASTER_AUDIO; // AVSTREAMMASTER_NONE;
    param.dwAudioBufferSize = audioBufferSize;
    CHECKRESULT(capCaptureSetSetup(m_captureWindow, &param, sizeof(param)));
    if(captureAudio()) {
      int audioFormatSize = capGetAudioFormat(m_captureWindow,&m_audioFormat, sizeof(m_audioFormat));
      CHECKRESULT(capSetCallbackOnWaveStream( m_captureWindow, captureWaveStreamCallback));
    }
    if(captureVideo()) {
      int videoFormatSize = capGetVideoFormat(m_captureWindow,&m_videoFormat, sizeof(m_videoFormat));
      CHECKRESULT(capSetCallbackOnVideoStream(m_captureWindow, captureVideoStreamCallback));
      CHECKRESULT(capSetCallbackOnFrame(      m_captureWindow, captureFrameCallback));
    }
    CHECKRESULT(capSetCallbackOnStatus(    m_captureWindow, captureStatusCallback));
    CHECKRESULT(capSetCallbackOnCapControl(m_captureWindow, captureControlCallback));
    CHECKRESULT(capSetCallbackOnError(     m_captureWindow, captureErrorCallback));
    if(captureAudio() && m_playAudio) {
      m_audioThread = new AudioPlayerThread(*this); TRACE_NEW(m_audioThread);
      m_audioThread->start();
    }
  } catch(...) {
    captureCleanup();
    throw;
  }
}
//--------------------------------------------------------------------------------------------------------------------------------------------------------------- //카메라 초기화. bool CWebCam::StartCam(CWnd* wnd, int nWidth, int nHeight, BITMAPINFO& bmpInfo) //wnd :캡처하는 영상이 나타날, 윈도우의 포인터를 넘겨받는다. // bmpInfo : 영상의 규격을 얻는것 , 크래스 외부에서 작업을 수행할 수 있다. { // 캡쳐 영상을 출력할 윈도우의 크기 및 위치 조절 m_hCam = capCreateCaptureWindow("Cam Capture", WS_CHILD | WS_VISIBLE, 0, 0, nWidth, nHeight, wnd->m_hWnd, NULL); // 유니코드 지원 여부에 따라 각각 함수 실행. 촬영하는 영상의 규격과는 상관이 없다. if (!capDriverConnect(m_hCam, 0)) //캡처 윈도우를 카메라와 연결한다. { AfxMessageBox("연결된 카메라를 찾을 수 없습니다."); return false; } // capDlgVideoFormat(m_hCam); capGetVideoFormat(m_hCam, &bmpInfo, sizeof(BITMAPINFO)); //캡처영상을 사용하려면 영상의 규격정보를 알아야한다. // 캡쳐 영상의 크기를 희망 값으로 설정 bmpInfo.bmiHeader.biWidth = nWidth; bmpInfo.bmiHeader.biHeight = nHeight; bmpInfo.bmiHeader.biSizeImage = nWidth*nHeight*bmpInfo.bmiHeader.biBitCount / 8; if (!capSetVideoFormat(m_hCam, &bmpInfo, sizeof(BITMAPINFO))) //카메라의 영상 규격(크기와 색 형식 정보등)을 설정한다. { // 설정에 실패하면 원래 값으로 되돌림 capGetVideoFormat(m_hCam, &bmpInfo, sizeof(BITMAPINFO)); } // 캡쳐 영상이 24비트 RGB가 될 수 있도록 설정 bmpInfo.bmiHeader.biBitCount = 24; bmpInfo.bmiHeader.biCompression = BI_RGB; if (!capSetVideoFormat(m_hCam, &bmpInfo, sizeof(BITMAPINFO))) { // 설정에 실패하면 원래 값으로 되돌림 capGetVideoFormat(m_hCam, &bmpInfo, sizeof(BITMAPINFO)); } capOverlay(m_hCam, TRUE); //그래픽 하드웨어를 이용할 것인지 결정. *오버레이를 활성화하면 미리보기가 자동으로 비활성화됨. capPreviewRate(m_hCam,30); //미리보기를 갱신하는 시간 간격을 설정. capPreview(m_hCam, TRUE); //미리보기를 활성화 할 것인지 결정. return true; }
/**
 * Set camera preview window.
 * Destroys any existing capture window first, then creates a new VFW capture
 * window at (x, y) (offset by the screen/top-bar constants) with size (w, h),
 * connects the default capture driver to it and installs the frame-grabber
 * callback with scaled preview.
 * Returns the new capture window handle, or NULL if creation or driver
 * connection failed.
 */
static HWND camera_set_preview_window(javacall_handle handle,
                                      int x, int y, int w, int h, BOOL visible)
{
#define DEFAULT_CAPTURE_DRIVER  0
#define DEFAULT_PREVIEW_RATE    150 /* ms unit => Increase this value to optimize performance */
    BOOL ret;
    camera_handle* pHandle = (camera_handle*)handle;
    DWORD wsVisible = TRUE == visible ? WS_VISIBLE : 0;

    /* Tear down the previous preview window, if any. */
    camera_destroy_window(pHandle);

    JAVA_DEBUG_PRINT4("[camera] capCreateCaptureWindow %d %d %d %d\n", x, y, w, h);

    pHandle->hCapWnd = capCreateCaptureWindow(_T("Sun_Java_Cap_Window"),
        wsVisible | WS_CHILD | WS_CLIPSIBLINGS,
        x + X_SCREEN_OFFSET, y + Y_SCREEN_OFFSET + TOP_BAR_HEIGHT,
        w, h, GET_MCIWND_HWND(), 0xffff);

    JAVA_DEBUG_PRINT1("[camera] capCreateCaptureWindow %d\n", pHandle->hCapWnd);

    if (pHandle->hCapWnd) {
        ret = capDriverConnect(pHandle->hCapWnd, DEFAULT_CAPTURE_DRIVER);
        if (FALSE == ret) {
            JAVA_DEBUG_PRINT(
                "[camera] capDriverConnect fail - is there camera attached?\n");
            DestroyWindow(pHandle->hCapWnd);
            pHandle->hCapWnd = NULL;
            return NULL;
        }
        /* Grabbed frames are delivered to camera_grabber_callback. */
        capSetCallbackOnFrame(pHandle->hCapWnd, camera_grabber_callback);
        capPreviewScale(pHandle->hCapWnd, TRUE);
        capPreviewRate(pHandle->hCapWnd, DEFAULT_PREVIEW_RATE);
    }

    return pHandle->hCapWnd;
}
/* Grabs a single frame from the default webcam and saves it as a DIB (BMP).
 * Params:
 *   szFile - output file path passed to WM_CAP_FILE_SAVEDIB.
 *   nIndex - unused here; presumably intended as a driver index — TODO confirm.
 *   nX/nY  - capture window width/height.
 *   nMsg   - unused.
 * Returns FALSE only if the capture window cannot be created; the results of
 * the individual capSendMessage calls are not checked. */
BOOL capWebCam(char *szFile, int nIndex, int nX, int nY, int nMsg)
{
	HWND hWndCap = capCreateCaptureWindow(NULL, WS_CHILD , 0, 0, nX, nY, GetDesktopWindow(), 0);
	if(!hWndCap)
		return FALSE;

	/* Let the user pick/configure the video source. */
	capDlgVideoSource(hWndCap);

	/* NOTE(review): this get/set round-trip changes no style bits — it is
	 * effectively a no-op and looks like a leftover. */
	SetWindowLong(hWndCap,GWL_EXSTYLE,GetWindowLong(hWndCap,GWL_EXSTYLE));
	ShowWindow(hWndCap,TRUE);

	/* (Re)connect driver 0, enable scaled preview, grab one frame without
	 * stopping preview, and write it out to szFile. */
	capSendMessage(hWndCap, WM_CAP_DRIVER_DISCONNECT, 0, 0);
	capSendMessage(hWndCap, WM_CAP_DRIVER_CONNECT, 0, 0);
	capSendMessage(hWndCap, WM_CAP_SET_SCALE, TRUE, 0);
	capSendMessage(hWndCap, WM_CAP_SET_PREVIEWRATE, 1, 0);
	capSendMessage(hWndCap, WM_CAP_SET_PREVIEW, TRUE, 0);
	capSendMessage(hWndCap, WM_CAP_GRAB_FRAME_NOSTOP, 0, 0);
	capSendMessage(hWndCap, WM_CAP_FILE_SAVEDIB, 0, szFile);

	DestroyWindow(hWndCap);
	return TRUE;
}
//Description: sets up an imaging device for capture operations //Input: digitizer ID number and a dummy string //Output: returns success (non-negative) or failure EXPORT long SetUp(long driver, long width, long height){ //reset the image and size variables pImage = NULL; dwSize = 0; //make sure we don't already have a window if(hWndC == NULL) { //create the capture window, the window is currently being draw in the top left corner //because the buffer is filled with nothing if it is not being drawn on the screen; //only a window size of 1x1 is needed to get this to work hWndC = capCreateCaptureWindow(TEXT("Webcam Capture Window"), WS_CHILD | WS_VISIBLE, 0, 0, 1, 1, GetDesktopWindow(), 0); } //connect the selected driver to the window if(!capDriverConnect(hWndC, driver)) { DestroyWindow(hWndC); return -1; } //create a frame capture callback function if(!capSetCallbackOnFrame(hWndC, capFrame)) { capDriverDisconnect(hWndC); DestroyWindow(hWndC); return -1; } //begin previewing capPreviewRate(hWndC, 66); if(!capPreview(hWndC, TRUE)) { capDriverDisconnect(hWndC); DestroyWindow(hWndC); return -1; } return 0; }
/* Opens a camera device.
 * Param:
 *  name        - Window name for the capture window; NULL selects the
 *                default window name.
 *  inp_channel - Input channel to use on the device.
 * Return: Pointer to the common CameraDevice header of the initialized
 *         descriptor, or NULL on failure. */
CameraDevice* camera_device_open(const char* name, int inp_channel)
{
    WndCameraDevice* wcd;

    /* Allocate descriptor and initialize windows-specific fields. */
    wcd = _camera_device_alloc();
    if (wcd == NULL) {
        E("%s: Unable to allocate WndCameraDevice instance", __FUNCTION__);
        return NULL;
    }
    wcd->window_name = (name != NULL) ? ASTRDUP(name) : ASTRDUP(_default_window_name);
    if (wcd->window_name == NULL) {
        E("%s: Unable to save window name", __FUNCTION__);
        _camera_device_free(wcd);
        return NULL;
    }
    wcd->input_channel = inp_channel;

    /* Create capture window that is a child of HWND_MESSAGE window.
     * We make it invisible, so it doesn't mess with the UI. Also
     * note that we supply standard HWND_MESSAGE window handle as
     * the parent window, since we don't want video capturing
     * machinery to be dependent on the details of our UI. */
    wcd->cap_window = capCreateCaptureWindow(wcd->window_name, WS_CHILD, 0, 0,
                                             0, 0, HWND_MESSAGE, 1);
    if (wcd->cap_window == NULL) {
        E("%s: Unable to create video capturing window '%s': %d",
          __FUNCTION__, wcd->window_name, GetLastError());
        _camera_device_free(wcd);
        return NULL;
    }
    /* Save capture window descriptor as window's user data. */
    capSetUserData(wcd->cap_window, wcd);

    return &wcd->header;
}
/* Dedicated thread that owns the VFW capture window.
 * VFW delivers data via window messages, so the capture window must be
 * created and its message queue pumped on the same thread.  The loop also
 * waits on the thread's notify handle so shutdown requests are seen
 * promptly. */
static ZTHREAD vfw_capture_thread (void *arg)
{
    zbar_video_t *vdo = arg;
    video_state_t *state = vdo->state;
    zbar_thread_t *thr = &state->thread;

    /* 1x1 invisible popup: serves only as a message sink for the driver. */
    state->hwnd = capCreateCaptureWindow(NULL, WS_POPUP, 0, 0, 1, 1, NULL, 0);
    if(!state->hwnd)
        goto done;

    _zbar_mutex_lock(&vdo->qlock);
    _zbar_thread_init(thr);
    zprintf(4, "spawned vfw capture thread (thr=%04lx)\n",
            _zbar_thread_self());

    MSG msg;
    int rc = 0;
    /* Loop until asked to stop or the wait fails (rc outside [0,1]). */
    while(thr->started && rc >= 0 && rc <= 1) {
        _zbar_mutex_unlock(&vdo->qlock);
        /* Wake on either a notification or any queued window message. */
        rc = MsgWaitForMultipleObjects(1, &thr->notify, 0, INFINITE,
                                       QS_ALLINPUT);
        if(rc == 1)
            /* Drain all pending messages.  (The inner rc > 0 check is
             * always true at this point — redundant but harmless.) */
            while(PeekMessage(&msg, NULL, 0, 0, PM_NOYIELD | PM_REMOVE))
                if(rc > 0) {
                    TranslateMessage(&msg);
                    DispatchMessage(&msg);
                }
        _zbar_mutex_lock(&vdo->qlock);
    }

 done:
    thr->running = 0;
    _zbar_event_trigger(&thr->activity);
    _zbar_mutex_unlock(&vdo->qlock);
    return(0);
}
/* Initialize the VFW device */
void CMainFrame::OnVfwInitvfw()
{
	// TODO: add command handler code here
	DWORD fsize;

	// Create the source (display) window.
	if(!m_wndSource.CreateEx(WS_EX_TOPMOST,NULL, _T("Source"),WS_OVERLAPPED|WS_CAPTION, CRect(0,0,352,288),NULL,0))
		return;

	// Create the capture window as a visible child of the source window (CIF size).
	m_hWndCap = capCreateCaptureWindow(_T("Capture Window"),WS_CHILD|WS_VISIBLE, 0,0,352,288, m_wndSource.m_hWnd,0);
	//m_wndSource.ShowWindow(SW_SHOW);

	// Register the callback functions.
	capSetCallbackOnError(m_hWndCap,(FARPROC)ErrorCallbackProc);
	capSetCallbackOnStatus(m_hWndCap,(FARPROC)StatusCallbackProc);
	capSetCallbackOnVideoStream(m_hWndCap,(FARPROC)VideoCallbackProc);

	// Connect the video device.
	capDriverConnect(m_hWndCap,0); //(HWND m_hWndCap, int index); // index: 0--9

	// Query the driver capabilities; use hardware overlay when available.
	capDriverGetCaps(m_hWndCap,&m_caps,sizeof(CAPDRIVERCAPS));
	if (m_caps.fHasOverlay)
		capOverlay(m_hWndCap,TRUE);

	// Set the preview rate (25 fps) and start previewing.
	capPreviewRate(m_hWndCap,1000/25);
	capPreview(m_hWndCap,bPreview);

	// Fetch the current video format into lpbiIn.
	// NOTE(review): lpbiIn must already point to a buffer of at least fsize
	// bytes — confirm it is allocated elsewhere before this handler runs.
	fsize = capGetVideoFormatSize(m_hWndCap);
	capGetVideoFormat(m_hWndCap, lpbiIn, fsize);

	AfxMessageBox(_T("初始化成功!"));
}
/* Resets camera device after capturing.
 * Since new capture request may require different frame dimensions we must
 * reset frame info cached in the capture window. The only way to do that would
 * be closing, and reopening it again.
 * Frees all cached GDI/frame buffers and the last captured frame, destroys
 * the capture window, recreates it, and re-attaches the descriptor as the
 * new window's user data (needed by the capture callbacks). */
static void _camera_device_reset(WndCameraDevice* cd)
{
    if (cd != NULL && cd->cap_window != NULL) {
        /* Detach the driver before tearing the window down. */
        capDriverDisconnect(cd->cap_window);

        if (cd->dc != NULL) {
            ReleaseDC(cd->cap_window, cd->dc);
            cd->dc = NULL;
        }
        if (cd->gdi_bitmap != NULL) {
            free(cd->gdi_bitmap);
            cd->gdi_bitmap = NULL;
        }
        if (cd->frame_bitmap != NULL) {
            free(cd->frame_bitmap);
            cd->frame_bitmap = NULL;
        }
        if (cd->framebuffer != NULL) {
            free(cd->framebuffer);
            cd->framebuffer = NULL;
        }
        if (cd->last_frame != NULL) {
            free(cd->last_frame);
            cd->last_frame = NULL;
        }
        cd->last_frame_size = 0;

        /* Recreate the capturing window. */
        DestroyWindow(cd->cap_window);
        cd->cap_window = capCreateCaptureWindow(cd->window_name, WS_CHILD, 0, 0,
                                                0, 0, HWND_MESSAGE, 1);
        if (cd->cap_window != NULL) {
            /* Save capture window descriptor as window's user data. */
            capSetUserData(cd->cap_window, cd);
        }
    }
}
// Initializes the camera for capture.
// A hidden child capture window is created under hMainWnd, the driver at
// iDeviceIndex is connected to it, its capabilities and current status are
// read into the globals gDriverCaps/gCapStatus, and preview is switched off.
// Any failure is reported through Err() and false is returned.
bool fInitCamera (HWND hMainWnd, int iDeviceIndex)
{
    // The capture window is never actually displayed by this application;
    // it only hosts the driver connection.
    hgCapWnd = capCreateCaptureWindow(NULL,
                                      WS_CHILD,      // window style
                                      0, 0, 0, 0,    // position and size
                                      hMainWnd, ID_CAP);
    if (hgCapWnd == NULL)
    {
        Err("Can't open the camera display window");
        return false;
    }
    if (!capDriverConnect(hgCapWnd, iDeviceIndex))
    {
        Err("Can't connect to the camera");
        return false;
    }
    if (!capDriverGetCaps(hgCapWnd, &gDriverCaps, sizeof(CAPDRIVERCAPS)))
    {
        Err("Can't get capabilities of the camera");
        return false;
    }
    if (!capPreview(hgCapWnd, FALSE))      // keep preview switched off
    {
        Err("capPreview FALSE failed");
        return false;
    }
    if (!capGetStatus(hgCapWnd, &gCapStatus, sizeof(CAPSTATUS)))
    {
        Err("Can't get status of the camera");
        return false;
    }
    PrintCapStatus();
    return true;       // success
}
// Constructor: prepares capture resources.
// Creates the frame-ready event and, when a webcam is present and not yet
// connected, a hidden dialog-class host window plus the VFW capture window
// inside it.
CVideoCap::CVideoCap()
{
	// Auto-reset event: the system resets it to nonsignaled after a single
	// waiting thread has been released.
	m_hCaptureEvent = CreateEvent(NULL, FALSE, FALSE, NULL);
	m_lpbmi = NULL;
	m_lpDIB = NULL;

	// NOTE(review): when this early return triggers, m_hWnd/m_hWndCap are
	// not assigned here — confirm they are initialized elsewhere (e.g.
	// in-class initializers or another member function).
	if (!IsWebCam() || m_bIsConnected)
		return;

	// "#32770" is the predefined dialog window class; the popup stays
	// hidden and merely hosts the capture window below.
	m_hWnd = CreateWindow("#32770", /* Dialog */ "", WS_POPUP, 0, 0, 0, 0, NULL, NULL, NULL, NULL);
	m_hWndCap = capCreateCaptureWindow ( "CVideoCap", WS_CHILD | WS_VISIBLE, 0, 0, 0, 0, m_hWnd, 0 );
}
// Initialize camera input bool CvCaptureCAM_VFW::open( int wIndex ) { char szDeviceName[80]; char szDeviceVersion[80]; HWND hWndC = 0; close(); if( (unsigned)wIndex >= 10 ) wIndex = 0; for( ; wIndex < 10; wIndex++ ) { if( capGetDriverDescription( wIndex, szDeviceName, sizeof (szDeviceName), szDeviceVersion, sizeof (szDeviceVersion))) { hWndC = capCreateCaptureWindow ( "My Own Capture Window", WS_POPUP | WS_CHILD, 0, 0, 320, 240, 0, 0); if( capDriverConnect (hWndC, wIndex)) break; DestroyWindow( hWndC ); hWndC = 0; } } if( hWndC ) { capWnd = hWndC; hdr = 0; hic = 0; fourcc = (DWORD)-1; memset( &caps, 0, sizeof(caps)); capDriverGetCaps( hWndC, &caps, sizeof(caps)); CAPSTATUS status = {}; capGetStatus(hWndC, &status, sizeof(status)); ::SetWindowPos(hWndC, NULL, 0, 0, status.uiImageWidth, status.uiImageHeight, SWP_NOZORDER|SWP_NOMOVE); capSetUserData( hWndC, (size_t)this ); capSetCallbackOnFrame( hWndC, frameCallback ); CAPTUREPARMS p; capCaptureGetSetup(hWndC,&p,sizeof(CAPTUREPARMS)); p.dwRequestMicroSecPerFrame = 66667/2; // 30 FPS capCaptureSetSetup(hWndC,&p,sizeof(CAPTUREPARMS)); //capPreview( hWndC, 1 ); capPreviewScale(hWndC,FALSE); capPreviewRate(hWndC,1); // Get frame initial parameters. const DWORD size = capGetVideoFormatSize(capWnd); if( size > 0 ) { unsigned char *pbi = new unsigned char[size]; if( pbi ) { if( capGetVideoFormat(capWnd, pbi, size) == size ) { BITMAPINFOHEADER& vfmt = ((BITMAPINFO*)pbi)->bmiHeader; widthSet = vfmt.biWidth; heightSet = vfmt.biHeight; fourcc = vfmt.biCompression; } delete []pbi; } } // And alternative way in case of failure. if( widthSet == 0 || heightSet == 0 ) { widthSet = status.uiImageWidth; heightSet = status.uiImageHeight; } } return capWnd != 0; }
// First-update handler: sets up scrolling and initializes webcam capture.
// Creates a VFW capture window over the client area, connects camera 0,
// optionally shows the driver's format dialog, starts preview, and installs
// the video-stream/frame callbacks.
void CSoCProjectView::OnInitialUpdate()
{
	CScrollView::OnInitialUpdate();

	CSize sizeTotal;
	// TODO: compute the total size of this view.
	sizeTotal.cx = 640;
	sizeTotal.cy = 480;
	SetScrollSizes(MM_TEXT, sizeTotal);

	CSoCProjectDoc*pDoc = GetDocument();
	// TODO: add construction code here.
	RECT r;
	GetClientRect(&r);

	pDoc->m_hCamWnd = capCreateCaptureWindow( _T("Capture Window"), WS_CHILD | WS_VISIBLE, 5, 5, r.right-5, r.bottom-5, this->m_hWnd, NULL);

	// Installed capture devices can be addressed by index 0..9.
	if(!capDriverConnect(pDoc->m_hCamWnd, 0))
		AfxMessageBox(_T("웹캠 인식 실패 ㅠㅠ"));

	// Fetch information about the current driver.
	capDriverGetCaps(pDoc->m_hCamWnd, &pDoc->m_psCapsInfo, sizeof(pDoc->m_psCapsInfo));

	// Check whether the driver provides a video-format dialog.
	if(pDoc->m_psCapsInfo.fHasDlgVideoFormat)
	{
		// If supported, this opens a dialog where resolution, pixel format
		// and frame-buffer size can be chosen.  Selecting a video format the
		// device does not support will result in a black screen...
		capDlgVideoFormat(pDoc->m_hCamWnd);
	}

	// Overlay can only be used when fHasOverlay is nonzero.
	if(pDoc->m_psCapsInfo.fHasOverlay)
	{
		// Hardware overlay reduces system load (optional).
		capOverlay(pDoc->m_hCamWnd, FALSE);
	}

	// Read the current BITMAPINFO.
	capGetVideoFormat(pDoc->m_hCamWnd, &pDoc->m_BTMInfo, sizeof(pDoc->m_BTMInfo));

	// Preview rate is in 1/1000 s units.
	capPreviewRate(pDoc->m_hCamWnd, 1);
	// Start the preview.
	capPreview(pDoc->m_hCamWnd, TRUE);

	CAPTUREPARMS cp;
	capCaptureGetSetup(pDoc->m_hCamWnd, &cp, sizeof(cp) );  // get the current defaults
	cp.dwRequestMicroSecPerFrame = 1;          // Set desired frame rate
	cp.fMakeUserHitOKToCapture = FALSE;
	cp.fYield = TRUE;                          // we want capture on a background thread.
	cp.wNumVideoRequested = (WORD) 1;          // we may get less than this - no problem
	cp.fCaptureAudio = FALSE;
	cp.vKeyAbort = 0;                          // If no key is provided, it won't stop...
	cp.fAbortLeftMouse = FALSE;
	cp.fAbortRightMouse = FALSE;
	cp.fLimitEnabled = FALSE;                  // we want to stop
	cp.fMCIControl = FALSE;
	capCaptureSetSetup(pDoc->m_hCamWnd, &cp, sizeof(cp) );

	// Both stream and single-frame captures go through VideoCallbackProc.
	capSetCallbackOnVideoStream(pDoc->m_hCamWnd, VideoCallbackProc);
	capSetCallbackOnFrame(pDoc->m_hCamWnd, VideoCallbackProc);
}
/* Demuxer read_header: opens a VFW capture device (or lists drivers).
 * With filename "list", the installed capture drivers (0-9) are logged and
 * EIO is returned.  Otherwise the filename is parsed as a driver index, the
 * driver is connected to a hidden message-only capture window, the video
 * format and capture parameters are negotiated, and capture is started with
 * frames delivered asynchronously to videostream_cb. */
static int vfw_read_header(AVFormatContext *s)
{
    struct vfw_ctx *ctx = s->priv_data;
    AVCodecContext *codec;
    AVStream *st;
    int devnum;
    int bisize;
    BITMAPINFO *bi = NULL;
    CAPTUREPARMS cparms;
    DWORD biCompression;
    WORD biBitCount;
    int ret;
    AVRational framerate_q;

    if (!strcmp(s->filename, "list")) {
        for (devnum = 0; devnum <= 9; devnum++) {
            char driver_name[256];
            char driver_ver[256];
            ret = capGetDriverDescription(devnum,
                                          driver_name, sizeof(driver_name),
                                          driver_ver, sizeof(driver_ver));
            if (ret) {
                av_log(s, AV_LOG_INFO, "Driver %d\n", devnum);
                av_log(s, AV_LOG_INFO, " %s\n", driver_name);
                av_log(s, AV_LOG_INFO, " %s\n", driver_ver);
            }
        }
        return AVERROR(EIO);
    }

    /* Message-only window: VFW needs an HWND but nothing is displayed. */
    ctx->hwnd = capCreateCaptureWindow(NULL, 0, 0, 0, 0, 0, HWND_MESSAGE, 0);
    if(!ctx->hwnd) {
        av_log(s, AV_LOG_ERROR, "Could not create capture window.\n");
        return AVERROR(EIO);
    }

    /* If atoi fails, devnum==0 and the default device is used */
    devnum = atoi(s->filename);

    ret = SendMessage(ctx->hwnd, WM_CAP_DRIVER_CONNECT, devnum, 0);
    if(!ret) {
        av_log(s, AV_LOG_ERROR, "Could not connect to device.\n");
        DestroyWindow(ctx->hwnd);
        return AVERROR(ENODEV);
    }

    /* No overlay or preview needed for headless capture. */
    SendMessage(ctx->hwnd, WM_CAP_SET_OVERLAY, 0, 0);
    SendMessage(ctx->hwnd, WM_CAP_SET_PREVIEW, 0, 0);

    ret = SendMessage(ctx->hwnd, WM_CAP_SET_CALLBACK_VIDEOSTREAM, 0,
                      (LPARAM) videostream_cb);
    if(!ret) {
        av_log(s, AV_LOG_ERROR, "Could not set video stream callback.\n");
        goto fail;
    }

    /* Make the AVFormatContext reachable from the window callback. */
    SetWindowLongPtr(ctx->hwnd, GWLP_USERDATA, (LONG_PTR) s);

    st = avformat_new_stream(s, NULL);
    if(!st) {
        vfw_read_close(s);
        return AVERROR(ENOMEM);
    }

    /* Set video format */
    bisize = SendMessage(ctx->hwnd, WM_CAP_GET_VIDEOFORMAT, 0, 0);
    if(!bisize)
        goto fail;
    bi = av_malloc(bisize);
    if(!bi) {
        vfw_read_close(s);
        return AVERROR(ENOMEM);
    }
    ret = SendMessage(ctx->hwnd, WM_CAP_GET_VIDEOFORMAT, bisize, (LPARAM) bi);
    if(!ret)
        goto fail;

    dump_bih(s, &bi->bmiHeader);

    ret = av_parse_video_rate(&framerate_q, ctx->framerate);
    if (ret < 0) {
        av_log(s, AV_LOG_ERROR, "Could not parse framerate '%s'.\n", ctx->framerate);
        goto fail;
    }

    /* User-requested dimensions override the driver's current format. */
    if (ctx->video_size) {
        ret = av_parse_video_size(&bi->bmiHeader.biWidth, &bi->bmiHeader.biHeight, ctx->video_size);
        if (ret < 0) {
            av_log(s, AV_LOG_ERROR, "Couldn't parse video size.\n");
            goto fail;
        }
    }

    if (0) {
        /* For testing yet unsupported compressions
         * Copy these values from user-supplied verbose information */
        bi->bmiHeader.biWidth       = 320;
        bi->bmiHeader.biHeight      = 240;
        bi->bmiHeader.biPlanes      = 1;
        bi->bmiHeader.biBitCount    = 12;
        bi->bmiHeader.biCompression = MKTAG('I','4','2','0');
        bi->bmiHeader.biSizeImage   = 115200;
        dump_bih(s, &bi->bmiHeader);
    }

    ret = SendMessage(ctx->hwnd, WM_CAP_SET_VIDEOFORMAT, bisize, (LPARAM) bi);
    if(!ret) {
        av_log(s, AV_LOG_ERROR, "Could not set Video Format.\n");
        goto fail;
    }

    biCompression = bi->bmiHeader.biCompression;
    biBitCount = bi->bmiHeader.biBitCount;

    /* Set sequence setup */
    ret = SendMessage(ctx->hwnd, WM_CAP_GET_SEQUENCE_SETUP, sizeof(cparms),
                      (LPARAM) &cparms);
    if(!ret)
        goto fail;

    dump_captureparms(s, &cparms);

    cparms.fYield = 1; // Spawn a background thread
    cparms.dwRequestMicroSecPerFrame =
                               (framerate_q.den*1000000) / framerate_q.num;
    cparms.fAbortLeftMouse = 0;
    cparms.fAbortRightMouse = 0;
    cparms.fCaptureAudio = 0;
    cparms.vKeyAbort = 0;

    ret = SendMessage(ctx->hwnd, WM_CAP_SET_SEQUENCE_SETUP, sizeof(cparms),
                      (LPARAM) &cparms);
    if(!ret)
        goto fail;

    codec = st->codec;
    codec->time_base = av_inv_q(framerate_q);
    codec->codec_type = AVMEDIA_TYPE_VIDEO;
    codec->width  = bi->bmiHeader.biWidth;
    codec->height = bi->bmiHeader.biHeight;
    codec->pix_fmt = vfw_pixfmt(biCompression, biBitCount);
    if(codec->pix_fmt == AV_PIX_FMT_NONE) {
        codec->codec_id = vfw_codecid(biCompression);
        if(codec->codec_id == AV_CODEC_ID_NONE) {
            av_log(s, AV_LOG_ERROR, "Unknown compression type. "
                   "Please report verbose (-v 9) debug information.\n");
            vfw_read_close(s);
            return AVERROR_PATCHWELCOME;
        }
        codec->bits_per_coded_sample = biBitCount;
    } else {
        codec->codec_id = AV_CODEC_ID_RAWVIDEO;
        if(biCompression == BI_RGB) {
            codec->bits_per_coded_sample = biBitCount;
            /* Tag raw RGB as bottom-up so downstream knows the row order. */
            codec->extradata = av_malloc(9 + AV_INPUT_BUFFER_PADDING_SIZE);
            if (codec->extradata) {
                codec->extradata_size = 9;
                memcpy(codec->extradata, "BottomUp", 9);
            }
        }
    }

    av_freep(&bi);

    avpriv_set_pts_info(st, 32, 1, 1000);

    ctx->mutex = CreateMutex(NULL, 0, NULL);
    if(!ctx->mutex) {
        av_log(s, AV_LOG_ERROR, "Could not create Mutex.\n" );
        goto fail;
    }
    ctx->event = CreateEvent(NULL, 1, 0, NULL);
    if(!ctx->event) {
        av_log(s, AV_LOG_ERROR, "Could not create Event.\n" );
        goto fail;
    }

    /* Start streaming capture without writing to a file. */
    ret = SendMessage(ctx->hwnd, WM_CAP_SEQUENCE_NOFILE, 0, 0);
    if(!ret) {
        av_log(s, AV_LOG_ERROR, "Could not start capture sequence.\n" );
        goto fail;
    }

    return 0;

fail:
    av_freep(&bi);
    vfw_read_close(s);
    return AVERROR(EIO);
}
/* Demuxer read_header (legacy API variant): opens a VFW capture device.
 * Requires ap->time_base to be set.  The filename is parsed as a driver
 * index, the driver is connected to a hidden message-only capture window,
 * the video format and capture parameters are negotiated (ap->width/height
 * override the driver's current format), and capture is started with frames
 * delivered asynchronously to videostream_cb. */
static int vfw_read_header(AVFormatContext *s, AVFormatParameters *ap)
{
    struct vfw_ctx *ctx = s->priv_data;
    AVCodecContext *codec;
    AVStream *st;
    int devnum;
    int bisize;
    BITMAPINFO *bi;
    CAPTUREPARMS cparms;
    DWORD biCompression;
    WORD biBitCount;
    int width;
    int height;
    int ret;

    if(!ap->time_base.den) {
        av_log(s, AV_LOG_ERROR, "A time base must be specified.\n");
        return AVERROR_IO;
    }

    ctx->s = s;

    /* Message-only window: VFW needs an HWND but nothing is displayed. */
    ctx->hwnd = capCreateCaptureWindow(NULL, 0, 0, 0, 0, 0, HWND_MESSAGE, 0);
    if(!ctx->hwnd) {
        av_log(s, AV_LOG_ERROR, "Could not create capture window.\n");
        return AVERROR_IO;
    }

    /* If atoi fails, devnum==0 and the default device is used */
    devnum = atoi(s->filename);

    ret = SendMessage(ctx->hwnd, WM_CAP_DRIVER_CONNECT, devnum, 0);
    if(!ret) {
        av_log(s, AV_LOG_ERROR, "Could not connect to device.\n");
        DestroyWindow(ctx->hwnd);
        return AVERROR(ENODEV);
    }

    /* No overlay or preview needed for headless capture. */
    SendMessage(ctx->hwnd, WM_CAP_SET_OVERLAY, 0, 0);
    SendMessage(ctx->hwnd, WM_CAP_SET_PREVIEW, 0, 0);

    ret = SendMessage(ctx->hwnd, WM_CAP_SET_CALLBACK_VIDEOSTREAM, 0,
                      (LPARAM) videostream_cb);
    if(!ret) {
        av_log(s, AV_LOG_ERROR, "Could not set video stream callback.\n");
        goto fail_io;
    }

    /* Make the context reachable from the window callback. */
    SetWindowLongPtr(ctx->hwnd, GWLP_USERDATA, (LONG_PTR) ctx);

    st = av_new_stream(s, 0);
    if(!st) {
        vfw_read_close(s);
        return AVERROR_NOMEM;
    }

    /* Set video format */
    bisize = SendMessage(ctx->hwnd, WM_CAP_GET_VIDEOFORMAT, 0, 0);
    if(!bisize)
        goto fail_io;
    bi = av_malloc(bisize);
    if(!bi) {
        vfw_read_close(s);
        return AVERROR_NOMEM;
    }
    ret = SendMessage(ctx->hwnd, WM_CAP_GET_VIDEOFORMAT, bisize, (LPARAM) bi);
    if(!ret)
        goto fail_bi;

    dump_bih(s, &bi->bmiHeader);

    /* Caller-supplied dimensions override the driver's current format. */
    width  = ap->width  ? ap->width  : bi->bmiHeader.biWidth ;
    height = ap->height ? ap->height : bi->bmiHeader.biHeight;
    bi->bmiHeader.biWidth  = width ;
    bi->bmiHeader.biHeight = height;

#if 0
    /* For testing yet unsupported compressions
     * Copy these values from user-supplied verbose information */
    bi->bmiHeader.biWidth       = 320;
    bi->bmiHeader.biHeight      = 240;
    bi->bmiHeader.biPlanes      = 1;
    bi->bmiHeader.biBitCount    = 12;
    bi->bmiHeader.biCompression = MKTAG('I','4','2','0');
    bi->bmiHeader.biSizeImage   = 115200;
    dump_bih(s, &bi->bmiHeader);
#endif

    ret = SendMessage(ctx->hwnd, WM_CAP_SET_VIDEOFORMAT, bisize, (LPARAM) bi);
    if(!ret) {
        av_log(s, AV_LOG_ERROR, "Could not set Video Format.\n");
        goto fail_bi;
    }

    biCompression = bi->bmiHeader.biCompression;
    biBitCount = bi->bmiHeader.biBitCount;

    av_free(bi);

    /* Set sequence setup */
    ret = SendMessage(ctx->hwnd, WM_CAP_GET_SEQUENCE_SETUP, sizeof(cparms),
                      (LPARAM) &cparms);
    if(!ret)
        goto fail_io;

    dump_captureparms(s, &cparms);

    cparms.fYield = 1; // Spawn a background thread
    cparms.dwRequestMicroSecPerFrame =
                               (ap->time_base.num*1000000) / ap->time_base.den;
    cparms.fAbortLeftMouse = 0;
    cparms.fAbortRightMouse = 0;
    cparms.fCaptureAudio = 0;
    cparms.vKeyAbort = 0;

    ret = SendMessage(ctx->hwnd, WM_CAP_SET_SEQUENCE_SETUP, sizeof(cparms),
                      (LPARAM) &cparms);
    if(!ret)
        goto fail_io;

    codec = st->codec;
    codec->time_base = ap->time_base;
    codec->codec_type = CODEC_TYPE_VIDEO;
    codec->width = width;
    codec->height = height;
    codec->pix_fmt = vfw_pixfmt(biCompression, biBitCount);
    if(codec->pix_fmt == PIX_FMT_NONE) {
        codec->codec_id = vfw_codecid(biCompression);
        if(codec->codec_id == CODEC_ID_NONE) {
            av_log(s, AV_LOG_ERROR, "Unknown compression type. "
                   "Please report verbose (-v 9) debug information.\n");
            vfw_read_close(s);
            return AVERROR_PATCHWELCOME;
        }
        codec->bits_per_coded_sample = biBitCount;
    } else {
        codec->codec_id = CODEC_ID_RAWVIDEO;
        if(biCompression == BI_RGB)
            codec->bits_per_coded_sample = biBitCount;
    }

    av_set_pts_info(st, 32, 1, 1000);

    ctx->mutex = CreateMutex(NULL, 0, NULL);
    if(!ctx->mutex) {
        av_log(s, AV_LOG_ERROR, "Could not create Mutex.\n" );
        goto fail_io;
    }
    ctx->event = CreateEvent(NULL, 1, 0, NULL);
    if(!ctx->event) {
        av_log(s, AV_LOG_ERROR, "Could not create Event.\n" );
        goto fail_io;
    }

    /* Start streaming capture without writing to a file. */
    ret = SendMessage(ctx->hwnd, WM_CAP_SEQUENCE_NOFILE, 0, 0);
    if(!ret) {
        av_log(s, AV_LOG_ERROR, "Could not start capture sequence.\n" );
        goto fail_io;
    }

    return 0;

fail_bi:
    av_free(bi);

fail_io:
    vfw_read_close(s);
    return AVERROR_IO;
}
// Dialog procedure for the capture/compression dialog.
// WM_INITDIALOG creates the capture window, prepares an x264 VCM compressor
// and spawns the sender thread; I_BUTTON_CONN connects the camera, configures
// capture/preview and starts sequence compression; the exit paths shut the
// compressor down and disconnect the driver.
long __stdcall DlgProc ( HWND hWnd , unsigned msg , unsigned wParam , long lParam )
{
	switch(msg)
	{
	case WM_INITDIALOG:
		//hEdit = GetDlgItem( hWnd , I_EDIT );
		//GetClientRect( hEdit , &rect );
		hWndCap = capCreateCaptureWindow ( NULL, WS_CHILD | WS_VISIBLE , 0, 0, 320, 240, hWnd, 1235 );
		//hWndCap = capCreateCaptureWindow ( NULL, WS_CHILD | WS_VISIBLE , 0, 0, (rect.right-rect.left ), (rect.bottom-rect.top), hEdit, 1235);

		// fill in the CapVar structure by hand
		ZeroMemory( &CapVar, sizeof(COMPVARS) );
		CapVar.cbSize = sizeof(COMPVARS);
		CapVar.dwFlags = ICMF_COMPVARS_VALID;
		CapVar.cbState = 0;
		CapVar.fccHandler = mmioFOURCC( 'x', '2', '6', '4' );
		CapVar.fccType = ICTYPE_VIDEO;
		// open the compressor (takes a while)
		CapVar.hic = ICOpen( ICTYPE_VIDEO, CapVar.fccHandler, ICMODE_COMPRESS );

		hThread = CreateThread( NULL, 0, (LPTHREAD_START_ROUTINE)SendThread, NULL, 0, 0 );
		return -1 ;

	case WM_COMMAND:
		switch(LOWORD(wParam))
		{
		case I_BUTTON_CONN :
			if( !capDriverConnect( hWndCap, 0 ) )
			{
				EndDialog ( hWnd, 0 );
				return -1;
			}
			capCaptureGetSetup( hWndCap, &CapParms, sizeof(CAPTUREPARMS) );
			CapParms.dwRequestMicroSecPerFrame = 66000; // 66 ms/frame (~15 fps)
			CapParms.fLimitEnabled = FALSE;
			CapParms.fCaptureAudio = FALSE;
			CapParms.fMCIControl = FALSE;
			CapParms.fYield = TRUE;
			CapParms.vKeyAbort = VK_ESCAPE;
			CapParms.fAbortLeftMouse = FALSE;
			CapParms.fAbortRightMouse = FALSE;
			capCaptureSetSetup( hWndCap, &CapParms, sizeof(CAPTUREPARMS) );
			capPreviewScale( hWndCap, 1 );
			capPreviewRate( hWndCap, 66 );
			capPreviewScale( hWndCap, FALSE );
			capPreview( hWndCap, 1 );

			//added by jimmy
			// OPTIONAL STEP: Setup resolution
			capGetVideoFormat( hWndCap, &InputBmpInfo ,sizeof(InputBmpInfo) );
			//InputBmpInfo.bmiHeader.biWidth = 320; //(rect.right-rect.left );
			//InputBmpInfo.bmiHeader.biHeight = 240; //(rect.bottom-rect.top);
			//InputBmpInfo.bmiHeader.biBitCount = 24;
			capSetVideoFormat( hWndCap, &InputBmpInfo, sizeof(InputBmpInfo) );
			//capDriverDisconnect (hWndCap, 0);//Can we do better?
			//capDriverConnect (hWndCap, 0);

			capSetCallbackOnFrame( hWndCap, FrameCallBack );
			if(CapVar.hic > 0 )
			{
				// query the size of the source-format structure (InputBmpInfo)
				OutFormatSize = ICCompressGetFormatSize( CapVar.hic, &InputBmpInfo.bmiHeader );
				// fill in the output-data structure OutputBmpInfo
				ICCompressGetFormat( CapVar.hic, &InputBmpInfo.bmiHeader, &OutputBmpInfo.bmiHeader );
				// maximum size of one compressed (output) frame
				OutBufferSize = ICCompressGetSize( CapVar.hic, &InputBmpInfo.bmiHeader, &OutputBmpInfo.bmiHeader );
				// start sequence compression
				ICSeqCompressFrameStart( &CapVar, &InputBmpInfo );
			}
			break;

		case I_BUTTON_EXIT :
			ICSeqCompressFrameEnd(&CapVar); // end of compression
			ICCompressorFree(&CapVar);
			ICClose(CapVar.hic);
			capPreview( hWndCap , false );
			capDriverDisconnect( hWndCap );
			EndDialog ( hWnd , 0 ) ;
			break;
		}
		return -1 ;

	case WM_CLOSE :
		ICSeqCompressFrameEnd(&CapVar); // end of compression
		ICCompressorFree(&CapVar);
		ICClose(CapVar.hic);
		capPreview( hWndCap , false );
		capDriverDisconnect( hWndCap );
		EndDialog ( hWnd , 0 ) ;
		return -1 ;
	}
	return 0 ;
}
// Creates the underlying AVICap capture window and subclasses it into this
// CWnd.  Fails (returning FALSE with iLastError set) when a window is
// already attached, or when capCreateCaptureWindow itself fails.
BOOL CAviCap::Create(DWORD dwStyle, const RECT& rect, CWnd* pParentWnd, BOOL fAutoSize)
{
	ASSERT(!GetSafeHwnd());
	if(GetSafeHwnd())
	{
		// Already attached — refuse to connect twice.
		iLastError = CAP_CREATE_DUP;
		return FALSE;
	}

	_autosize = fAutoSize;

	const int cx = rect.right - rect.left;
	const int cy = rect.bottom - rect.top;
	HWND hCapture = capCreateCaptureWindow("AviCap_Basic", dwStyle,
	                                       rect.left, rect.top, cx, cy,
	                                       pParentWnd->GetSafeHwnd(),
	                                       AVICAP_WINDOW_ID);
	if(hCapture == NULL)
	{
		TRACE("CAviCap Window creation failed\n");
		iLastError = CAP_WINCREATION_FAILED;
		return FALSE;
	}

	// Take over the standard window and allow double-click messages.
	SubclassWindow(hCapture);
	::SetClassLong(hCapture, GCL_STYLE,
	               ::GetClassLong(hCapture, GCL_STYLE) | CS_DBLCLKS);

#ifdef ON_CONNECT_CHECK_DRIVERLIST
	_getDrvList();
#endif

	return TRUE;
}
//--------------------------------------------------------------------------- void __fastcall TForm1::BitBtn1Click(TObject *Sender) { /* Graphics::TBitmap *Buffer; hWndC=capCreateCaptureWindow("",WS_CHILD, this->Left, this->Top, this->Width, this->Height, this->Handle, 11011); Buffer=new Graphics::TBitmap; capDriverConnect(hWndC,0); // подключаемся к [первой] камере (тут загорается светодиод) capGrabFrame(hWndC); // дожидаемся и берём кадр с камеры bool capOK=capEditCopy(hWndC); // копируем кадр в буфер обмена capDriverDisconnect(hWndC); // отключаемся от камеры (тут светодиод гаснет) DestroyWindow(hWndC); // убираем окно if (capOK) { // если всё получилось TClipboard *pCB=Clipboard(); // буфер обмена windows Graphics::TBitmap *Buffer=new Graphics::TBitmap; // сюда писать кадр будем Buffer->LoadFromClipboardFormat(CF_BITMAP,pCB->GetAsHandle(CF_BITMAP),0);// считываем из буфера // Form1->Canvas->Draw(0,0,Buffer); // ну и например, показываем // Buffer->SaveToFile("kadr.bmp"); // или в файл записываем // ... // или ещё чего pCB->Clear(); // очищаем буфер обмена delete Buffer; // освобождаем память */ hWndC=capCreateCaptureWindow("",WS_CHILD, this->Left, this->Top, this->Width, this->Height, this->Handle, 11011); capDriverConnect (hWndC,0); Timer2->Enabled=true; }