//may be slow enough! BOOL CAviCap::_testInternalBuffers() { BOOL oldFlag = _fShow; //setup special callback _notify(lpNotify03,0); if(!capSetCallbackOnFrame(GetSafeHwnd(),(LPVOID)_timerFrameCallbackProc)) return (BOOL)(_internalBufCnt=0); _fShow = FALSE; //set the lowest resolution _pushResolution(); if(_biBitsCountSupported!=0&&_formats.GetSize()!=0) { DWORD biBitsCode; if(_biBitsCountSupported&BITS01) biBitsCode=BITS01; else if(_biBitsCountSupported&BITS04) biBitsCode=BITS04; else if(_biBitsCountSupported&BITS08) biBitsCode=BITS08; else if(_biBitsCountSupported&BITS16) biBitsCode=BITS16; else if(_biBitsCountSupported&BITS24) biBitsCode=BITS24; else biBitsCode=BITS24; LPBITMAPINFO bi=_mk_LPBITMAPINFO(biBitsCode,_smallFrame.cx, _smallFrame.cy); capSetVideoFormat(GetSafeHwnd(), bi, (WORD)_getBMIsize(bi)); _notify(lpNotify03,20); delete bi; } //some drivers needs to skip a few frames Sleep(100); _notify(lpNotify03,50); int i; for(i=0;i<MAX_VALID_BUFFERS_COUNT;i++) { _frameStarted = NOW(); capGrabFrame(GetSafeHwnd()); _notify(lpNotify03,60+i*10); if(_frameDelta>=MIN_FRAME_PERIOD) break; } //restore callback if(!_1FrameCallBackInstalled) capSetCallbackOnFrame(GetSafeHwnd(), (LPVOID)_defaultFrameCallbackProc); else capSetCallbackOnFrame(GetSafeHwnd(), (LPVOID)_1FrameCallbackProc); _InternalBufCnt= _internalBufCnt=i; TRACE("%d internal buffers found\n",_internalBufCnt); _popResolution(); _notify(lpNotify03,100); _fShow = oldFlag; return _internalBufCnt; }
// Notes: // 1. This routing may be slow enough // 2. This routing may cause of driver's notification, such as // "not enough memory..." or "...not for this videostandard..." etc // 3. There is no warranty for this format is really available :-( // 4. This is potentially DANGEROUS work! Some drivers can "freez"! BOOL CAviCap::_testFrameFormat(int x, int y) { LPBITMAPINFO bmpI; BOOL ret; if(!_bmpInfo) if(!_getFormat()) return FALSE; int biBitCount=(int)((LPBITMAPINFOHEADER)_bmpInfo)->biBitCount; switch(biBitCount) { case 1: biBitCount=BITS01; break; case 4: biBitCount=BITS04; break; case 8: biBitCount=BITS08; break; case 16:biBitCount=BITS16; break; case 24:biBitCount=BITS24; break; case 32:biBitCount=BITS32; break; default: return FALSE; } bmpI=_mk_LPBITMAPINFO(biBitCount,x,y); ret=capSetVideoFormat(GetSafeHwnd(), bmpI, (WORD)_getBMIsize(bmpI)); if(ret) //check one's more { #define _XX() (int)((LPBITMAPINFOHEADER)bmpI)->biWidth #define _YY() (int)((LPBITMAPINFOHEADER)bmpI)->biHeight int x=_XX(); int y=_YY(); capGetVideoFormat(GetSafeHwnd(), bmpI, (WORD)_getBMIsize(bmpI)); if(x!=_XX()||y!=_YY()) ret=FALSE; //Check is compressed if(ret) { _bufferSize = 0; //setup special callback for timing capSetCallbackOnFrame(GetSafeHwnd(),(LPVOID)_timerFrameCallbackProc); capGrabFrame(GetSafeHwnd()); if(!_1FrameCallBackInstalled) capSetCallbackOnFrame(GetSafeHwnd(), _defaultFrameCallbackProc); else capSetCallbackOnFrame(GetSafeHwnd(),(LPVOID)_1FrameCallbackProc); _cmprs_formats.Add((BOOL)(_bufferSize!=_calcBufferSize((LPBITMAPINFOHEADER)bmpI))); //end check } } delete bmpI; return ret; }
//Description: the start and end function of a DLL //Input: instance, reason for being called //Output: nothing important BOOL APIENTRY CALLBACK DllMain(HINSTANCE hInstance, DWORD fdwReason, PVOID pvReserved) { //select the proper action to execute based on the reason for being called switch(fdwReason) { case DLL_THREAD_ATTACH: break; case DLL_THREAD_DETACH: break; //when a process attaches to the DLL, create the events and initialize the variables //for that process case DLL_PROCESS_ATTACH: hEventOKToUpdate = CreateEvent(NULL, TRUE, TRUE, NULL); hEventOKToGrab = CreateEvent(NULL, TRUE, FALSE, NULL); pImage = NULL; hWndC = NULL; dwSize = 0; break; //when a process detaches from the DLL, stop any image capturing and destroy the window; //note that the window must be disconnected from the imaging device before this message //is passed otherwise a DLL_THREAD_DETACH message is sent and the DLL gets stuck; //the close function MUST be called before the process is detached or else a crash will //occur case DLL_PROCESS_DETACH: capSetCallbackOnFrame(hWndC, NULL); capPreview(hWndC, FALSE); DestroyWindow(hWndC); hWndC = NULL; dwSize = 0; break; } return TRUE; }
// Destructor: aborts any running capture, disconnects the driver, frees the
// DIB buffers, detaches the VFW callbacks and closes the windows/event.
//
// FIX: CloseWindow and CloseHandle used to be resolved at runtime through
// GetProcAddress(LoadLibrary("USER32.dll")) / LoadLibrary("KERNEL32.dll")
// with character-array-obfuscated names. That leaked a module reference on
// every destruction (LoadLibrary increments the refcount and FreeLibrary was
// never called) and would crash if GetProcAddress returned NULL. The APIs
// are imported anyway, so call them directly — behavior is otherwise
// identical.
CVideoCap::~CVideoCap()
{
    if (m_bIsConnected)
    {
        capCaptureAbort(m_hWndCap);
        capDriverDisconnect(m_hWndCap);
        if (m_lpbmi)
            delete m_lpbmi;
        if (m_lpDIB)
            delete m_lpDIB;
        m_bIsConnected = false;
    }

    // Detach callbacks so the driver cannot call into a destroyed object.
    capSetCallbackOnError(m_hWndCap, NULL);
    capSetCallbackOnFrame(m_hWndCap, NULL);

    // NOTE(review): CloseWindow() only minimizes a window, it does not
    // destroy it — original behavior kept; confirm whether DestroyWindow()
    // was intended.
    CloseWindow(m_hWnd);
    CloseWindow(m_hWndCap);
    CloseHandle(m_hCaptureEvent);
}
void STVideoCapture::Terminate() { int i = 0; int n = (STVideoCapture::MAX_VIDEO_CAPTURE_CHANNELS); for ( i = 0; i < n; i++ ) { if ((this) == STVideoCapture::mListSTVideoCapture[i]) { STVideoCapture::mListSTVideoCapture[i] = (STVideoCapture *)(0); } } if (0 != (*this).mCapDriverConnectStatus) { capSetCallbackOnFrame( (*this).mHWNDCapture, NULL ); } if (((HWND)0) != (*this).mHWNDCapture) { DestroyWindow( (*this).mHWNDCapture ); } if (((unsigned char *)(0)) != (*this).mBGRData) { free( (*this).mBGRData ); } (*this).Clear( ); }
/* Stops video capture on the given device.
 * Detaches the frame callback, releases the capture-window DC (which is
 * used as the "currently capturing" indicator) and resets the device for
 * the next capture. Returns 0 on success (including when the device was
 * not capturing), -1 on a bad descriptor. */
int camera_device_stop_capturing(CameraDevice* cd)
{
    WndCameraDevice* win_dev;

    if (cd == NULL || cd->opaque == NULL) {
        E("%s: Invalid camera device descriptor", __FUNCTION__);
        return -1;
    }
    win_dev = (WndCameraDevice*)cd->opaque;

    /* Disable frame callback. */
    capSetCallbackOnFrame(win_dev->cap_window, NULL);

    /* wcd->dc is the indicator of capture: NULL means we never started. */
    if (win_dev->dc == NULL) {
        W("%s: Device '%s' is not capturing video",
          __FUNCTION__, win_dev->window_name);
        return 0;
    }

    ReleaseDC(win_dev->cap_window, win_dev->dc);
    win_dev->dc = NULL;

    /* Reset the device in preparation for the next capture. */
    _camera_device_reset(win_dev);

    return 0;
}
/**
 * Destroy camera window: detach the frame callback, disconnect the
 * capture driver, destroy the window and clear the handle. No-op when
 * no window exists.
 */
static void camera_destroy_window(camera_handle* pHandle)
{
    if (NULL == pHandle->hCapWnd)
        return;

    capSetCallbackOnFrame(pHandle->hCapWnd, NULL);
    capDriverDisconnect(pHandle->hCapWnd);
    DestroyWindow(pHandle->hCapWnd);
    pHandle->hCapWnd = NULL;
}
// // Note: according SDK LPVIDEOHEADER MUST(!) point to UNCOMPRESSED // video bufer (lpData). Can we trast or not? // Much better make sure... BOOL CAviCap::_IsImageCompressed() { if(!_bmpInfo) if(!_getFormat()) return FALSE; _bufferSize = 0; //setup special callback if(!capSetCallbackOnFrame(GetSafeHwnd(),(LPVOID)_timerFrameCallbackProc)) return FALSE; capGrabFrame(GetSafeHwnd()); if(!_1FrameCallBackInstalled) capSetCallbackOnFrame(GetSafeHwnd(), (LPVOID)_defaultFrameCallbackProc); else capSetCallbackOnFrame(GetSafeHwnd(),(LPVOID)_1FrameCallbackProc); //compare resulting buffer sizes return (_bufferSize!=_calcBufferSize((LPBITMAPINFOHEADER)_bmpInfo)); }
// Releases the capture session: detaches the frame callback, disconnects
// the driver, destroys the capture window and the decompressor, frees the
// cached frame and returns all members to their initial state.
void CvCaptureCAM_VFW::close()
{
    if (capWnd != 0)
    {
        capSetCallbackOnFrame(capWnd, NULL);  // stop frame delivery first
        capDriverDisconnect(capWnd);
        DestroyWindow(capWnd);
        closeHIC();
    }

    cvReleaseImage(&frame);
    init();
}
// Sets up the whole grabber: creates the capture window, connects the
// webcam driver, installs frame/stream/error callbacks, configures the
// audio format and capture parameters, and starts the (initially
// suspended) motion-detection listener thread.
void VideoGrabber::Init()
{
    mGrabNextFrame = FALSE;
    mPreviousFrameExists = FALSE;

    // Setup capture window and connect webcam driver
    camhwnd = capCreateCaptureWindow (_T("Ahtung!"), 0 , 0, 0, FRAME_WIDTH, FRAME_HEIGHT, 0, 0);
    SendMessage(camhwnd, WM_CAP_DRIVER_CONNECT, 0, 0);
    capSetCallbackOnFrame(camhwnd, FrameCallbackProc);
    // Use same callback function for streaming, consider mGrabNextFrame flag!
    capSetCallbackOnVideoStream(camhwnd, FrameCallbackProc);
    // Callback functions may use pointer to this VideoGrabber
    capSetUserData(camhwnd, this);

    // Drop any frame kept from a previous session.
    if (mPreviousFrame != NULL)
    {
        delete[] mPreviousFrame;
        mPreviousFrame = NULL;
    }
    mMotionDetectedDuringLastSecond = FALSE;

    // TODO: Use MPEGLAYER3WAVEFORMAT instead this
    // Setup audio params: mono 8 kHz, 8-bit PCM.
    WAVEFORMATEX wfex;
    wfex.wFormatTag = WAVE_FORMAT_PCM;
    wfex.nChannels = 1;             // Use mono
    wfex.nSamplesPerSec = 8000;
    wfex.nAvgBytesPerSec = 8000;
    wfex.nBlockAlign = 1;
    wfex.wBitsPerSample = 8;
    wfex.cbSize = 0;
    capSetAudioFormat(camhwnd, &wfex, sizeof(WAVEFORMATEX));

    // Setup video capturing and streaming
    CAPTUREPARMS parms;
    capCaptureGetSetup(camhwnd, &parms, sizeof(CAPTUREPARMS));
    parms.fAbortLeftMouse = FALSE;
    parms.wPercentDropForError = 100; // Never abort capturing in case of dropped frames
    parms.fAbortRightMouse = FALSE;
    //parms.fLimitEnabled = TRUE;
    //parms.wTimeLimit = 0; // TODO!
    parms.fYield = TRUE; // TODO!
    capCaptureSetSetup(camhwnd, &parms, sizeof(parms));

    // !!!
    capSetCallbackOnError(camhwnd, capErrorCallback);

    // Create the motion-detection thread suspended, then resume it once
    // the grabber is marked enabled.
    mListenerHandle = CreateThread(0, 0, ListeningRoutine, this, CREATE_SUSPENDED, &mThreadID);
    SetEnabled(TRUE);
    ResumeThread(mListenerHandle);
}
/**
 * Close camera native handle (and destroy preview window).
 * Detaches the frame callback, disconnects the driver and destroys the
 * preview window when one exists, then releases the handle itself.
 * Always returns JAVACALL_OK.
 */
static javacall_result camera_close(javacall_handle handle)
{
    camera_handle* pHandle = (camera_handle*)handle;
    HWND wnd = pHandle->hCapWnd;

    if (wnd) {
        capSetCallbackOnFrame(wnd, NULL);
        capDriverDisconnect(wnd);
        DestroyWindow(wnd);
        pHandle->hCapWnd = NULL;
    }

    FREE(pHandle);
    return JAVACALL_OK;
}
// Initialize camera input bool CvCaptureCAM_VFW::open( int wIndex ) { char szDeviceName[80]; char szDeviceVersion[80]; HWND hWndC = 0; close(); if( (unsigned)wIndex >= 10 ) wIndex = 0; for( ; wIndex < 10; wIndex++ ) { if( capGetDriverDescription( wIndex, szDeviceName, sizeof (szDeviceName), szDeviceVersion, sizeof (szDeviceVersion))) { hWndC = capCreateCaptureWindow ( "My Own Capture Window", WS_POPUP | WS_CHILD, 0, 0, 320, 240, 0, 0); if( capDriverConnect (hWndC, wIndex)) break; DestroyWindow( hWndC ); hWndC = 0; } } if( hWndC ) { capWnd = hWndC; hdr = 0; hic = 0; fourcc = (DWORD)-1; memset( &caps, 0, sizeof(caps)); capDriverGetCaps( hWndC, &caps, sizeof(&caps)); ::MoveWindow( hWndC, 0, 0, 320, 240, TRUE ); capSetUserData( hWndC, (size_t)this ); capSetCallbackOnFrame( hWndC, frameCallback ); CAPTUREPARMS p; capCaptureGetSetup(hWndC,&p,sizeof(CAPTUREPARMS)); p.dwRequestMicroSecPerFrame = 66667/2; capCaptureSetSetup(hWndC,&p,sizeof(CAPTUREPARMS)); //capPreview( hWndC, 1 ); capPreviewScale(hWndC,FALSE); capPreviewRate(hWndC,1); } return capWnd != 0; }
// Creates the VFW capture window, connects the webcam (when video capture
// is enabled), applies the capture parameters and installs all stream /
// status / error callbacks. On any failure after window creation the
// partially-initialized state is rolled back via captureCleanup() and the
// exception is rethrown.
//
// FIX: the source contained the mojibake token "¶m" (HTML entity for
// "&para" + "m") where "&param" was intended in the capCaptureGetSetup /
// capCaptureSetSetup calls — restored so the code compiles.
void MMCapture::captureInit(UINT framesPerSecond, UINT audioBufferSize) {
  DWORD style = WS_CHILD;
  m_captureWindow = capCreateCaptureWindow(_T("my capture window"), style,0,0,640,480,m_receiver.getWindow(),1);
  if(m_captureWindow == NULL) {
    throwException(_T("%s:Cannot create CaptureWindow:%s"),__TFUNCTION__,getLastErrorText().cstr());
  }
  try {
    CHECKRESULT(capSetUserData( m_captureWindow, this));
    if(captureVideo()) {
      CHECKRESULT(capDriverConnect(m_captureWindow, 0 ));
      m_webCamConnected = true;
    }

    // Frame rate, audio buffering and background-thread yield.
    // NOTE(review): framesPerSecond == 0 would divide by zero — presumably
    // callers guarantee a positive rate; confirm.
    CAPTUREPARMS param;
    CHECKRESULT(capCaptureGetSetup(m_captureWindow, &param, sizeof(param)));
    param.dwRequestMicroSecPerFrame = 1000000 / framesPerSecond;
    param.fYield = TRUE;
    param.AVStreamMaster = AVSTREAMMASTER_AUDIO; // AVSTREAMMASTER_NONE;
    param.dwAudioBufferSize = audioBufferSize;
    CHECKRESULT(capCaptureSetSetup(m_captureWindow, &param, sizeof(param)));

    if(captureAudio()) {
      int audioFormatSize = capGetAudioFormat(m_captureWindow,&m_audioFormat, sizeof(m_audioFormat));
      CHECKRESULT(capSetCallbackOnWaveStream( m_captureWindow, captureWaveStreamCallback));
    }
    if(captureVideo()) {
      int videoFormatSize = capGetVideoFormat(m_captureWindow,&m_videoFormat, sizeof(m_videoFormat));
      CHECKRESULT(capSetCallbackOnVideoStream(m_captureWindow, captureVideoStreamCallback));
      CHECKRESULT(capSetCallbackOnFrame(      m_captureWindow, captureFrameCallback));
    }
    CHECKRESULT(capSetCallbackOnStatus(    m_captureWindow, captureStatusCallback));
    CHECKRESULT(capSetCallbackOnCapControl(m_captureWindow, captureControlCallback));
    CHECKRESULT(capSetCallbackOnError(     m_captureWindow, captureErrorCallback));

    if(captureAudio() && m_playAudio) {
      m_audioThread = new AudioPlayerThread(*this);
      TRACE_NEW(m_audioThread);
      m_audioThread->start();
    }
  } catch(...) {
    captureCleanup();
    throw;
  }
}
// Initializes the capture window: installs the user-data pointer and the
// error/frame callbacks, connects the first responding driver (index 0-9),
// reads the current video format and, when a size was requested, applies it.
// NOTE(review): this block appears truncated in the visible source — the
// tail of the function lies outside this chunk.
bool CVideoCap::Initialize(int nWidth, int nHeight)
{
    // CAPTUREPARMS gCapTureParms ;
    // Capabilities of the video driver.
    CAPDRIVERCAPS gCapDriverCaps;
    DWORD dwSize;

    if (!IsWebCam())
        return false;

    capSetUserData(m_hWndCap, this);
    capSetCallbackOnError(m_hWndCap, capErrorCallback);
    if (!capSetCallbackOnFrame(m_hWndCap, FrameCallbackProc))
    {
        return false;
    }

    // Connect the capture window to a driver: try indices 0..9.
    int i;
    for (i = 0; i < 10; i++)
    {
        if (capDriverConnect(m_hWndCap, i))
            break;
    }
    if (i == 10)
        return false;

    dwSize = capGetVideoFormatSize(m_hWndCap);
    m_lpbmi = new BITMAPINFO;

    // M263 only supports 176*144 and 352*288 (experiments with 352*288
    // 24-bit color only work with biPlanes = 1).
    capGetVideoFormat(m_hWndCap, m_lpbmi, dwSize);

    // Apply the caller-specified size, if any.
    if (nWidth && nHeight)
    {
        m_lpbmi->bmiHeader.biWidth = nWidth;
        m_lpbmi->bmiHeader.biHeight = nHeight;
        m_lpbmi->bmiHeader.biPlanes = 1;
        // DWORD-align each scan line when computing the image size.
        m_lpbmi->bmiHeader.biSizeImage = (((m_lpbmi->bmiHeader.biWidth * m_lpbmi->bmiHeader.biBitCount + 31) & ~31) >> 3) * m_lpbmi->bmiHeader.biHeight;
        // Experiments show some cameras reject the requested resolution.
        if (!capSetVideoFormat(m_hWndCap, m_lpbmi, sizeof(BITMAPINFO)))
            return false;
    }
// Disconnection // No comments BOOL CAviCap::Disconnect() { CHECKWIN(); CHECKCNCT(); capSetCallbackOnFrame(GetSafeHwnd(), NULL); capSetCallbackOnVideoStream(GetSafeHwnd(), NULL); StartSeq(FALSE); if(_curDriver!=-1&&GetSafeHwnd()) if(capDriverDisconnect(GetSafeHwnd())) _curDriver=-1; if(_bmpInfo) {delete _bmpInfo; _bmpInfo=NULL;} if(_curDriver==-1) return TRUE; else return FALSE; }
//Description: sets up an imaging device for capture operations //Input: digitizer ID number and a dummy string //Output: returns success (non-negative) or failure EXPORT long SetUp(long driver, long width, long height){ //reset the image and size variables pImage = NULL; dwSize = 0; //make sure we don't already have a window if(hWndC == NULL) { //create the capture window, the window is currently being draw in the top left corner //because the buffer is filled with nothing if it is not being drawn on the screen; //only a window size of 1x1 is needed to get this to work hWndC = capCreateCaptureWindow(TEXT("Webcam Capture Window"), WS_CHILD | WS_VISIBLE, 0, 0, 1, 1, GetDesktopWindow(), 0); } //connect the selected driver to the window if(!capDriverConnect(hWndC, driver)) { DestroyWindow(hWndC); return -1; } //create a frame capture callback function if(!capSetCallbackOnFrame(hWndC, capFrame)) { capDriverDisconnect(hWndC); DestroyWindow(hWndC); return -1; } //begin previewing capPreviewRate(hWndC, 66); if(!capPreview(hWndC, TRUE)) { capDriverDisconnect(hWndC); DestroyWindow(hWndC); return -1; } return 0; }
/**
 * Set camera preview window.
 * Destroys any previous preview window, creates a new VFW capture window
 * at (x, y, w, h) offset by the platform screen constants, connects capture
 * driver 0 and starts frame delivery via camera_grabber_callback.
 * Returns the new capture window handle, or NULL when no camera driver
 * answers.
 *
 * FIX: the function-local helper macros are now #undef'd at the end of the
 * function; previously they leaked into the remainder of the translation
 * unit and could silently collide with other definitions.
 */
static HWND camera_set_preview_window(javacall_handle handle, int x, int y, int w, int h, BOOL visible)
{
#define DEFAULT_CAPTURE_DRIVER  0
#define DEFAULT_PREVIEW_RATE    150 /* ms unit => Increase this value to optimize performance */
    BOOL ret;
    camera_handle* pHandle = (camera_handle*)handle;
    DWORD wsVisible = TRUE == visible ? WS_VISIBLE : 0;

    /* Drop any previous window before creating a new one. */
    camera_destroy_window(pHandle);

    JAVA_DEBUG_PRINT4("[camera] capCreateCaptureWindow %d %d %d %d\n", x, y, w, h);

    pHandle->hCapWnd = capCreateCaptureWindow(_T("Sun_Java_Cap_Window"),
        wsVisible | WS_CHILD | WS_CLIPSIBLINGS,
        x + X_SCREEN_OFFSET, y + Y_SCREEN_OFFSET + TOP_BAR_HEIGHT,
        w, h, GET_MCIWND_HWND(), 0xffff);

    JAVA_DEBUG_PRINT1("[camera] capCreateCaptureWindow %d\n", pHandle->hCapWnd);

    if (pHandle->hCapWnd) {
        ret = capDriverConnect(pHandle->hCapWnd, DEFAULT_CAPTURE_DRIVER);
        if (FALSE == ret) {
            JAVA_DEBUG_PRINT(
                "[camera] capDriverConnect fail - is there camera attached?\n");
            DestroyWindow(pHandle->hCapWnd);
            pHandle->hCapWnd = NULL;
            return NULL;
        }

        capSetCallbackOnFrame(pHandle->hCapWnd, camera_grabber_callback);
        capPreviewScale(pHandle->hCapWnd, TRUE);
        capPreviewRate(pHandle->hCapWnd, DEFAULT_PREVIEW_RATE);
    }

    return pHandle->hCapWnd;
#undef DEFAULT_CAPTURE_DRIVER
#undef DEFAULT_PREVIEW_RATE
}
// Destructor: aborts any running capture, disconnects the driver, frees the
// DIB buffers, detaches the VFW callbacks and closes the windows/event.
CVideoCap::~CVideoCap()
{
    if (m_bIsConnected)
    {
        capCaptureAbort(m_hWndCap);
        capDriverDisconnect(m_hWndCap);
        if (m_lpbmi)
            delete m_lpbmi;
        if (m_lpDIB)
            delete m_lpDIB;
        m_bIsConnected = false;
    }

    // Detach callbacks so the driver cannot call into a destroyed object.
    capSetCallbackOnError(m_hWndCap, NULL);
    capSetCallbackOnFrame(m_hWndCap, NULL);

    // NOTE(review): CloseWindow() only minimizes a window, it does not
    // destroy it — DestroyWindow() may have been intended here; confirm
    // before changing, other code may rely on the windows surviving.
    CloseWindow(m_hWnd);
    CloseWindow(m_hWndCap);
    CloseHandle(m_hCaptureEvent);
}
// Initialize camera input bool CvCaptureCAM_VFW::open( int wIndex ) { char szDeviceName[80]; char szDeviceVersion[80]; HWND hWndC = 0; close(); if( (unsigned)wIndex >= 10 ) wIndex = 0; for( ; wIndex < 10; wIndex++ ) { if( capGetDriverDescription( wIndex, szDeviceName, sizeof (szDeviceName), szDeviceVersion, sizeof (szDeviceVersion))) { hWndC = capCreateCaptureWindow ( "My Own Capture Window", WS_POPUP | WS_CHILD, 0, 0, 320, 240, 0, 0); if( capDriverConnect (hWndC, wIndex)) break; DestroyWindow( hWndC ); hWndC = 0; } } if( hWndC ) { capWnd = hWndC; hdr = 0; hic = 0; fourcc = (DWORD)-1; memset( &caps, 0, sizeof(caps)); capDriverGetCaps( hWndC, &caps, sizeof(caps)); CAPSTATUS status = {}; capGetStatus(hWndC, &status, sizeof(status)); ::SetWindowPos(hWndC, NULL, 0, 0, status.uiImageWidth, status.uiImageHeight, SWP_NOZORDER|SWP_NOMOVE); capSetUserData( hWndC, (size_t)this ); capSetCallbackOnFrame( hWndC, frameCallback ); CAPTUREPARMS p; capCaptureGetSetup(hWndC,&p,sizeof(CAPTUREPARMS)); p.dwRequestMicroSecPerFrame = 66667/2; // 30 FPS capCaptureSetSetup(hWndC,&p,sizeof(CAPTUREPARMS)); //capPreview( hWndC, 1 ); capPreviewScale(hWndC,FALSE); capPreviewRate(hWndC,1); // Get frame initial parameters. const DWORD size = capGetVideoFormatSize(capWnd); if( size > 0 ) { unsigned char *pbi = new unsigned char[size]; if( pbi ) { if( capGetVideoFormat(capWnd, pbi, size) == size ) { BITMAPINFOHEADER& vfmt = ((BITMAPINFO*)pbi)->bmiHeader; widthSet = vfmt.biWidth; heightSet = vfmt.biHeight; fourcc = vfmt.biCompression; } delete []pbi; } } // And alternative way in case of failure. if( widthSet == 0 || heightSet == 0 ) { widthSet = status.uiImageWidth; heightSet = status.uiImageHeight; } } return capWnd != 0; }
// Dialog procedure: on WM_INITDIALOG creates the capture window, fills the
// x264 compressor descriptor and starts the sender thread; on the Connect
// button it connects/configures the driver and begins sequence compression;
// on Exit/Close it shuts the compressor down and disconnects the driver.
long __stdcall DlgProc ( HWND hWnd , unsigned msg , unsigned wParam , long lParam )
{
    switch(msg)
    {
    case WM_INITDIALOG:
        //hEdit = GetDlgItem( hWnd , I_EDIT );
        //GetClientRect( hEdit , &rect );
        hWndCap = capCreateCaptureWindow ( NULL, WS_CHILD | WS_VISIBLE , 0, 0, 320, 240, hWnd, 1235 );
        //hWndCap = capCreateCaptureWindow ( NULL, WS_CHILD | WS_VISIBLE , 0, 0, (rect.right-rect.left ), (rect.bottom-rect.top), hEdit, 1235);

        // Fill the COMPVARS structure manually (x264 compressor).
        ZeroMemory( &CapVar, sizeof(COMPVARS) );
        CapVar.cbSize = sizeof(COMPVARS);
        CapVar.dwFlags = ICMF_COMPVARS_VALID;
        CapVar.cbState = 0;
        CapVar.fccHandler = mmioFOURCC( 'x', '2', '6', '4' );
        CapVar.fccType = ICTYPE_VIDEO;

        // Open the compressor (slow).
        CapVar.hic = ICOpen( ICTYPE_VIDEO, CapVar.fccHandler, ICMODE_COMPRESS );

        hThread = CreateThread( NULL, 0, (LPTHREAD_START_ROUTINE)SendThread, NULL, 0, 0 );
        return -1 ;

    case WM_COMMAND:
        switch(LOWORD(wParam))
        {
        case I_BUTTON_CONN :
            if( !capDriverConnect( hWndCap, 0 ) )
            {
                EndDialog ( hWnd, 0 );
                return -1;
            }

            // Configure capture: ~15 fps, yield to a background thread,
            // no audio, no MCI control, ESC aborts.
            capCaptureGetSetup( hWndCap, &CapParms, sizeof(CAPTUREPARMS) );
            CapParms.dwRequestMicroSecPerFrame = 66000;
            CapParms.fLimitEnabled = FALSE;
            CapParms.fCaptureAudio = FALSE;
            CapParms.fMCIControl = FALSE;
            CapParms.fYield = TRUE;
            CapParms.vKeyAbort = VK_ESCAPE;
            CapParms.fAbortLeftMouse = FALSE;
            CapParms.fAbortRightMouse = FALSE;
            capCaptureSetSetup( hWndCap, &CapParms, sizeof(CAPTUREPARMS) );

            capPreviewScale( hWndCap, 1 );
            capPreviewRate( hWndCap, 66 );
            capPreviewScale( hWndCap, FALSE );
            capPreview( hWndCap, 1 );

            //added by jimmy
            // OPTIONAL STEP: Setup resolution
            capGetVideoFormat( hWndCap, &InputBmpInfo ,sizeof(InputBmpInfo) );
            //InputBmpInfo.bmiHeader.biWidth = 320; //(rect.right-rect.left );
            //InputBmpInfo.bmiHeader.biHeight = 240; //(rect.bottom-rect.top);
            //InputBmpInfo.bmiHeader.biBitCount = 24;
            capSetVideoFormat( hWndCap, &InputBmpInfo, sizeof(InputBmpInfo) );
            //capDriverDisconnect (hWndCap, 0);//Can we do better?
            //capDriverConnect (hWndCap, 0);

            capSetCallbackOnFrame( hWndCap, FrameCallBack );

            if(CapVar.hic > 0 )
            {
                // Returns the size of the source-format structure (InputBmpInfo).
                OutFormatSize = ICCompressGetFormatSize( CapVar.hic, &InputBmpInfo.bmiHeader );
                // Fills the output-format structure (OutputBmpInfo).
                ICCompressGetFormat( CapVar.hic, &InputBmpInfo.bmiHeader, &OutputBmpInfo.bmiHeader );
                // Maximum size of one compressed (output) frame.
                OutBufferSize = ICCompressGetSize( CapVar.hic, &InputBmpInfo.bmiHeader, &OutputBmpInfo.bmiHeader );
                // Start sequence compression.
                ICSeqCompressFrameStart( &CapVar, &InputBmpInfo );
            }
            break;

        case I_BUTTON_EXIT :
            // End compression and release the compressor.
            ICSeqCompressFrameEnd(&CapVar);
            ICCompressorFree(&CapVar);
            ICClose(CapVar.hic);
            capPreview( hWndCap , false );
            capDriverDisconnect( hWndCap );
            EndDialog ( hWnd , 0 ) ;
            break;
        }
        return -1 ;

    case WM_CLOSE :
        // End compression and release the compressor.
        ICSeqCompressFrameEnd(&CapVar);
        ICCompressorFree(&CapVar);
        ICClose(CapVar.hic);
        capPreview( hWndCap , false );
        capDriverDisconnect( hWndCap );
        EndDialog ( hWnd , 0 ) ;
        return -1 ;
    }
    return 0 ;
}
//General connection method.
// Connects the capture window to VFW driver `indx`, gathers driver
// information, optionally runs the (slow) driver probing tests, installs
// the default/single-frame callback and applies default capture
// parameters. Returns TRUE on success; on failure sets iLastError and
// disconnects.
BOOL CAviCap::_connect2Driver(int indx)
{
    CHECKWIN();
    if(indx<0||indx>_totalDrv())
    {
        TRACE("CAviCap Window connection failed: illegal driver index\n");
        iLastError=CAP_ILLEGAL_DRIVERID;
        return FALSE;
    }

    AfxGetApp( )-> DoWaitCursor( 1 );

    //connect via VFW
    BOOL ret=capDriverConnect(GetSafeHwnd(), indx);
    checkQuit();
    if(ret)
    {
        //starts gathering of driver information
        _curDriver=indx;
        if(!_getCaps()|| !_getSetup()|| !_getStatus()|| !_getFormat())
        {
            iLastError=CAP_INIT_FAILED;
            ret=FALSE;
        }
        else
        {
            //try to make window unvisible befor testing
            BOOL vsbl=IsWindowVisible();
            if(vsbl) ShowWindow(SW_HIDE);

            //Slow or fast connection?
            if(!m_DoQuickConnection)
            {
                //OK. We have enough time to test driver :-)
                // Each probe is compile-time optional and may be slow.
                UP_THR();
#ifdef ON_CONNECT_TEST_BIBITSCOUNT
                _testBiBitsCount();
#endif
                checkQuit();
#ifdef ON_CONNECT_CHECK_VALIDFORMATS
                _testValidFormats();
#endif
                checkQuit();
#ifdef DEFAULT_FORCE_SINGLE_FRAME
                _testInternalBuffers();
#endif
                checkQuit();
                DN_THR();
            }
            //else skip testing

            // Setup default capture parameters
            // OPTIONAL
#ifdef DEFAULT_FORCE_SINGLE_FRAME
            capSetCallbackOnFrame(GetSafeHwnd(),(LPVOID)_1FrameCallbackProc);
            _1FrameCallBackInstalled = TRUE;
#else
            capSetCallbackOnFrame(GetSafeHwnd(), _defaultFrameCallbackProc);
            capPreviewRate(GetSafeHwnd(), _previewRate);
#endif
#ifdef DEFAULT_USED_DOSMEMORY
            _captureParms.wNumVideoRequested=1;
            _captureParms.fUsingDOSMemory=TRUE;
#endif
#ifdef DEFAULT_STEP_CAPTUREAT2X
            _captureParms.fStepCaptureAt2x=TRUE;
            _captureParms.wStepCaptureAverageFrames=3;
#endif
            _captureParms.dwRequestMicroSecPerFrame=20000;
            _captureParms.fYield = TRUE;
            // Apply, then read back what the driver actually accepted.
            capCaptureSetSetup (GetSafeHwnd(), &_captureParms, sizeof CAPTUREPARMS);
            capCaptureGetSetup (GetSafeHwnd(), &_captureParms, sizeof CAPTUREPARMS);

            if(vsbl) ShowWindow(SW_SHOW); //restore
        }//end else
    }
    else
    {
        AfxGetApp( )-> DoWaitCursor( 0 );
        TRACE("Connection to Capure Driver Failed\n");
        iLastError=CAP_CONNECTION_FAILED;
        return FALSE;
    }

    if(ret) TRACE("Connected to <%s: %s>\n",GetDriverName(),GetDriverVer());
    else TRACE("Connection (phase 2) to Capure Driver Failed\n");

    if(!ret) Disconnect();
    return ret;
}
// Initial update: creates the VFW capture window as a child of this view,
// connects webcam driver 0, lets the user pick a video format, starts the
// preview and installs the stream/frame callbacks.
void CSoCProjectView::OnInitialUpdate()
{
    CScrollView::OnInitialUpdate();

    CSize sizeTotal;
    // TODO: calculate the total size of this view.
    sizeTotal.cx = 640;
    sizeTotal.cy = 480;
    SetScrollSizes(MM_TEXT, sizeTotal);

    CSoCProjectDoc*pDoc = GetDocument();

    // TODO: add construction code here.
    RECT r;
    GetClientRect(&r);
    pDoc->m_hCamWnd = capCreateCaptureWindow(
        _T("Capture Window"),
        WS_CHILD | WS_VISIBLE,
        5, 5, r.right-5, r.bottom-5,
        this->m_hWnd,
        NULL);

    // Cameras 0..9 can be selected, in installed-device order.
    if(!capDriverConnect(pDoc->m_hCamWnd, 0))
        AfxMessageBox(_T("웹캠 인식 실패 ㅠㅠ"));  // "webcam detection failed" (runtime string kept as-is)

    // Fetch capability information for the current driver.
    capDriverGetCaps(pDoc->m_hCamWnd, &pDoc->m_psCapsInfo, sizeof(pDoc->m_psCapsInfo));

    // Check whether the driver provides a video-format dialog.
    if(pDoc->m_psCapsInfo.fHasDlgVideoFormat)
    {
        // The dialog lets the user choose resolution, pixel format and
        // frame-buffer size. Selecting an unsupported video format will
        // just show a black screen...
        capDlgVideoFormat(pDoc->m_hCamWnd);
    }

    // Overlay can only be used when m_psCapsInfo.fHasOverlay is non-zero.
    if(pDoc->m_psCapsInfo.fHasOverlay)
    {
        // Hardware overlay reduces system load (optional).
        capOverlay(pDoc->m_hCamWnd, FALSE);
    }

    // Read back the BITMAPINFO currently in effect.
    capGetVideoFormat(pDoc->m_hCamWnd, &pDoc->m_BTMInfo, sizeof(pDoc->m_BTMInfo));

    // Preview rate is in 1/1000 s units.
    capPreviewRate(pDoc->m_hCamWnd, 1);
    // Start the preview.
    capPreview(pDoc->m_hCamWnd, TRUE);

    CAPTUREPARMS cp;
    capCaptureGetSetup(pDoc->m_hCamWnd, &cp, sizeof(cp) ); // get the current defaults
    // NOTE(review): 1 µs/frame requests the maximum rate the driver can
    // deliver — presumably intentional; confirm.
    cp.dwRequestMicroSecPerFrame = 1;       // Set desired frame rate
    cp.fMakeUserHitOKToCapture = FALSE;
    cp.fYield = TRUE;                       // we want capture on a background thread.
    cp.wNumVideoRequested = (WORD) 1;       // we may get less than this - no problem
    cp.fCaptureAudio = FALSE;
    cp.vKeyAbort = 0;                       // If no key is provided, it won't stop...
    cp.fAbortLeftMouse = FALSE;
    cp.fAbortRightMouse = FALSE;
    cp.fLimitEnabled = FALSE;               // we want to stop
    cp.fMCIControl = FALSE;
    capCaptureSetSetup(pDoc->m_hCamWnd, &cp, sizeof(cp) );

    // Same callback handles both streamed and single-grabbed frames.
    capSetCallbackOnVideoStream(pDoc->m_hCamWnd, VideoCallbackProc);
    capSetCallbackOnFrame(pDoc->m_hCamWnd, VideoCallbackProc);
}
//------------------------------------------------------------------------------------------------------------------------------------------------------------------ //미리보기에 나타낼 때마다. 호출될 사용자 콜백함수를 등록할 수 있다, BOOL CWebCam::SetCallBackOnFrame(LRESULT(*fpProc)(HWND, LPVIDEOHDR)) //매개변수로 받은 함수 포인터를 그대로 넘긴다. { return capSetCallbackOnFrame(m_hCam, fpProc); }
/* Starts video capture on the given device.
 * Connects the capture driver, negotiates the frame format, grabs one probe
 * frame via the clipboard to discover the actual bitmap layout, allocates
 * the framebuffer and selects the V4L2 pixel format. Returns 0 on success,
 * -1 on failure (the device is reset on every error path).
 *
 * FIX: the GDI bitmap's biSizeImage was computed as
 * bmWidthBytes * bmWidth; image size is bytes-per-row times the ROW COUNT,
 * i.e. bmWidthBytes * bmHeight. The old formula under-allocated the
 * framebuffer whenever the frame was taller than it was wide. */
int camera_device_start_capturing(CameraDevice* cd,
                                  uint32_t pixel_format,
                                  int frame_width,
                                  int frame_height)
{
    WndCameraDevice* wcd;
    HBITMAP bm_handle;
    BITMAP bitmap;
    size_t format_info_size;
    CAPTUREPARMS cap_param;

    if (cd == NULL || cd->opaque == NULL) {
        E("%s: Invalid camera device descriptor", __FUNCTION__);
        return -1;
    }
    wcd = (WndCameraDevice*)cd->opaque;

    /* wcd->dc is an indicator of capturing: !NULL - capturing, NULL - not */
    if (wcd->dc != NULL) {
        W("%s: Capturing is already on on device '%s'",
          __FUNCTION__, wcd->window_name);
        return 0;
    }

    /* Connect capture window to the video capture driver. */
    if (!capDriverConnect(wcd->cap_window, wcd->input_channel)) {
        return -1;
    }

    /* Get current frame information from the driver. */
    format_info_size = capGetVideoFormatSize(wcd->cap_window);
    if (format_info_size == 0) {
        E("%s: Unable to get video format size: %d",
          __FUNCTION__, GetLastError());
        _camera_device_reset(wcd);
        return -1;
    }
    wcd->frame_bitmap = (BITMAPINFO*)malloc(format_info_size);
    if (wcd->frame_bitmap == NULL) {
        E("%s: Unable to allocate frame bitmap info buffer", __FUNCTION__);
        _camera_device_reset(wcd);
        return -1;
    }
    if (!capGetVideoFormat(wcd->cap_window, wcd->frame_bitmap,
                           format_info_size)) {
        E("%s: Unable to obtain video format: %d", __FUNCTION__, GetLastError());
        _camera_device_reset(wcd);
        return -1;
    }

    /* Lets see if we need to set different frame dimensions */
    if (wcd->frame_bitmap->bmiHeader.biWidth != frame_width ||
            abs(wcd->frame_bitmap->bmiHeader.biHeight) != frame_height) {
        /* Dimensions don't match. Set new frame info. */
        wcd->frame_bitmap->bmiHeader.biWidth = frame_width;
        wcd->frame_bitmap->bmiHeader.biHeight = frame_height;
        /* We need to recalculate image size, since the capture window / driver
         * will use image size provided by us. */
        if (wcd->frame_bitmap->bmiHeader.biBitCount == 24) {
            /* Special case that may require WORD boundary alignment. */
            uint32_t bpl = (frame_width * 3 + 1) & ~1;
            wcd->frame_bitmap->bmiHeader.biSizeImage = bpl * frame_height;
        } else {
            wcd->frame_bitmap->bmiHeader.biSizeImage =
                (frame_width * frame_height *
                 wcd->frame_bitmap->bmiHeader.biBitCount) / 8;
        }
        if (!capSetVideoFormat(wcd->cap_window, wcd->frame_bitmap,
                               format_info_size)) {
            E("%s: Unable to set video format: %d",
              __FUNCTION__, GetLastError());
            _camera_device_reset(wcd);
            return -1;
        }
    }

    if (wcd->frame_bitmap->bmiHeader.biCompression > BI_PNG) {
        D("%s: Video capturing driver has reported pixel format %.4s",
          __FUNCTION__,
          (const char*)&wcd->frame_bitmap->bmiHeader.biCompression);
    }

    /* Most of the time frame bitmaps come in "bottom-up" form, where its origin
     * is the lower-left corner. However, it could be in the normal "top-down"
     * form with the origin in the upper-left corner. So, we must adjust the
     * biHeight field, since the way "top-down" form is reported here is by
     * setting biHeight to a negative value. */
    if (wcd->frame_bitmap->bmiHeader.biHeight < 0) {
        wcd->frame_bitmap->bmiHeader.biHeight =
            -wcd->frame_bitmap->bmiHeader.biHeight;
        wcd->is_top_down = 1;
    } else {
        wcd->is_top_down = 0;
    }

    /* Get DC for the capturing window that will be used when we deal with
     * bitmaps obtained from the camera device during frame capturing. */
    wcd->dc = GetDC(wcd->cap_window);
    if (wcd->dc == NULL) {
        E("%s: Unable to obtain DC for %s: %d",
          __FUNCTION__, wcd->window_name, GetLastError());
        _camera_device_reset(wcd);
        return -1;
    }

    /* Setup some capture parameters. */
    if (capCaptureGetSetup(wcd->cap_window, &cap_param, sizeof(cap_param))) {
        /* Use separate thread to capture video stream. */
        cap_param.fYield = TRUE;
        /* Don't show any dialogs. */
        cap_param.fMakeUserHitOKToCapture = FALSE;
        capCaptureSetSetup(wcd->cap_window, &cap_param, sizeof(cap_param));
    }

    /*
     * At this point we need to grab a frame to properly setup framebuffer, and
     * calculate pixel format. The problem is that bitmap information obtained
     * from the driver doesn't necessarily match the actual bitmap we're going
     * to obtain via capGrabFrame / capEditCopy / GetClipboardData
     */

    /* Grab a frame, and post it to the clipboard. Not very effective, but this
     * is how capXxx API is operating. */
    if (!capGrabFrameNoStop(wcd->cap_window) ||
        !capEditCopy(wcd->cap_window) ||
        !OpenClipboard(wcd->cap_window)) {
        E("%s: Device '%s' is unable to save frame to the clipboard: %d",
          __FUNCTION__, wcd->window_name, GetLastError());
        _camera_device_reset(wcd);
        return -1;
    }

    /* Get bitmap handle saved into clipboard. Note that bitmap is still
     * owned by the clipboard here! */
    bm_handle = (HBITMAP)GetClipboardData(CF_BITMAP);
    if (bm_handle == NULL) {
        E("%s: Device '%s' is unable to obtain frame from the clipboard: %d",
          __FUNCTION__, wcd->window_name, GetLastError());
        CloseClipboard();
        _camera_device_reset(wcd);
        return -1;
    }

    /* Get bitmap object that is initialized with the actual bitmap info. */
    if (!GetObject(bm_handle, sizeof(BITMAP), &bitmap)) {
        E("%s: Device '%s' is unable to obtain frame's bitmap: %d",
          __FUNCTION__, wcd->window_name, GetLastError());
        EmptyClipboard();
        CloseClipboard();
        _camera_device_reset(wcd);
        return -1;
    }

    /* Now that we have all we need in 'bitmap' */
    EmptyClipboard();
    CloseClipboard();

    /* Make sure that dimensions match. Othewise - fail. */
    if (wcd->frame_bitmap->bmiHeader.biWidth != bitmap.bmWidth ||
            wcd->frame_bitmap->bmiHeader.biHeight != bitmap.bmHeight ) {
        E("%s: Requested dimensions %dx%d do not match the actual %dx%d",
          __FUNCTION__, frame_width, frame_height,
          wcd->frame_bitmap->bmiHeader.biWidth,
          wcd->frame_bitmap->bmiHeader.biHeight);
        _camera_device_reset(wcd);
        return -1;
    }

    /* Create bitmap info that will be used with GetDIBits. */
    wcd->gdi_bitmap = (BITMAPINFO*)malloc(wcd->frame_bitmap->bmiHeader.biSize);
    if (wcd->gdi_bitmap == NULL) {
        E("%s: Unable to allocate gdi bitmap info", __FUNCTION__);
        _camera_device_reset(wcd);
        return -1;
    }
    memcpy(wcd->gdi_bitmap, wcd->frame_bitmap,
           wcd->frame_bitmap->bmiHeader.biSize);
    wcd->gdi_bitmap->bmiHeader.biCompression = BI_RGB;
    wcd->gdi_bitmap->bmiHeader.biBitCount = bitmap.bmBitsPixel;
    /* Image size is bytes-per-row * number of rows (was bmWidthBytes * bmWidth,
     * which under-allocated the framebuffer for tall frames). */
    wcd->gdi_bitmap->bmiHeader.biSizeImage =
        bitmap.bmWidthBytes * bitmap.bmHeight;

    /* Adjust GDI's bitmap biHeight for proper frame direction ("top-down", or
     * "bottom-up") We do this trick in order to simplify pixel format
     * conversion routines, where we always assume "top-down" frames. The trick
     * here is to have negative biHeight in 'gdi_bitmap' if driver provides
     * "bottom-up" frames, and positive biHeight in 'gdi_bitmap' if driver
     * provides "top-down" frames. This way GetGDIBits will always return
     * "top-down" frames. */
    if (wcd->is_top_down) {
        wcd->gdi_bitmap->bmiHeader.biHeight =
            wcd->frame_bitmap->bmiHeader.biHeight;
    } else {
        wcd->gdi_bitmap->bmiHeader.biHeight =
            -wcd->frame_bitmap->bmiHeader.biHeight;
    }

    /* Allocate framebuffer. */
    wcd->framebuffer = (uint8_t*)malloc(wcd->gdi_bitmap->bmiHeader.biSizeImage);
    if (wcd->framebuffer == NULL) {
        E("%s: Unable to allocate %d bytes for framebuffer",
          __FUNCTION__, wcd->gdi_bitmap->bmiHeader.biSizeImage);
        _camera_device_reset(wcd);
        return -1;
    }

    /* Lets see what pixel format we will use. */
    if (wcd->gdi_bitmap->bmiHeader.biBitCount == 16) {
        wcd->pixel_format = V4L2_PIX_FMT_RGB565;
    } else if (wcd->gdi_bitmap->bmiHeader.biBitCount == 24) {
        wcd->pixel_format = V4L2_PIX_FMT_BGR24;
    } else if (wcd->gdi_bitmap->bmiHeader.biBitCount == 32) {
        wcd->pixel_format = V4L2_PIX_FMT_BGR32;
    } else {
        E("%s: Unsupported number of bits per pixel %d",
          __FUNCTION__, wcd->gdi_bitmap->bmiHeader.biBitCount);
        _camera_device_reset(wcd);
        return -1;
    }

    D("%s: Capturing device '%s': %d bits per pixel in %.4s [%dx%d] frame",
      __FUNCTION__, wcd->window_name, wcd->gdi_bitmap->bmiHeader.biBitCount,
      (const char*)&wcd->pixel_format, wcd->frame_bitmap->bmiHeader.biWidth,
      wcd->frame_bitmap->bmiHeader.biHeight);

    /* Try to setup capture frame callback. */
    wcd->use_clipboard = 1;
    if (capSetCallbackOnFrame(wcd->cap_window, _on_captured_frame)) {
        /* Callback is set. Don't use clipboard when capturing frames. */
        wcd->use_clipboard = 0;
    }

    return 0;
}
int STVideoCapture::Initialize ( HWND hwndParentWindow, // This must be non-null STVideoCaptureFormat videoCaptureFormat, int capturePeriodMilliseconds, STVideoCaptureClient * pSTVideoCaptureClient ) { (*this).Clear(); // Validate capture format int captureWidth = 0; int captureHeight = 0; switch (videoCaptureFormat) { case BGR320x240: { captureWidth = 320; captureHeight = 240; } break; } if ((0 == captureWidth) || (0 == captureHeight)) { printf( "Invalid capture format." ); (*this).Clear(); return (FALSE); } (*this).mSTVideoCaptureFormat = videoCaptureFormat; // We create a capture window only because we are required to create // such a window to use the Video for Windows (VFW32) API. We make // this window tiny to keep it out of the way of our application. // For our purposes, this window is simply a necessary interface to // VFW functionality. (*this).mHWNDCapture = capCreateCaptureWindowA ( "STVideoCapture Window", (WS_CHILD | WS_VISIBLE | WS_CLIPCHILDREN | WS_CLIPSIBLINGS), // style 0, // x 0, // y 4, // width 4, // height hwndParentWindow, // Parent window : This must be non-null 0 // nID ); if (((HWND)(0)) == (*this).mHWNDCapture) { printf( "Failed to create video capture window." ); DestroyWindow( (*this).mHWNDCapture ); (*this).Clear(); return (FALSE); } (*this).mCapDriverConnectStatus = capDriverConnect( (*this).mHWNDCapture, 0 ); if (0 == (*this).mCapDriverConnectStatus) { printf( "Failed to connect to video driver." 
); DestroyWindow( (*this).mHWNDCapture ); (*this).Clear(); return (FALSE); } CAPDRIVERCAPS capdrivercaps; memset( (&(capdrivercaps)), 0, sizeof(capdrivercaps) ); capDriverGetCaps( (*this).mHWNDCapture, &(capdrivercaps), sizeof(capdrivercaps) ); //capDlgVideoFormat( mHWNDCapture ); //capDlgVideoSource( mHWNDCapture ); // Source; Brightness, Contrast, Saturation, Exposure // Allocate a local buffer to store capture data int bytesPerPixel = 3; switch (videoCaptureFormat) { case BGR320x240: { bytesPerPixel = 3; } break; } int totalImageBytes = 0; totalImageBytes = (captureHeight * (captureWidth * bytesPerPixel)); (*this).mBGRData = (unsigned char *) malloc( totalImageBytes ); memset( (*this).mBGRData, 0, totalImageBytes ); // Set the video stream callback function (*this).mpSTVideoCaptureClient = pSTVideoCaptureClient; capSetCallbackOnFrame( (*this).mHWNDCapture, STVideoCapture::CommonCaptureCallback ); // Set the preview rate in milliseconds (*this).mCapturePeriodMilliseconds = capturePeriodMilliseconds; capPreviewRate( (*this).mHWNDCapture, capturePeriodMilliseconds ); // Disable preview mode capPreview( (*this).mHWNDCapture, FALSE ); // Setup the data we want returned to us BITMAPINFO capbitmapinfo; memset( &(capbitmapinfo), 0, sizeof(BITMAPINFO) ); capbitmapinfo.bmiHeader.biSize = sizeof(BITMAPINFOHEADER); capbitmapinfo.bmiHeader.biWidth = captureWidth; capbitmapinfo.bmiHeader.biHeight = captureHeight; capbitmapinfo.bmiHeader.biPlanes = 1; capbitmapinfo.bmiHeader.biBitCount = 24; capbitmapinfo.bmiHeader.biCompression = BI_RGB; capbitmapinfo.bmiHeader.biSizeImage = totalImageBytes; capbitmapinfo.bmiHeader.biXPelsPerMeter = 100; capbitmapinfo.bmiHeader.biYPelsPerMeter = 100; BOOL setVideoFormatResult = FALSE; setVideoFormatResult = capSetVideoFormat( (*this).mHWNDCapture, &(capbitmapinfo), sizeof(BITMAPINFO) ); if (FALSE == setVideoFormatResult) { printf( "Failed to set the desired video capture format." 
); capSetCallbackOnFrame( (*this).mHWNDCapture, NULL ); // disable the callback function DestroyWindow( (*this).mHWNDCapture ); (*this).Clear( ); return (FALSE); } return( TRUE ); }