// Change the capture frame size while preserving the rest of the current
// video format (bit depth, compression, palette data).
// No-op if the grabber is not initialized or the driver query fails.
void CWinVideoGrabber::SetFrameSize(int p_width, int p_height)
{
	if (!inited)
		return;

	// A NULL-buffer call returns the number of bytes the driver needs for
	// its format structure (can exceed sizeof(BITMAPINFO) when a palette
	// or codec-specific data follows the header).
	DWORD t_structure_size;
	t_structure_size = capGetVideoFormat(videowindow, NULL, 0);
	if (t_structure_size == 0)
		return;

	BITMAPINFO *t_bitmap_info;
	t_bitmap_info = (BITMAPINFO *)malloc(t_structure_size);
	if (t_bitmap_info == NULL)
		return;

	capGetVideoFormat(videowindow, t_bitmap_info, t_structure_size);

	t_bitmap_info -> bmiHeader . biWidth = p_width;
	t_bitmap_info -> bmiHeader . biHeight = p_height;

	// Failure of capSetVideoFormat is deliberately ignored: the driver
	// simply keeps its previous format in that case.
	capSetVideoFormat(videowindow, t_bitmap_info, t_structure_size);

	free(t_bitmap_info);

	// Restart the preview so it picks up the new frame size.
	if (videomode == VIDEOGRABBERMODE_PREVIEWING)
	{
		StopPreviewing();
		StartPreviewing();
	}
}
Exemple #2
0
/**
 * Get original size of camera capture source.
 *
 * Reads the driver's current video format and reports its width/height
 * through the optional out-parameters. As a side effect, if the driver is
 * not already delivering 24-bit RGB, the format is switched to 24-bit RGB.
 *
 * @param handle  camera handle (camera_handle*)
 * @param width   optional out: frame width in pixels (may be NULL)
 * @param height  optional out: frame height in pixels (may be NULL)
 * @return JAVACALL_OK on success, JAVACALL_FAIL otherwise
 */
static javacall_result camera_get_video_size(javacall_handle handle, 
                                             long* width, long* height)
{
    camera_handle* pHandle = (camera_handle*)handle;
    javacall_result ret = JAVACALL_FAIL;
    BITMAPINFO* pBmpInfo;

    if (pHandle->hCapWnd) {
        int size = capGetVideoFormatSize(pHandle->hCapWnd);
        pBmpInfo = MALLOC(size);
        if (NULL == pBmpInfo) return ret;

        ((LPBITMAPINFOHEADER)pBmpInfo)->biSize= sizeof(BITMAPINFOHEADER);
        if (0 != capGetVideoFormat(pHandle->hCapWnd, pBmpInfo, size)) {
            if (width) { *width = IMAGEWIDTH(pBmpInfo); }
            if (height) { *height = IMAGEHEIGHT(pBmpInfo); }
            /* IMPL_NOTE - Set to 24 bit RGB format */
            if (IMAGEBITS(pBmpInfo) != 24) {
                IMAGEBITS(pBmpInfo) = 24;
                IMAGEBICLRUSED(pBmpInfo) = 0;
                capSetVideoFormat(pHandle->hCapWnd, pBmpInfo, size);
            }
            ret = JAVACALL_OK;
        }

        FREE(pBmpInfo);
    }

    /* Only log on success: on failure *width / *height were never
     * written and printing them would read uninitialized memory. */
    if (JAVACALL_OK == ret && width && height) {
      JAVA_DEBUG_PRINT2("[camera] camera_get_video_size %d %d\n", *width, *height);
    }

    return ret;
}
Exemple #3
0
/* Pick a pixel format the capture driver accepts: first whatever the
 * driver is already configured for, then I420, RGB24, YUY2 in order of
 * preference. Returns 0 on success, -1 if nothing could be negotiated. */
static int _vfw_engine_select_format(VfwEngine *obj){
	BITMAPINFO fmt_info;
	MSPixFmt current_fmt;
	char fourcc_str[5];

	/* Log the driver's current format as a readable FOURCC string. */
	capGetVideoFormat(obj->capvideo, &fmt_info, sizeof(BITMAPINFO));
	memcpy(fourcc_str, &fmt_info.bmiHeader.biCompression, 4);
	fourcc_str[4] = '\0';
	ms_message("vfw: camera's current format is '%s' at %ix%i", fourcc_str,
			fmt_info.bmiHeader.biWidth, fmt_info.bmiHeader.biHeight);

	current_fmt = ms_fourcc_to_pix_fmt(fmt_info.bmiHeader.biCompression);
	if (current_fmt != MS_PIX_FMT_UNKNOWN && try_format(obj, &fmt_info, current_fmt)) {
		ms_message("Using driver last setting");
		obj->pix_fmt = current_fmt;
	} else if (try_format(obj, &fmt_info, MS_YUV420P)) {
		obj->pix_fmt = MS_YUV420P;
		ms_message("Using YUV420P");
	} else if (try_format(obj, &fmt_info, MS_RGB24)) {
		obj->pix_fmt = MS_RGB24;
		ms_message("Using RGB24");
	} else if (try_format(obj, &fmt_info, MS_YUY2)) {
		obj->pix_fmt = MS_YUY2;
		ms_message("Using YUY2");
	} else {
		ms_error("v4w: Failed to set any video format.");
		return -1;
	}

	/* A positive biHeight on RGB24 means a bottom-up DIB: flag it so the
	 * reader knows the rows arrive reversed. */
	if (obj->pix_fmt == MS_RGB24) {
		if (fmt_info.bmiHeader.biHeight > 0) {
			obj->pix_fmt = MS_RGB24_REV;
		}
	}
	return 0;
}
// See notes for _testFrameFormat
// Probe whether the driver accepts the current frame size at the given
// bit depth. Returns TRUE only if capSetVideoFormat succeeds AND the
// driver really kept the requested biBitCount when read back.
BOOL	CAviCap::_testBitsFormat(UINT bits)
{
	LPBITMAPINFO  bmpI;
	BOOL	ret;
	// Lazily fetch the cached driver format if we don't have it yet.
	if(!_bmpInfo)
		if(!_getFormat())	return FALSE;

	// Keep the driver's current resolution; only the depth is probed.
	int x=(int)((LPBITMAPINFOHEADER)_bmpInfo)->biWidth;
	int y=(int)((LPBITMAPINFOHEADER)_bmpInfo)->biHeight;

	
	// Build a candidate BITMAPINFO for the requested depth at that size.
	bmpI=_mk_LPBITMAPINFO(bits,x,y);
	
	ret=capSetVideoFormat(GetSafeHwnd(), bmpI, (WORD)_getBMIsize(bmpI));
	if(ret)	 //check one's more
		{
		// Some drivers report success but silently keep their old depth:
		// read the format back and compare against what we asked for.
		WORD req=((LPBITMAPINFOHEADER)bmpI)->biBitCount;
		capGetVideoFormat(GetSafeHwnd(), bmpI, (WORD)_getBMIsize(bmpI));
		
		if(((LPBITMAPINFOHEADER)bmpI)->biBitCount!=req)	ret=FALSE;
		
		}
	// NOTE(review): released with plain `delete`; correct only if
	// _mk_LPBITMAPINFO allocates with scalar new — verify its allocator.
	delete bmpI;
	return ret;
}
// Report the current capture frame size through the out-parameters.
// On failure (not initialized, driver query fails, or out of memory) the
// outputs are left untouched, matching the original early-return behavior.
void CWinVideoGrabber::GetFrameSize(int *r_width, int *r_height)
{
	if (!inited)
		return;

	// A NULL-buffer call returns the byte size the driver needs for its
	// format structure.
	DWORD t_structure_size;
	t_structure_size = capGetVideoFormat(videowindow, NULL, 0);
	if (t_structure_size == 0)
		return;

	BITMAPINFO *t_bitmap_info;
	t_bitmap_info = (BITMAPINFO *)malloc(t_structure_size);
	if (t_bitmap_info == NULL)
		return;

	capGetVideoFormat(videowindow, t_bitmap_info, t_structure_size);

	*r_width = t_bitmap_info -> bmiHeader . biWidth;
	*r_height = t_bitmap_info -> bmiHeader . biHeight;

	free(t_bitmap_info);
}
//---------------------------------------------------------------------------------------------------------------------------------------------------------------
// Initialize the camera.
bool CWebCam::StartCam(CWnd* wnd, int nWidth, int nHeight, BITMAPINFO& bmpInfo) // wnd: pointer to the window that will display the captured video.
                                                                                // bmpInfo: receives the video format so code outside the class can use it.
{
	// Create the child window (at the requested size/position) that will show the capture preview.
	m_hCam = capCreateCaptureWindow("Cam Capture", WS_CHILD | WS_VISIBLE,
		0, 0, nWidth, nHeight, wnd->m_hWnd, NULL);   // The window size here is unrelated to the actual capture format.

	if (!capDriverConnect(m_hCam, 0))                // Connect the capture window to the camera driver.
	{
		AfxMessageBox("연결된 카메라를 찾을 수 없습니다.");
		return false;
	}

	//	capDlgVideoFormat(m_hCam);
	capGetVideoFormat(m_hCam, &bmpInfo, sizeof(BITMAPINFO));     // Read the current video format; we need it before using capture frames.

	// Request the desired capture frame size.
	bmpInfo.bmiHeader.biWidth = nWidth;
	bmpInfo.bmiHeader.biHeight = nHeight;
	bmpInfo.bmiHeader.biSizeImage = nWidth*nHeight*bmpInfo.bmiHeader.biBitCount / 8;

	if (!capSetVideoFormat(m_hCam, &bmpInfo, sizeof(BITMAPINFO)))  // Apply the new video format (frame size, color format, ...).
	{
		// If the driver rejects it, re-read the driver's actual format.
		capGetVideoFormat(m_hCam, &bmpInfo, sizeof(BITMAPINFO));
	}

	// Ask for 24-bit RGB capture output.
	bmpInfo.bmiHeader.biBitCount = 24;
	bmpInfo.bmiHeader.biCompression = BI_RGB;
	if (!capSetVideoFormat(m_hCam, &bmpInfo, sizeof(BITMAPINFO)))
	{
		// If the driver rejects it, re-read the driver's actual format.
		capGetVideoFormat(m_hCam, &bmpInfo, sizeof(BITMAPINFO));
	}

	capOverlay(m_hCam, TRUE); // Use graphics-hardware overlay if available.  NOTE: enabling overlay automatically disables preview.
	capPreviewRate(m_hCam,30); // Interval (ms) between preview refreshes.
	capPreview(m_hCam, TRUE);   // Enable preview mode.

	return true;
}
Exemple #7
0
/* Program the capture driver with the requested zbar format at the
 * current vdo->width/height, then read the format back and verify the
 * driver actually kept it. Returns 0 on success, an error code otherwise. */
static int vfw_set_format (zbar_video_t *vdo,
                           uint32_t fmt)
{
    const zbar_format_def_t *def = _zbar_format_lookup(fmt);
    if(!def->format)
        return(err_capture_int(vdo, SEV_ERROR, ZBAR_ERR_INVALID, __func__,
                               "unsupported vfw format: %x", fmt));

    BITMAPINFOHEADER *hdr = vdo->state->bih;
    assert(hdr);

    /* Derive bits-per-pixel from the format group. */
    int bpp = 0;
    switch(def->group) {
    case ZBAR_FMT_GRAY:
        bpp = 8;
        break;
    case ZBAR_FMT_YUV_PLANAR:
    case ZBAR_FMT_YUV_PACKED:
    case ZBAR_FMT_YUV_NV:
        bpp = 8 + (16 >> (def->p.yuv.xsub2 + def->p.yuv.ysub2));
        break;
    case ZBAR_FMT_RGB_PACKED:
        bpp = def->p.rgb.bpp * 8;
        break;
    default:
        bpp = 0;
    }

    hdr->biWidth = vdo->width;
    hdr->biHeight = vdo->height;
    hdr->biBitCount = bpp;
    hdr->biClrUsed = hdr->biClrImportant = 0;
    hdr->biCompression = fmt;

    zprintf(8, "seting format: %.4s(%08x) " BIH_FMT "\n",
            (char*)&fmt, fmt, BIH_FIELDS(hdr));

    if(!capSetVideoFormat(vdo->state->hwnd, hdr, vdo->state->bi_size))
        return(err_capture(vdo, SEV_ERROR, ZBAR_ERR_INVALID, __func__,
                           "setting video format"));

    if(!capGetVideoFormat(vdo->state->hwnd, hdr, vdo->state->bi_size))
        return(err_capture(vdo, SEV_ERROR, ZBAR_ERR_INVALID, __func__,
                           "getting video format"));

    /* Some drivers accept the set call but silently keep their format. */
    if(hdr->biCompression != fmt)
        return(err_capture(vdo, SEV_ERROR, ZBAR_ERR_INVALID, __func__,
                           "video format set ignored"));

    /* Record what the driver actually configured. */
    vdo->format = fmt;
    vdo->width = hdr->biWidth;
    vdo->height = hdr->biHeight;
    vdo->datalen = hdr->biSizeImage;

    zprintf(4, "set new format: %.4s(%08x) " BIH_FMT "\n",
            (char*)&fmt, fmt, BIH_FIELDS(hdr));
    return(0);
}
Exemple #8
0
/* Configure the video format: resolution and pixel format (RGB/I420). */
void CMainFrame::OnVfwVideoformat()
{
	// TODO: Add your command handler code here
	DWORD fsize;
	// Only drivers that expose the format dialog can be configured here;
	// after the user closes the dialog, re-read the (possibly changed)
	// format into lpbiIn and reconfigure the XviD codec to match.
	if(m_caps.fHasDlgVideoFormat){
		capDlgVideoFormat(m_hWndCap);
		fsize = capGetVideoFormatSize(m_hWndCap);
		capGetVideoFormat(m_hWndCap, lpbiIn, fsize);
		Config_XVIECODEC();
	}
}
// Save the driver's current video format into _svRes so it can be
// restored later (the matching "pop" operation). _svRes stays NULL when
// the driver cannot report a format size.
// NOTE(review): a previously pushed buffer is overwritten without being
// freed here — presumably the pop side releases it; verify against that code.
void	CAviCap::_pushResolution()
{
	_svRes=NULL;
	int _vfs=capGetVideoFormatSize(GetSafeHwnd());
	if(!_vfs)
		return;

	   _svRes=(LPVOID)( new char[_vfs]);
	
	  capGetVideoFormat(GetSafeHwnd(), _svRes, (WORD)_vfs);

}
// Notes:
// 1. This routing may be slow enough
// 2. This routing may cause of driver's notification, such as 
// "not enough memory..." or "...not for this videostandard..." etc
// 3. There is no warranty for this format is really available :-(
// 4. This is potentially DANGEROUS work! Some drivers can "freez"!
// Probe whether the driver accepts an x-by-y frame at its current bit
// depth. On success, additionally grabs one frame to detect whether the
// driver delivers compressed data, recording the result in _cmprs_formats.
BOOL	CAviCap::_testFrameFormat(int x, int y)
{
	LPBITMAPINFO  bmpI;
	BOOL	ret;
	// Lazily fetch the cached driver format if we don't have it yet.
	if(!_bmpInfo)
		if(!_getFormat())	return FALSE;

	int biBitCount=(int)((LPBITMAPINFOHEADER)_bmpInfo)->biBitCount;

	// Map the raw depth to the project's BITSxx constants; unsupported
	// depths are rejected outright.
	switch(biBitCount)
	{
	case 1:	biBitCount=BITS01; break;
	case 4:	biBitCount=BITS04; break;
	case 8:	biBitCount=BITS08; break;
	case 16:biBitCount=BITS16; break;
	case 24:biBitCount=BITS24; break;
	case 32:biBitCount=BITS32; break;
	default: return FALSE;
	}
	
	bmpI=_mk_LPBITMAPINFO(biBitCount,x,y);
	
	ret=capSetVideoFormat(GetSafeHwnd(), bmpI, (WORD)_getBMIsize(bmpI));
	if(ret)	 //check one's more
		{
		// Some drivers report success but keep their old size: read the
		// format back and compare against the requested dimensions.
		#define _XX()	(int)((LPBITMAPINFOHEADER)bmpI)->biWidth
		#define _YY()	(int)((LPBITMAPINFOHEADER)bmpI)->biHeight
		int x=_XX();
		int y=_YY();
		capGetVideoFormat(GetSafeHwnd(), bmpI, (WORD)_getBMIsize(bmpI));
		if(x!=_XX()||y!=_YY())	ret=FALSE;
	
	//Check is compressed
		if(ret)	{
			_bufferSize = 0;
			//setup special callback for timing
			// The timing callback records the delivered frame's buffer
			// size into _bufferSize; grab one frame to trigger it.
			capSetCallbackOnFrame(GetSafeHwnd(),(LPVOID)_timerFrameCallbackProc);
			capGrabFrame(GetSafeHwnd());
	
			// Restore whichever frame callback was active before the probe.
			if(!_1FrameCallBackInstalled)
				capSetCallbackOnFrame(GetSafeHwnd(), _defaultFrameCallbackProc);
			else
				capSetCallbackOnFrame(GetSafeHwnd(),(LPVOID)_1FrameCallbackProc);
		
			// Compressed if the delivered size differs from the size an
			// uncompressed DIB of these dimensions would need.
			_cmprs_formats.Add((BOOL)(_bufferSize!=_calcBufferSize((LPBITMAPINFOHEADER)bmpI)));	
	//end check		
			}
		}
	// NOTE(review): released with plain `delete`; correct only if
	// _mk_LPBITMAPINFO allocates with scalar new — verify its allocator.
	delete bmpI;
	return ret;
}
Exemple #11
0
/* Set up encoding and decoding of the captured video. */
void CMainFrame::OnVfwCodec()
{
	// TODO: Add your command handler code here
	DWORD fsize;
	/* VCM initialization */
	// Open an XviD compressor and a matching decompressor via VCM.
	hic1 = ICOpen(mmioFOURCC('v','i','d','c'),  mmioFOURCC('X','V','I','D'),  ICMODE_COMPRESS);
	if (hic1 == 0) 
		AfxMessageBox(_T("打开编码器失败!"));

	hic2 = ICOpen(mmioFOURCC('v','i','d','c'),  mmioFOURCC('X','V','I','D'),  ICMODE_DECOMPRESS);
	if (hic2 == 0) 
		AfxMessageBox(_T("打开解码器失败!"));

	// Read the capture driver's current input format into lpbiIn.
	fsize = capGetVideoFormatSize(m_hWndCap);
	capGetVideoFormat(m_hWndCap, lpbiIn, fsize);
	
	InitAVIWriteOpt();

	// Decode target: 24-bit uncompressed RGB at the input resolution.
	lpbiOut->bmiHeader.biSize          = sizeof(BITMAPINFOHEADER);
	lpbiOut->bmiHeader.biWidth         = lpbiIn->bmiHeader.biWidth;
	lpbiOut->bmiHeader.biHeight        = lpbiIn->bmiHeader.biHeight;
	lpbiOut->bmiHeader.biPlanes        = 1;
	lpbiOut->bmiHeader.biBitCount      = 24;
	lpbiOut->bmiHeader.biCompression   = BI_RGB;
	lpbiOut->bmiHeader.biSizeImage     = lpbiIn->bmiHeader.biWidth*lpbiIn->bmiHeader.biHeight*3;
	lpbiOut->bmiHeader.biXPelsPerMeter = 0;
	lpbiOut->bmiHeader.biYPelsPerMeter = 0;
	lpbiOut->bmiHeader.biClrUsed       = 0;
	lpbiOut->bmiHeader.biClrImportant  = 0;

//	get the format of the input video
	if (ICCompressGetFormat(hic1,lpbiIn,lpbiTmp)!=ICERR_OK) 
		AfxMessageBox(_T("编码器不能读取输出格式!"));
	if (ICCompressQuery(hic1,lpbiIn,lpbiTmp) != ICERR_OK)   
		AfxMessageBox(_T("不能处理编码器输入输出格式!"));

//	set the parameters of the CODEC
	pc.cbSize         = sizeof(COMPVARS);			// structure size
	pc.dwFlags        = ICMF_COMPVARS_VALID;
	pc.hic            = hic1;						// encoder handle
	pc.fccType        = mmioFOURCC('v','i','d','c');
	pc.fccHandler     = mmioFOURCC('X','V','I','D');
	pc.lpbiOut        = lpbiTmp;					// output format
	pc.lKey           = 100;						// key-frame frequency
	pc.lQ             = 10000;						// image quality

	if(!ICSeqCompressFrameStart(&pc, lpbiIn))
		return;
	ICDecompressBegin(hic2,lpbiTmp,lpbiOut);
	m_vfwState  = ENCDEC;
}
Exemple #12
0
/* Try to program the driver with the given pixel format, starting at the
 * engine's configured size and falling back to successively smaller
 * standard sizes until one is accepted. On success s->vsize is updated to
 * the size the driver actually adopted. */
static bool_t try_format(VfwEngine *s, BITMAPINFO *videoformat, MSPixFmt pixfmt){
	MSVideoSize candidate = s->vsize;
	bool_t accepted;

	do {
		/* Refresh the header from the driver, then overwrite only the
		 * fields we negotiate. */
		capGetVideoFormat(s->capvideo, videoformat, sizeof(BITMAPINFO));
		videoformat->bmiHeader.biSizeImage = 0;
		videoformat->bmiHeader.biWidth = candidate.width;
		videoformat->bmiHeader.biHeight = candidate.height;
		if (pixfmt == MS_YUV420P) {
			videoformat->bmiHeader.biBitCount = 12;
			videoformat->bmiHeader.biCompression = MAKEFOURCC('I','4','2','0');
		} else if (pixfmt == MS_YUY2) {
			videoformat->bmiHeader.biBitCount = 16;
			videoformat->bmiHeader.biCompression = MAKEFOURCC('Y','U','Y','2');
		} else if (pixfmt == MS_RGB24) {
			videoformat->bmiHeader.biBitCount = 24;
			videoformat->bmiHeader.biCompression = BI_RGB;
		} else {
			/* Unsupported pixel format requested. */
			return FALSE;
		}
		ms_message("Trying video size %ix%i",candidate.width,candidate.height);
		accepted = capSetVideoFormat(s->capvideo, videoformat, sizeof(BITMAPINFO));
		candidate = ms_video_size_get_just_lower_than(candidate);
	} while (accepted == FALSE && candidate.width != 0);

	if (accepted) {
		/*recheck video format */
		capGetVideoFormat(s->capvideo, videoformat, sizeof(BITMAPINFO));
		s->vsize.width = videoformat->bmiHeader.biWidth;
		s->vsize.height = videoformat->bmiHeader.biHeight;
	}
	return accepted;
}
// Create the hidden VFW capture window, connect the driver, configure
// capture parameters, and install the stream/status/error callbacks.
// framesPerSecond must be non-zero (used as a divisor below).
// Throws via throwException/CHECKRESULT on any setup failure; partially
// constructed state is torn down by captureCleanup() before rethrowing.
void MMCapture::captureInit(UINT framesPerSecond, UINT audioBufferSize) {
  DWORD style = WS_CHILD;

  m_captureWindow = capCreateCaptureWindow(_T("my capture window"), style,0,0,640,480,m_receiver.getWindow(),1);
  if(m_captureWindow == NULL) {
    throwException(_T("%s:Cannot create CaptureWindow:%s"),__TFUNCTION__,getLastErrorText().cstr());
  }

  try {
    CHECKRESULT(capSetUserData(  m_captureWindow, this));
    if(captureVideo()) {
      CHECKRESULT(capDriverConnect(m_captureWindow, 0   ));
      m_webCamConnected = true;
    }

    CAPTUREPARMS param;

    CHECKRESULT(capCaptureGetSetup(m_captureWindow,&param,sizeof(param)));
    param.dwRequestMicroSecPerFrame = 1000000 / framesPerSecond;
    param.fYield            = TRUE;  // capture on a background thread
    param.AVStreamMaster    = AVSTREAMMASTER_AUDIO; // AVSTREAMMASTER_NONE;
    param.dwAudioBufferSize = audioBufferSize;

    CHECKRESULT(capCaptureSetSetup(m_captureWindow,&param,sizeof(param)));
    if(captureAudio()) {
      // Fills m_audioFormat in place; the returned size was unused, so it
      // is no longer stored in a local.
      capGetAudioFormat(m_captureWindow,&m_audioFormat, sizeof(m_audioFormat));
      CHECKRESULT(capSetCallbackOnWaveStream( m_captureWindow, captureWaveStreamCallback));
    }
    if(captureVideo()) {
      // Fills m_videoFormat in place (returned size likewise unused).
      capGetVideoFormat(m_captureWindow,&m_videoFormat, sizeof(m_videoFormat));
      CHECKRESULT(capSetCallbackOnVideoStream(m_captureWindow, captureVideoStreamCallback));
      CHECKRESULT(capSetCallbackOnFrame(      m_captureWindow, captureFrameCallback));
    }
    CHECKRESULT(capSetCallbackOnStatus(     m_captureWindow, captureStatusCallback));
    CHECKRESULT(capSetCallbackOnCapControl( m_captureWindow, captureControlCallback));
    CHECKRESULT(capSetCallbackOnError(      m_captureWindow, captureErrorCallback));

    if(captureAudio() && m_playAudio) {
      m_audioThread = new AudioPlayerThread(*this); TRACE_NEW(m_audioThread);
      m_audioThread->start();
    }
  } catch(...) {
    // Undo any partial initialization, then propagate the error.
    captureCleanup();
    throw;
  }
}
Exemple #14
0
/* Probe whether the capture driver supports the given zbar format at the
 * current vdo->width/height. Returns 1 if the driver accepted AND kept
 * the format, 0 otherwise. */
static int vfw_probe_format (zbar_video_t *vdo,
                             uint32_t fmt)
{
    const zbar_format_def_t *def = _zbar_format_lookup(fmt);
    if(!def)
        return(0);

    zprintf(4, "    trying %.4s(%08x)...\n", (char*)&fmt, fmt);

    BITMAPINFOHEADER *hdr = vdo->state->bih;
    hdr->biWidth = vdo->width;
    hdr->biHeight = vdo->height;

    /* Derive bits-per-pixel from the format group. */
    int bpp = 0;
    switch(def->group) {
    case ZBAR_FMT_GRAY:
        bpp = 8;
        break;
    case ZBAR_FMT_YUV_PLANAR:
    case ZBAR_FMT_YUV_PACKED:
    case ZBAR_FMT_YUV_NV:
        bpp = 8 + (16 >> (def->p.yuv.xsub2 + def->p.yuv.ysub2));
        break;
    case ZBAR_FMT_RGB_PACKED:
        bpp = def->p.rgb.bpp * 8;
        break;
    default:
        bpp = 0;
    }
    hdr->biBitCount = bpp;
    hdr->biCompression = fmt;

    if(!capSetVideoFormat(vdo->state->hwnd, hdr, vdo->state->bi_size)) {
        zprintf(4, "\tno (set fails)\n");
        return(0);
    }

    if(!capGetVideoFormat(vdo->state->hwnd, hdr, vdo->state->bi_size))
        return(0/*FIXME error...*/);

    zprintf(6, "\tactual: " BIH_FMT "\n", BIH_FIELDS(hdr));

    /* The driver may accept the set call yet silently keep its format. */
    if(hdr->biCompression != fmt) {
        zprintf(4, "\tno (set ignored)\n");
        return(0);
    }

    zprintf(4, "\tyes\n");
    return(1);
}
Exemple #15
0
bool CVideoCap::Initialize(int nWidth, int nHeight)
{
//	CAPTUREPARMS	gCapTureParms ; //视频驱动器的能力
	CAPDRIVERCAPS	gCapDriverCaps;
	DWORD			dwSize;

	if (!IsWebCam())
		return false;

	capSetUserData(m_hWndCap, this);
	
	capSetCallbackOnError(m_hWndCap, capErrorCallback);
	if (!capSetCallbackOnFrame(m_hWndCap, FrameCallbackProc))
	{
		return false;
	}

	// 将捕获窗同驱动器连接
	int i;
	for (i = 0; i < 10; i++)
	{
		if (capDriverConnect(m_hWndCap, i))
			break;
	}
	if (i == 10)
		return false;
	
	
	dwSize = capGetVideoFormatSize(m_hWndCap);
	m_lpbmi = new BITMAPINFO;

	// M263只支持176*144 352*288 (352*288 24彩的试验只支持biPlanes = 1)
	capGetVideoFormat(m_hWndCap, m_lpbmi, dwSize);
	// 采用指定的大小
	if (nWidth && nHeight)
	{
 		m_lpbmi->bmiHeader.biWidth = nWidth;
 		m_lpbmi->bmiHeader.biHeight = nHeight;
		m_lpbmi->bmiHeader.biPlanes = 1;
		m_lpbmi->bmiHeader.biSizeImage = (((m_lpbmi->bmiHeader.biWidth * m_lpbmi->bmiHeader.biBitCount + 31) & ~31) >> 3) * m_lpbmi->bmiHeader.biHeight;
		// 实验得知一些摄像头不支持指定的分辩率
 		if (!capSetVideoFormat(m_hWndCap, m_lpbmi, sizeof(BITMAPINFO)))
			return false;
	}
Exemple #16
0
/* Query the driver's current format, probe every format zbar knows, and
 * wire up the VFW entry points on the video object.
 * Returns 0 on success, an error code otherwise. */
static int vfw_probe (zbar_video_t *vdo)
{
    video_state_t *state = vdo->state;
    state->bi_size = capGetVideoFormatSize(state->hwnd);

    /* Grow the header buffer through a temporary: assigning realloc()'s
     * result straight back would leak the old buffer on failure. */
    BITMAPINFOHEADER *bih = realloc(state->bih, state->bi_size);
    if(bih)
        state->bih = bih;

    if(!capSetUserData(state->hwnd, (LONG)vdo) ||
       !state->bi_size || !bih ||
       !capGetVideoFormat(state->hwnd, bih, state->bi_size))
        return(err_capture(vdo, SEV_ERROR, ZBAR_ERR_INVALID, __func__,
                           "setting up video capture"));

    zprintf(3, "initial format: " BIH_FMT " (bisz=%x)\n",
            BIH_FIELDS(bih), state->bi_size);

    if(!vdo->width || !vdo->height) {
        vdo->width = bih->biWidth;
        vdo->height = bih->biHeight;
    }
    vdo->datalen = bih->biSizeImage;

    zprintf(2, "probing supported formats:\n");
    /* calloc zero-fills, so the list is always zero-terminated. */
    vdo->formats = calloc(VFW_NUM_FORMATS, sizeof(uint32_t));
    if(!vdo->formats)
        return(err_capture(vdo, SEV_ERROR, ZBAR_ERR_NOMEM, __func__,
                           "allocating format list"));

    int n = 0;
    const uint32_t *fmt;
    for(fmt = vfw_formats; *fmt; fmt++)
        if(vfw_probe_format(vdo, *fmt))
            vdo->formats[n++] = *fmt;

    /* Shrink to fit (n entries + terminator); keep the old buffer if the
     * shrink fails — it is still valid and zero-terminated. */
    uint32_t *formats = realloc(vdo->formats, (n + 1) * sizeof(uint32_t));
    if(formats)
        vdo->formats = formats;

    vdo->width = bih->biWidth;
    vdo->height = bih->biHeight;
    vdo->intf = VIDEO_VFW;
    vdo->init = vfw_init;
    vdo->start = vfw_start;
    vdo->stop = vfw_stop;
    vdo->cleanup = vfw_cleanup;
    vdo->nq = vfw_nq;
    vdo->dq = vfw_dq;
    return(0);
}
Exemple #17
0
// Frame data is stored in lpVHdr 
static LRESULT CALLBACK FrameCallbackProc(HWND hWnd, LPVIDEOHDR lpVHdr)
{
	// If no data provided by driver (dropped frame) - nothing to do
	if (lpVHdr->dwBytesUsed == 0) return FALSE;
	
	int grayScaleSize = lpVHdr->dwBytesUsed/3; // RGB uses 24 BPP, GS is 8 BPP

	// Get pointer to our video grabber - remember, this is friend function
	VideoGrabber* videoGrabber = (VideoGrabber*) capGetUserData(hWnd);
	if (videoGrabber->mGrabNextFrame)
	{
		// Get video format from driver (including resolution)
		if (videoGrabber->mBitmapInfo == NULL) 
		{
			// All these lines are run only once! I put them here and not in the constructor \
			   because I need to run them in context of callback. Strange though...
			DWORD videoFormatSize = capGetVideoFormatSize(videoGrabber->camhwnd);
			videoGrabber->mBitmapInfo = (PBITMAPINFO) new char[videoFormatSize];	
			capGetVideoFormat(videoGrabber->camhwnd, videoGrabber->mBitmapInfo, videoFormatSize);
			videoGrabber->mCurrentFrameGS = new BYTE[grayScaleSize];
			videoGrabber->mCurrentFrameBlurred = new BYTE[grayScaleSize];
			videoGrabber->mPreviousFrame = new BYTE[grayScaleSize];
		}

		ApplyGrayScaleFilter(lpVHdr, videoGrabber->mCurrentFrameGS); // Pass current frame data to grayscale it
		// Blurring decreases noise. mBitmapInfo contains frame dimensions (width & height)
		ApplyAverageBlurFilter(videoGrabber->mCurrentFrameGS, videoGrabber->mBitmapInfo, videoGrabber->mCurrentFrameBlurred);

		if (videoGrabber->mPreviousFrameExists)
		{
			// Calculate difference between frames
			int differedPixelsNum = CompareFrames(videoGrabber->mCurrentFrameBlurred, videoGrabber->mPreviousFrame, 
				videoGrabber->mBitmapInfo, videoGrabber->PIXELS_DIFFERENCE_TRESHOLD);
			videoGrabber->mMotionDetectedDuringLastSecond = 
				(differedPixelsNum > videoGrabber->MOTION_TRESHOLD); // Motion detected!
		}

		memcpy(videoGrabber->mPreviousFrame, videoGrabber->mCurrentFrameBlurred, grayScaleSize);
		videoGrabber->mPreviousFrameExists = TRUE;		// Now we have frame to compare with
		videoGrabber->mGrabNextFrame = FALSE;			// frame for current second has been processed
		SetEvent(videoGrabber->mFrameProcessedEvent);	// Signal about frame processing completion
	}
Exemple #18
0
/**
 * Set camera snapshot size
 *
 * The requested width is snapped to the nearest supported resolution
 * (160x120, 320x240, or 640x480) and the driver is switched to 24-bit
 * RGB at that size. Returns JAVACALL_OK once the current format could be
 * read (the set itself is best-effort), JAVACALL_FAIL otherwise.
 */
static javacall_result camera_set_video_size(javacall_handle handle, 
                                             long width, long height)
{
    camera_handle* camera = (camera_handle*)handle;
    javacall_result result = JAVACALL_FAIL;
    BITMAPINFO* format;
    int format_size;

    JAVA_DEBUG_PRINT2("[camera] camera_set_video_size %d %d\n", width, height);

    /* Snap the request to a supported resolution. */
    if (width <= 160) {
        width = 160;
        height = 120;
    } else if (width <= 320) {
        width = 320;
        height = 240;
    } else {
        width = 640;
        height = 480;
    }

    if (!camera->hCapWnd) {
        return result;
    }

    format_size = capGetVideoFormatSize(camera->hCapWnd);
    format = MALLOC(format_size);
    if (NULL == format) return result;

    ((LPBITMAPINFOHEADER)format)->biSize= sizeof(BITMAPINFOHEADER);
    if (0 != capGetVideoFormat(camera->hCapWnd, format, format_size)) {
        /* Force 24-bit RGB at the snapped resolution. */
        IMAGEBITS(format) = 24;
        IMAGEBICLRUSED(format) = 0;
        IMAGEWIDTH(format) = width;
        IMAGEHEIGHT(format) = height;
        capSetVideoFormat(camera->hCapWnd, format, format_size);
        result = JAVACALL_OK;
    }
    FREE(format);

    return result;
}
// Query the driver for its current video-format block and cache a copy in
// _bmpInfo. When _autosize is set and the query succeeds, resize this
// window to the captured frame and center it in its parent's client area.
// Returns the result of capGetVideoFormat (FALSE also when the driver
// reports a zero-sized format).
BOOL	CAviCap::_getFormat()
{
	int vfs=capGetVideoFormatSize(GetSafeHwnd());
	
	if(!vfs)		return FALSE;

	// The cached block is allocated as a char array, so it must be
	// released with delete[]: the previous plain `delete` was a
	// new[]/delete mismatch (undefined behavior).
	if(_bmpInfo)	{delete [] (char *)_bmpInfo;_bmpInfo=NULL;}
	_bmpInfo =(BITMAPINFO	*)( new char[vfs]);
	
	LPBITMAPINFOHEADER bmpIH=( LPBITMAPINFOHEADER )_bmpInfo;
	 
	bmpIH->biSize= sizeof(BITMAPINFOHEADER);
	BOOL ret=capGetVideoFormat(GetSafeHwnd(), _bmpInfo, (WORD)vfs);
	
	if(ret && _autosize)
	{
		// Adjust size & position now: center the frame inside the parent
		// (or the desktop when there is no parent).
		CRect	rc;
		CRect	rcc;
		CWnd  *parent = GetParent();
		if(!parent)  parent=CWnd::GetDesktopWindow( );
		ASSERT(parent);
		GetClientRect(&rcc);
		parent->GetClientRect(&rc);
		
		int w = (int)bmpIH->biWidth;
		int h = (int)bmpIH->biHeight;
		int x = w>rc.Width() ? 0 : (rc.Width()-w)/2;
		int y = h>rc.Height() ? 0 : (rc.Height()-h)/2;
		SetWindowPos(NULL, x, y, w, h, SWP_NOZORDER);
	}
	
	return ret;
}
Exemple #20
0
/* Initialize the VFW capture device. */ 
void CMainFrame::OnVfwInitvfw()
{
	// TODO: Add your command handler code here
	DWORD fsize;

	// Create the source video window
	if(!m_wndSource.CreateEx(WS_EX_TOPMOST,NULL,
							_T("Source"),WS_OVERLAPPED|WS_CAPTION,
							CRect(0,0,352,288),NULL,0))
		return;
	
	m_hWndCap = capCreateCaptureWindow(_T("Capture Window"),WS_CHILD|WS_VISIBLE,
									  0,0,352,288,
									  m_wndSource.m_hWnd,0);

	//m_wndSource.ShowWindow(SW_SHOW);
	// Register the callback functions
	capSetCallbackOnError(m_hWndCap,(FARPROC)ErrorCallbackProc);
	capSetCallbackOnStatus(m_hWndCap,(FARPROC)StatusCallbackProc);
	capSetCallbackOnVideoStream(m_hWndCap,(FARPROC)VideoCallbackProc);

	// Connect the video device
	capDriverConnect(m_hWndCap,0);	//(HWND m_hWndCap, int index);//index : 0--9
	// Query the driver's capabilities
	capDriverGetCaps(m_hWndCap,&m_caps,sizeof(CAPDRIVERCAPS));
	if (m_caps.fHasOverlay)
		capOverlay(m_hWndCap,TRUE);
	// Set the preview rate and start previewing
	capPreviewRate(m_hWndCap,1000/25);
	capPreview(m_hWndCap,bPreview);


	// Cache the driver's current video format in lpbiIn.
	fsize = capGetVideoFormatSize(m_hWndCap);
	capGetVideoFormat(m_hWndCap, lpbiIn, fsize);
	
	AfxMessageBox(_T("初始化成功!"));
}
Exemple #21
0
/* Return a malloc'd copy of the capture driver's current video format and
 * store its byte size in *pfmtlen. Returns 0 (and sets the error string)
 * when the device is not open or any query/allocation fails; the caller
 * owns and must free() the returned buffer. */
void *cap_getformat(cap_cx *cx, size_t *pfmtlen, char *err)
{
	size_t fmt_size;
	void *fmt_buf;

	if(!cx->opened)
	{
		set_err("Can't get video format");
		return 0;
	}

	fmt_size = (size_t)capGetVideoFormatSize(cx->hwnd);
	if(!fmt_size)
	{
		set_err("Can't get video format");
		return 0;
	}

	fmt_buf = malloc(fmt_size);
	if(!fmt_buf)
	{
		set_err("Can't get video format");
		return 0;
	}

	if(!capGetVideoFormat(cx->hwnd, fmt_buf, fmt_size))
	{
		free(fmt_buf);
		set_err("Can't get video format");
		return 0;
	}

	set_err("");
	*pfmtlen = fmt_size;
	return fmt_buf;
}
Exemple #22
0
// Initialize camera input
// Scans driver indices starting at wIndex (clamped to 0..9) for a capture
// driver that connects, then configures capture rate and the frame
// callback, and determines the initial frame size/FOURCC.
// Returns true iff a capture window is connected.
bool CvCaptureCAM_VFW::open( int wIndex )
{
    char szDeviceName[80];
    char szDeviceVersion[80];
    HWND hWndC = 0;

    close();

    if( (unsigned)wIndex >= 10 )
        wIndex = 0;

    for( ; wIndex < 10; wIndex++ )
    {
        if( capGetDriverDescription( wIndex, szDeviceName,
            sizeof (szDeviceName), szDeviceVersion,
            sizeof (szDeviceVersion)))
        {
            hWndC = capCreateCaptureWindow ( "My Own Capture Window",
                WS_POPUP | WS_CHILD, 0, 0, 320, 240, 0, 0);
            if( capDriverConnect (hWndC, wIndex))
                break;
            // Connection failed: destroy the window and try the next driver.
            DestroyWindow( hWndC );
            hWndC = 0;
        }
    }

    if( hWndC )
    {
        capWnd = hWndC;
        hdr = 0;
        hic = 0;
        fourcc = (DWORD)-1;

        memset( &caps, 0, sizeof(caps));
        capDriverGetCaps( hWndC, &caps, sizeof(caps));
        CAPSTATUS status = {};
        capGetStatus(hWndC, &status, sizeof(status));
        // Size the capture window to the driver's reported image size.
        ::SetWindowPos(hWndC, NULL, 0, 0, status.uiImageWidth, status.uiImageHeight, SWP_NOZORDER|SWP_NOMOVE);
        capSetUserData( hWndC, (size_t)this );
        capSetCallbackOnFrame( hWndC, frameCallback );
        CAPTUREPARMS p;
        capCaptureGetSetup(hWndC,&p,sizeof(CAPTUREPARMS));
        p.dwRequestMicroSecPerFrame = 66667/2; // 30 FPS
        capCaptureSetSetup(hWndC,&p,sizeof(CAPTUREPARMS));
        //capPreview( hWndC, 1 );
        capPreviewScale(hWndC,FALSE);
        capPreviewRate(hWndC,1);

        // Get frame initial parameters.
        const DWORD size = capGetVideoFormatSize(capWnd);
        if( size > 0 )
        {
            unsigned char *pbi = new unsigned char[size];
            if( pbi )
            {
                // A full-size read confirms the returned structure is complete.
                if( capGetVideoFormat(capWnd, pbi, size) == size )
                {
                    BITMAPINFOHEADER& vfmt = ((BITMAPINFO*)pbi)->bmiHeader;
                    widthSet = vfmt.biWidth;
                    heightSet = vfmt.biHeight;
                    fourcc = vfmt.biCompression;
                }
                delete []pbi;
            }
        }
        // And alternative way in case of failure.
        if( widthSet == 0 || heightSet == 0 )
        {
            widthSet = status.uiImageWidth;
            heightSet = status.uiImageHeight;
        }

    }
    return capWnd != 0;
}
	// First-update hook: sets the scroll area, creates the capture window,
	// connects the webcam driver, configures preview/capture parameters and
	// installs the video callbacks.
	void CSoCProjectView::OnInitialUpdate()
	{
		CScrollView::OnInitialUpdate();

		CSize sizeTotal;
		// TODO: Calculate the total size of this view.
		sizeTotal.cx = 640;
		sizeTotal.cy = 480;
		SetScrollSizes(MM_TEXT, sizeTotal);

		CSoCProjectDoc*pDoc = GetDocument();
		// TODO: Add your construction code here.
		RECT r;
		GetClientRect(&r);
		pDoc->m_hCamWnd = capCreateCaptureWindow(
			_T("Capture Window"), WS_CHILD | WS_VISIBLE, 5, 5, r.right-5, r.bottom-5, this->m_hWnd, NULL);

		// Cameras can be addressed by index 0-9 in device-installation order.
		if(!capDriverConnect(pDoc->m_hCamWnd, 0)) AfxMessageBox(_T("웹캠 인식 실패 ㅠㅠ"));

		// Fetch information about the current driver.
		capDriverGetCaps(pDoc->m_hCamWnd, &pDoc->m_psCapsInfo, sizeof(pDoc->m_psCapsInfo));

		// Check whether the driver supports the video-format dialog.
		if(pDoc->m_psCapsInfo.fHasDlgVideoFormat) {
			// If it does, the call below opens the driver's dialog where the
			// resolution, pixel format, and frame buffer size can be chosen.
			// Picking an unsupported video format will just show a black screen...
			capDlgVideoFormat(pDoc->m_hCamWnd);
		}

		// Overlay cannot be used when m_psCapsInfo.fHasOverlay is 0.
		if(pDoc->m_psCapsInfo.fHasOverlay) {
			// Hardware overlay reduces system load. (optional)
			capOverlay(pDoc->m_hCamWnd, FALSE);
		}

		// Read the BITMAPINFO describing the capture format.
		capGetVideoFormat(pDoc->m_hCamWnd, &pDoc->m_BTMInfo, sizeof(pDoc->m_BTMInfo));

		// Preview refresh interval, in milliseconds.
		capPreviewRate(pDoc->m_hCamWnd, 1);

		// Start preview playback.
		capPreview(pDoc->m_hCamWnd, TRUE);

		CAPTUREPARMS cp;     
	
		capCaptureGetSetup(pDoc->m_hCamWnd, &cp, sizeof(cp) );	// get the current defaults      
	
		cp.dwRequestMicroSecPerFrame = 1;					// Set desired frame rate     
		cp.fMakeUserHitOKToCapture   = FALSE;
		cp.fYield                    = TRUE;                // we want capture on a background thread.
		cp.wNumVideoRequested        = (WORD) 1;			// we may get less than this - no problem
		cp.fCaptureAudio             = FALSE;     
		cp.vKeyAbort                 = 0;                   // If no key is provided, it won't stop...
		cp.fAbortLeftMouse           = FALSE;
		cp.fAbortRightMouse          = FALSE;
		cp.fLimitEnabled             = FALSE;				// we want to stop     
		cp.fMCIControl               = FALSE;
	
		capCaptureSetSetup(pDoc->m_hCamWnd, &cp, sizeof(cp) ); 
		
		capSetCallbackOnVideoStream(pDoc->m_hCamWnd, VideoCallbackProc);
		capSetCallbackOnFrame(pDoc->m_hCamWnd, VideoCallbackProc);
	}
Exemple #24
0
// Convert the most recently captured VFW buffer (hdr->lpData) into the
// cached BGR IplImage `frame`. Handles NV12 directly, decompresses other
// non-RGB24 formats through VCM, and flips bottom-up RGB data.
// Returns NULL when no capture window, no data, or decompression fails.
IplImage* CvCaptureCAM_VFW::retrieveFrame(int)
{
    BITMAPINFO vfmt;
    memset( &vfmt, 0, sizeof(vfmt));
    BITMAPINFOHEADER& vfmt0 = vfmt.bmiHeader;

    if( !capWnd )
        return 0;

    // Remember the previous frame size so a resolution change forces the
    // decompressor to be re-opened below.
    const DWORD sz = capGetVideoFormat( capWnd, &vfmt, sizeof(vfmt));
    const int prevWidth = frame ? frame->width : 0;
    const int prevHeight = frame ? frame->height : 0;

    if( !hdr || hdr->lpData == 0 || sz == 0 )
        return 0;

    // (Re)allocate the output image when the capture size changed.
    if( !frame || frame->width != vfmt0.biWidth || frame->height != vfmt0.biHeight )
    {
        cvReleaseImage( &frame );
        frame = cvCreateImage( cvSize( vfmt0.biWidth, vfmt0.biHeight ), 8, 3 );
    }

    if ( vfmt0.biCompression == MAKEFOURCC('N','V','1','2') )
    {
        // Frame is in YUV 4:2:0 NV12 format, convert to BGR color space
        // See https://msdn.microsoft.com/en-us/library/windows/desktop/dd206750(v=vs.85).aspx#nv12)
        IplImage src;
        cvInitImageHeader( &src, cvSize( vfmt0.biWidth, vfmt0.biHeight * 3 / 2 ), IPL_DEPTH_8U, 1, IPL_ORIGIN_BL, 4 );
        cvSetData( &src, hdr->lpData, src.widthStep );
        cvCvtColor( &src, frame, CV_YUV2BGR_NV12 );
    }
    else if( vfmt0.biCompression != BI_RGB ||
             vfmt0.biBitCount != 24 )
    {
        // Compressed / non-24bpp data: decompress to 24-bit RGB via VCM.
        BITMAPINFOHEADER vfmt1 = icvBitmapHeader( vfmt0.biWidth, vfmt0.biHeight, 24 );

        // Re-open the decompressor when the codec or frame size changed.
        if( hic == 0 || fourcc != vfmt0.biCompression ||
            prevWidth != vfmt0.biWidth || prevHeight != vfmt0.biHeight )
        {
            closeHIC();
            hic = ICOpen( MAKEFOURCC('V','I','D','C'),
                          vfmt0.biCompression, ICMODE_DECOMPRESS );
            if( hic )
            {
                if( ICDecompressBegin( hic, &vfmt0, &vfmt1 ) != ICERR_OK )
                {
                    closeHIC();
                    return 0;
                }
            }
        }

        if( !hic || ICDecompress( hic, 0, &vfmt0, hdr->lpData,
            &vfmt1, frame->imageData ) != ICERR_OK )
        {
            closeHIC();
            return 0;
        }

        // DIB rows are bottom-up; flip to top-down.
        cvFlip( frame, frame, 0 );
    }
    else
    {
        // Uncompressed 24-bit RGB: wrap the capture buffer and flip-copy.
        IplImage src;
        cvInitImageHeader( &src, cvSize(vfmt0.biWidth, vfmt0.biHeight),
            IPL_DEPTH_8U, 3, IPL_ORIGIN_BL, 4 );
        cvSetData( &src, hdr->lpData, src.widthStep );
        cvFlip( &src, frame, 0 );
    }

    return frame;
}
Exemple #25
0
// Convert the most recently captured VFW buffer (hdr->lpData) into the
// cached BGR IplImage `frame`. Non-RGB24 formats are decompressed through
// VCM; bottom-up RGB data is flipped. Returns NULL when no capture window,
// no data, or decompression fails.
// NOTE(review): near-duplicate of the variant above minus NV12 handling —
// presumably from a different revision of the same file.
IplImage* CvCaptureCAM_VFW::retrieveFrame(int)
{
    BITMAPINFO vfmt;
    memset( &vfmt, 0, sizeof(vfmt));
    BITMAPINFOHEADER& vfmt0 = vfmt.bmiHeader;

    if( !capWnd )
        return 0;

    // Remember the previous frame size so a resolution change forces the
    // decompressor to be re-opened below.
    const DWORD sz = capGetVideoFormat( capWnd, &vfmt, sizeof(vfmt));
    const int prevWidth = frame ? frame->width : 0;
    const int prevHeight = frame ? frame->height : 0;

    if( !hdr || hdr->lpData == 0 || sz == 0 )
        return 0;

    // (Re)allocate the output image when the capture size changed.
    if( !frame || frame->width != vfmt0.biWidth || frame->height != vfmt0.biHeight )
    {
        cvReleaseImage( &frame );
        frame = cvCreateImage( cvSize( vfmt0.biWidth, vfmt0.biHeight ), 8, 3 );
    }

    if( vfmt0.biCompression != BI_RGB ||
        vfmt0.biBitCount != 24 )
    {
        // Compressed / non-24bpp data: decompress to 24-bit RGB via VCM.
        BITMAPINFOHEADER vfmt1 = icvBitmapHeader( vfmt0.biWidth, vfmt0.biHeight, 24 );

        // Re-open the decompressor when the codec or frame size changed.
        if( hic == 0 || fourcc != vfmt0.biCompression ||
            prevWidth != vfmt0.biWidth || prevHeight != vfmt0.biHeight )
        {
            closeHIC();
            hic = ICOpen( MAKEFOURCC('V','I','D','C'),
                          vfmt0.biCompression, ICMODE_DECOMPRESS );
            if( hic )
            {
                if( ICDecompressBegin( hic, &vfmt0, &vfmt1 ) != ICERR_OK )
                {
                    closeHIC();
                    return 0;
                }
            }
        }

        if( !hic || ICDecompress( hic, 0, &vfmt0, hdr->lpData,
            &vfmt1, frame->imageData ) != ICERR_OK )
        {
            closeHIC();
            return 0;
        }

        // DIB rows are bottom-up; flip to top-down.
        cvFlip( frame, frame, 0 );
    }
    else
    {
        // Uncompressed 24-bit RGB: wrap the capture buffer and flip-copy.
        IplImage src;
        cvInitImageHeader( &src, cvSize(vfmt0.biWidth, vfmt0.biHeight),
            IPL_DEPTH_8U, 3, IPL_ORIGIN_BL, 4 );
        cvSetData( &src, hdr->lpData, src.widthStep );
        cvFlip( &src, frame, 0 );
    }

    return frame;
}
/* Starts capturing frames on a camera device.
 * Param:
 *  cd - Camera device descriptor returned by the open routine.
 *  pixel_format - Requested V4L2 pixel format. NOTE(review): this parameter
 *      is not used by this routine; the effective format is derived from the
 *      bitmaps the capture driver actually delivers — confirm callers expect
 *      that.
 *  frame_width, frame_height - Requested frame dimensions.
 * Return:
 *  0 on success, or -1 on failure (the device is reset via
 *  _camera_device_reset on every error path).
 */
int
camera_device_start_capturing(CameraDevice* cd,
                              uint32_t pixel_format,
                              int frame_width,
                              int frame_height)
{
    WndCameraDevice* wcd;
    HBITMAP bm_handle;
    BITMAP  bitmap;
    size_t format_info_size;

    if (cd == NULL || cd->opaque == NULL) {
        E("%s: Invalid camera device descriptor", __FUNCTION__);
        return -1;
    }
    wcd = (WndCameraDevice*)cd->opaque;

    /* wcd->dc is an indicator of capturing: !NULL - capturing, NULL - not */
    if (wcd->dc != NULL) {
        W("%s: Capturing is already on on device '%s'",
          __FUNCTION__, wcd->window_name);
        return 0;
    }

    /* Connect capture window to the video capture driver. */
    if (!capDriverConnect(wcd->cap_window, wcd->input_channel)) {
        return -1;
    }

    /* Get current frame information from the driver. */
    format_info_size = capGetVideoFormatSize(wcd->cap_window);
    if (format_info_size == 0) {
        E("%s: Unable to get video format size: %d",
          __FUNCTION__, GetLastError());
        _camera_device_reset(wcd);
        return -1;
    }
    wcd->frame_bitmap = (BITMAPINFO*)malloc(format_info_size);
    if (wcd->frame_bitmap == NULL) {
        E("%s: Unable to allocate frame bitmap info buffer", __FUNCTION__);
        _camera_device_reset(wcd);
        return -1;
    }
    if (!capGetVideoFormat(wcd->cap_window, wcd->frame_bitmap,
                           format_info_size)) {
        E("%s: Unable to obtain video format: %d", __FUNCTION__, GetLastError());
        _camera_device_reset(wcd);
        return -1;
    }

    /* Lets see if we need to set different frame dimensions.
     * abs() on biHeight because a negative value denotes a "top-down" frame. */
    if (wcd->frame_bitmap->bmiHeader.biWidth != frame_width ||
            abs(wcd->frame_bitmap->bmiHeader.biHeight) != frame_height) {
        /* Dimensions don't match. Set new frame info. */
        wcd->frame_bitmap->bmiHeader.biWidth = frame_width;
        wcd->frame_bitmap->bmiHeader.biHeight = frame_height;
        /* We need to recalculate image size, since the capture window / driver
         * will use image size provided by us. */
        if (wcd->frame_bitmap->bmiHeader.biBitCount == 24) {
            /* Special case that may require WORD boundary alignment. */
            uint32_t bpl = (frame_width * 3 + 1) & ~1;
            wcd->frame_bitmap->bmiHeader.biSizeImage = bpl * frame_height;
        } else {
            wcd->frame_bitmap->bmiHeader.biSizeImage =
                (frame_width * frame_height * wcd->frame_bitmap->bmiHeader.biBitCount) / 8;
        }
        if (!capSetVideoFormat(wcd->cap_window, wcd->frame_bitmap,
                               format_info_size)) {
            E("%s: Unable to set video format: %d", __FUNCTION__, GetLastError());
            _camera_device_reset(wcd);
            return -1;
        }
    }

    if (wcd->frame_bitmap->bmiHeader.biCompression > BI_PNG) {
        D("%s: Video capturing driver has reported pixel format %.4s",
          __FUNCTION__, (const char*)&wcd->frame_bitmap->bmiHeader.biCompression);
    }

    /* Most of the time frame bitmaps come in "bottom-up" form, where its origin
     * is the lower-left corner. However, it could be in the normal "top-down"
     * form with the origin in the upper-left corner. So, we must adjust the
     * biHeight field, since the way "top-down" form is reported here is by
     * setting biHeight to a negative value. */
    if (wcd->frame_bitmap->bmiHeader.biHeight < 0) {
        wcd->frame_bitmap->bmiHeader.biHeight =
            -wcd->frame_bitmap->bmiHeader.biHeight;
        wcd->is_top_down = 1;
    } else {
        wcd->is_top_down = 0;
    }

    /* Get DC for the capturing window that will be used when we deal with
     * bitmaps obtained from the camera device during frame capturing. */
    wcd->dc = GetDC(wcd->cap_window);
    if (wcd->dc == NULL) {
        E("%s: Unable to obtain DC for %s: %d",
          __FUNCTION__, wcd->window_name, GetLastError());
        _camera_device_reset(wcd);
        return -1;
    }

    /*
     * At this point we need to grab a frame to properly setup framebuffer, and
     * calculate pixel format. The problem is that bitmap information obtained
     * from the driver doesn't necessarily match the actual bitmap we're going to
     * obtain via capGrabFrame / capEditCopy / GetClipboardData
     */

    /* Grab a frame, and post it to the clipboard. Not very effective, but this
     * is how capXxx API is operating. */
    if (!capGrabFrameNoStop(wcd->cap_window) ||
        !capEditCopy(wcd->cap_window) ||
        !OpenClipboard(wcd->cap_window)) {
        E("%s: Device '%s' is unable to save frame to the clipboard: %d",
          __FUNCTION__, wcd->window_name, GetLastError());
        _camera_device_reset(wcd);
        return -1;
    }

    /* Get bitmap handle saved into clipboard. Note that bitmap is still
     * owned by the clipboard here! */
    bm_handle = (HBITMAP)GetClipboardData(CF_BITMAP);
    if (bm_handle == NULL) {
        E("%s: Device '%s' is unable to obtain frame from the clipboard: %d",
          __FUNCTION__, wcd->window_name, GetLastError());
        CloseClipboard();
        _camera_device_reset(wcd);
        return -1;
    }

    /* Get bitmap object that is initialized with the actual bitmap info. */
    if (!GetObject(bm_handle, sizeof(BITMAP), &bitmap)) {
        E("%s: Device '%s' is unable to obtain frame's bitmap: %d",
          __FUNCTION__, wcd->window_name, GetLastError());
        CloseClipboard();
        _camera_device_reset(wcd);
        return -1;
    }

    /* Now that we have all we need in 'bitmap' */
    CloseClipboard();

    /* Make sure that dimensions match. Othewise - fail. */
    if (wcd->frame_bitmap->bmiHeader.biWidth != bitmap.bmWidth ||
        wcd->frame_bitmap->bmiHeader.biHeight != bitmap.bmHeight ) {
        E("%s: Requested dimensions %dx%d do not match the actual %dx%d",
          __FUNCTION__, frame_width, frame_height,
          wcd->frame_bitmap->bmiHeader.biWidth,
          wcd->frame_bitmap->bmiHeader.biHeight);
        _camera_device_reset(wcd);
        return -1;
    }

    /* Create bitmap info that will be used with GetDIBits. */
    wcd->gdi_bitmap = (BITMAPINFO*)malloc(wcd->frame_bitmap->bmiHeader.biSize);
    if (wcd->gdi_bitmap == NULL) {
        E("%s: Unable to allocate gdi bitmap info", __FUNCTION__);
        _camera_device_reset(wcd);
        return -1;
    }
    memcpy(wcd->gdi_bitmap, wcd->frame_bitmap,
           wcd->frame_bitmap->bmiHeader.biSize);
    wcd->gdi_bitmap->bmiHeader.biCompression = BI_RGB;
    wcd->gdi_bitmap->bmiHeader.biBitCount = bitmap.bmBitsPixel;
    /* Image size is bytes-per-scanline (bmWidthBytes) multiplied by the number
     * of scanlines (bmHeight). The previous code multiplied by bmWidth, which
     * under-allocates the framebuffer below whenever the frame is taller than
     * it is wide (and over-allocates it otherwise). */
    wcd->gdi_bitmap->bmiHeader.biSizeImage = bitmap.bmWidthBytes * bitmap.bmHeight;
    /* Adjust GDI's bitmap biHeight for proper frame direction ("top-down", or
     * "bottom-up") We do this trick in order to simplify pixel format conversion
     * routines, where we always assume "top-down" frames. The trick he is to
     * have negative biHeight in 'gdi_bitmap' if driver provides "bottom-up"
     * frames, and positive biHeight in 'gdi_bitmap' if driver provides "top-down"
     * frames. This way GetGDIBits will always return "top-down" frames. */
    if (wcd->is_top_down) {
        wcd->gdi_bitmap->bmiHeader.biHeight =
            wcd->frame_bitmap->bmiHeader.biHeight;
    } else {
        wcd->gdi_bitmap->bmiHeader.biHeight =
            -wcd->frame_bitmap->bmiHeader.biHeight;
    }

    /* Allocate framebuffer. */
    wcd->framebuffer = (uint8_t*)malloc(wcd->gdi_bitmap->bmiHeader.biSizeImage);
    if (wcd->framebuffer == NULL) {
        E("%s: Unable to allocate %d bytes for framebuffer",
          __FUNCTION__, wcd->gdi_bitmap->bmiHeader.biSizeImage);
        _camera_device_reset(wcd);
        return -1;
    }

    /* Lets see what pixel format we will use. */
    if (wcd->gdi_bitmap->bmiHeader.biBitCount == 16) {
        wcd->pixel_format = V4L2_PIX_FMT_RGB565;
    } else if (wcd->gdi_bitmap->bmiHeader.biBitCount == 24) {
        wcd->pixel_format = V4L2_PIX_FMT_BGR24;
    } else if (wcd->gdi_bitmap->bmiHeader.biBitCount == 32) {
        wcd->pixel_format = V4L2_PIX_FMT_BGR32;
    } else {
        E("%s: Unsupported number of bits per pixel %d",
          __FUNCTION__, wcd->gdi_bitmap->bmiHeader.biBitCount);
        _camera_device_reset(wcd);
        return -1;
    }

    D("%s: Capturing device '%s': %d bits per pixel in %.4s [%dx%d] frame",
      __FUNCTION__, wcd->window_name, wcd->gdi_bitmap->bmiHeader.biBitCount,
      (const char*)&wcd->pixel_format, wcd->frame_bitmap->bmiHeader.biWidth,
      wcd->frame_bitmap->bmiHeader.biHeight);

    return 0;
}
Exemple #27
0
bool CvCaptureCAM_VFW::setProperty(int property_id, double value)
{
    bool handledSize = false;

    switch( property_id )
    {
    case CV_CAP_PROP_FRAME_WIDTH:
        width = cvRound(value);
        handledSize = true;
        break;
    case CV_CAP_PROP_FRAME_HEIGHT:
        height = cvRound(value);
        handledSize = true;
        break;
    case CV_CAP_PROP_FOURCC:
        break;
    case CV_CAP_PROP_FPS:
        if( value > 0 )
        {
            CAPTUREPARMS params;
            if( capCaptureGetSetup(capWnd, &params, sizeof(params)) )
            {
                params.dwRequestMicroSecPerFrame = cvRound(1e6/value);
                return capCaptureSetSetup(capWnd, &params, sizeof(params)) == TRUE;
            }
        }
        break;
    default:
        break;
    }

    if ( handledSize )
    {
        // If both width and height are set then change frame size.
        if( width > 0 && height > 0 )
        {
            const DWORD size = capGetVideoFormatSize(capWnd);
            if( size == 0 )
                return false;

            unsigned char *pbi = new unsigned char[size];
            if( !pbi )
                return false;

            if( capGetVideoFormat(capWnd, pbi, size) != size )
            {
                delete []pbi;
                return false;
            }

            BITMAPINFOHEADER& vfmt = ((BITMAPINFO*)pbi)->bmiHeader;
            bool success = true;
            if( width != vfmt.biWidth || height != vfmt.biHeight )
            {
                // Change frame size.
                vfmt.biWidth = width;
                vfmt.biHeight = height;
                vfmt.biSizeImage = height * ((width * vfmt.biBitCount + 31) / 32) * 4;
                vfmt.biCompression = BI_RGB;
                success = capSetVideoFormat(capWnd, pbi, size) == TRUE;
            }
            if( success )
            {
                // Adjust capture window size.
                CAPSTATUS status = {};
                capGetStatus(capWnd, &status, sizeof(status));
                ::SetWindowPos(capWnd, NULL, 0, 0, status.uiImageWidth, status.uiImageHeight, SWP_NOZORDER|SWP_NOMOVE);
                // Store frame size.
                widthSet = width;
                heightSet = height;
            }
            delete []pbi;
            width = height = -1;

            return success;
        }

        return true;
    }

    return false;
}
Exemple #28
0
// Dialog procedure for the capture UI: creates a VFW capture window on init,
// sets up an x264 compressor via the COMPVARS structure, and handles
// connect (I_BUTTON_CONN) / teardown (I_BUTTON_EXIT, WM_CLOSE).
// Returns -1 for handled messages, 0 otherwise.
// NOTE(review): dialog procedures conventionally return TRUE/FALSE
// (INT_PTR) — verify that the registration site expects these values.
long __stdcall DlgProc ( HWND hWnd , unsigned msg , unsigned wParam , long lParam )
{ 
   switch(msg)
   {
      case WM_INITDIALOG: 
			//hEdit = GetDlgItem( hWnd , I_EDIT );  
			//GetClientRect( hEdit , &rect );
			// Create a 320x240 child window that hosts the capture preview.
			hWndCap = capCreateCaptureWindow ( NULL, WS_CHILD | WS_VISIBLE , 0, 0, 320, 240, hWnd, 1235 );
			//hWndCap = capCreateCaptureWindow ( NULL, WS_CHILD | WS_VISIBLE , 0, 0, (rect.right-rect.left ), (rect.bottom-rect.top), hEdit, 1235);
						
			// Manually fill in the CapVar (COMPVARS) structure.
			ZeroMemory( &CapVar, sizeof(COMPVARS) );   
			CapVar.cbSize = sizeof(COMPVARS);
			CapVar.dwFlags = ICMF_COMPVARS_VALID;   
			CapVar.cbState = 0;   
			CapVar.fccHandler = mmioFOURCC( 'x', '2', '6', '4' );   // x264 codec
			CapVar.fccType = ICTYPE_VIDEO;

			// Open the codec; this can take a while. NOTE(review): the original
			// (Russian) comment said "decompressor", but ICMODE_COMPRESS opens
			// it for compression.
			CapVar.hic = ICOpen( ICTYPE_VIDEO, CapVar.fccHandler, ICMODE_COMPRESS ); 

			// Worker thread that presumably sends the compressed frames out —
			// TODO confirm against SendThread's definition.
			hThread = CreateThread( NULL, 0, (LPTHREAD_START_ROUTINE)SendThread, NULL, 0, 0 );
						
			return -1 ;

      case WM_COMMAND:
			switch(LOWORD(wParam))			
			{
				case I_BUTTON_CONN :

					// Connect the capture window to capture driver 0; bail out
					// of the dialog entirely if no driver is available.
					if( !capDriverConnect( hWndCap, 0 ) )
               {
                  EndDialog ( hWnd, 0 );
                  return -1;
               }
									
					// Read current capture parameters, then adjust them:
					capCaptureGetSetup( hWndCap, &CapParms, sizeof(CAPTUREPARMS) );        
   
					CapParms.dwRequestMicroSecPerFrame = 66000;    // ~15 fps
					CapParms.fLimitEnabled = FALSE;     
					CapParms.fCaptureAudio = FALSE;    
					CapParms.fMCIControl = FALSE;     
					CapParms.fYield = TRUE;      // capture on a background thread
					CapParms.vKeyAbort = VK_ESCAPE;    
					CapParms.fAbortLeftMouse = FALSE;    
					CapParms.fAbortRightMouse = FALSE;    
					capCaptureSetSetup( hWndCap, &CapParms, sizeof(CAPTUREPARMS) );    
     
					// Enable preview at ~15 fps. NOTE(review): capPreviewScale is
					// called twice (first with 1, then with FALSE) — the second
					// call wins; verify which scaling mode is intended.
					capPreviewScale( hWndCap, 1 );     
					capPreviewRate( hWndCap, 66 );     
					capPreviewScale( hWndCap, FALSE );    
					capPreview( hWndCap, 1 );    
									
					//added by jimmy 

					// OPTIONAL STEP: Setup resolution
					capGetVideoFormat( hWndCap, &InputBmpInfo ,sizeof(InputBmpInfo) );
					//InputBmpInfo.bmiHeader.biWidth = 320; //(rect.right-rect.left );
					//InputBmpInfo.bmiHeader.biHeight = 240; //(rect.bottom-rect.top);
					//InputBmpInfo.bmiHeader.biBitCount = 24;
					capSetVideoFormat( hWndCap, &InputBmpInfo, sizeof(InputBmpInfo) );
					//capDriverDisconnect (hWndCap, 0);//Can we do better?
					//capDriverConnect (hWndCap, 0);

					// FrameCallBack will receive every captured frame.
					capSetCallbackOnFrame( hWndCap, FrameCallBack ); 									

					// NOTE(review): comparing an HIC handle with "> 0"; a
					// NULL check would express the intent more directly.
					if(CapVar.hic > 0 )   
					{  
						OutFormatSize = ICCompressGetFormatSize( CapVar.hic, &InputBmpInfo.bmiHeader );   // size of the output format structure for the InputBmpInfo source format
						ICCompressGetFormat( CapVar.hic, &InputBmpInfo.bmiHeader, &OutputBmpInfo.bmiHeader );   // fill in the output format structure OutputBmpInfo
						OutBufferSize = ICCompressGetSize( CapVar.hic, &InputBmpInfo.bmiHeader, &OutputBmpInfo.bmiHeader );   // maximum size of one compressed output frame
						ICSeqCompressFrameStart( &CapVar, &InputBmpInfo );  // begin sequence compression
					}

					break;

				case I_BUTTON_EXIT :

					// Teardown mirrors WM_CLOSE below.
					// NOTE(review): ICCompressorFree releases the resources held
					// in CapVar; the subsequent ICClose(CapVar.hic) may operate
					// on an already-freed handle — verify against the ICM docs.
					ICSeqCompressFrameEnd(&CapVar);   // end sequence compression
					ICCompressorFree(&CapVar);   
					ICClose(CapVar.hic);  

					capPreview( hWndCap , false );		
					capDriverDisconnect( hWndCap );

					EndDialog ( hWnd , 0 ) ;									
					break;

			}
		   return -1 ;	

      case WM_CLOSE :

			// Same teardown sequence as I_BUTTON_EXIT (see notes there).
			ICSeqCompressFrameEnd(&CapVar);   // end sequence compression
			ICCompressorFree(&CapVar);   
			ICClose(CapVar.hic);  

			capPreview( hWndCap , false );		
			capDriverDisconnect( hWndCap );
				
			EndDialog ( hWnd , 0 ) ;
         return -1 ;

   }

   return 0 ;
}