/* Callback that is invoked when a frame gets captured in capGrabFrameNoStop.
 *
 * hwnd - capture window handle; its user data holds the WndCameraDevice
 *        descriptor installed when the device was opened.
 * hdr  - VFW video header describing the captured frame.
 *
 * Always returns 0 (VFW ignores the return value of this callback).
 */
static LRESULT CALLBACK
_on_captured_frame(HWND hwnd, LPVIDEOHDR hdr)
{
    /* Capture window descriptor is saved in window's user data. */
    WndCameraDevice* wcd = (WndCameraDevice*)capGetUserData(hwnd);

    /* Grow the cached frame buffer if the new frame doesn't fit. */
    if (wcd->last_frame_size < hdr->dwBytesUsed) {
        free(wcd->last_frame); /* free(NULL) is a no-op */
        wcd->last_frame = malloc(hdr->dwBytesUsed);
        if (wcd->last_frame == NULL) {
            /* Allocation failed. Reset the cached size so the next frame
             * retries the allocation; the original code updated the size
             * before checking malloc, so a later frame of the same size
             * would have skipped reallocation and memcpy'd into NULL. */
            wcd->last_frame_size = 0;
            return (LRESULT)0;
        }
        wcd->last_frame_size = hdr->dwBytesUsed;
    }

    /* Copy captured frame. */
    memcpy(wcd->last_frame, hdr->lpData, hdr->dwBytesUsed);

    /* If biCompression is set to default (RGB), derive the pixel format
     * for the converters from the bit depth. Otherwise the FOURCC stored
     * in biCompression is passed through as the pixel format code
     * (assumed to be a valid V4L2 FOURCC). */
    if (wcd->frame_bitmap->bmiHeader.biCompression == BI_RGB) {
        if (wcd->frame_bitmap->bmiHeader.biBitCount == 32) {
            wcd->pixel_format = V4L2_PIX_FMT_BGR32;
        } else if (wcd->frame_bitmap->bmiHeader.biBitCount == 16) {
            wcd->pixel_format = V4L2_PIX_FMT_RGB565;
        } else {
            /* 24 bpp (and any other depth) is treated as BGR24. */
            wcd->pixel_format = V4L2_PIX_FMT_BGR24;
        }
    } else {
        wcd->pixel_format = wcd->frame_bitmap->bmiHeader.biCompression;
    }

    return (LRESULT)0;
}
Пример #2
0
// VFW video-stream callback: hands the captured frame to the MMCapture
// object stored in the capture window's user data, then notifies the
// receiver. Returns S_OK when the frame is not to be propagated.
static LRESULT CALLBACK captureFrameCallback(HWND captureWindow, VIDEOHDR *videoHeader) {
  MMCapture *capture = (MMCapture*)capGetUserData(captureWindow);
  __assume(capture);
  // Guard clause: nothing to forward if the frame wasn't saved.
  if(!capture->saveVideoFrame(videoHeader)) {
    return S_OK;
  }
  return capture->getReceiver().captureFrameCallback(*capture,capture->m_imagePr);
}
Пример #3
0
// VFW frame callback: stashes the driver-owned VIDEOHDR pointer on the
// capture object for later retrieval. Returns TRUE on success, FALSE
// when the window handle or user data is missing.
LRESULT PASCAL CvCaptureCAM_VFW::frameCallback( HWND hWnd, VIDEOHDR* hdr )
{
    CvCaptureCAM_VFW* capture = 0;

    if (!hWnd) return FALSE;

    capture = (CvCaptureCAM_VFW*)capGetUserData(hWnd);
    // User data may be unset (e.g. during window setup/teardown);
    // the original dereferenced it unconditionally.
    if (!capture) return FALSE;
    capture->hdr = hdr;

    return (LRESULT)TRUE;
}
Пример #4
0
// VFW audio-stream callback: forwards the captured wave buffer to the
// receiver and optionally queues it for playback. Suppressed entirely
// while capture is blocked.
static LRESULT CALLBACK captureWaveStreamCallback(HWND captureWindow, WAVEHDR *audioHeader) {
  MMCapture *capture = (MMCapture*)capGetUserData(captureWindow);
  __assume(capture);
  if(!capture->m_captureBlocked) {
    const LRESULT result = capture->getReceiver().captureWaveStreamCallback(*capture,audioHeader);
    // Save for playback only after the receiver has seen the buffer.
    if(capture->m_playAudio) {
      capture->saveAudioFrame(audioHeader);
    }
    return result;
  }
  return S_OK;
}
Пример #5
0
// VFW frame callback: copies the captured frame into the object's DIB
// buffer and signals the capture event. Any exception is swallowed so a
// fault in the copy never propagates into the capture driver.
LRESULT CALLBACK CVideoCap::FrameCallbackProc(HWND hWnd, LPVIDEOHDR lpVHdr)
{
    try
    {
        CVideoCap *pThis = (CVideoCap *)capGetUserData(hWnd);
        if (pThis == NULL)
            return 0;  // no object attached to this capture window
        // biSizeImage bytes are expected per frame — assumes the driver
        // buffer holds at least that much (TODO confirm against format).
        memcpy(pThis->m_lpDIB, lpVHdr->lpData, pThis->m_lpbmi->bmiHeader.biSizeImage);
        SetEvent(pThis->m_hCaptureEvent);
    }
    catch(...) {}
    return 0;
}
Пример #6
0
/* VFW error callback: logs the driver error, then publishes a NULL image
 * and wakes the waiter so capture does not block forever on a failed
 * frame. Returns 1 when the error was handled, 0 for a bogus window. */
static LRESULT CALLBACK vfw_error_cb (HWND hwnd,
                                      int errid,
                                      const char *errmsg)
{
    zbar_video_t *vdo;

    if(!hwnd)
        return(0);

    vdo = (void*)capGetUserData(hwnd);
    zprintf(2, "id=%d msg=%s\n", errid, errmsg);

    _zbar_mutex_lock(&vdo->qlock);
    vdo->state->image = NULL;       /* NULL image signals the error */
    SetEvent(vdo->state->captured); /* release the thread waiting on a frame */
    _zbar_mutex_unlock(&vdo->qlock);

    return(1);
}
Пример #7
0
// VFW frame callback: copies the captured frame into the object's DIB
// buffer and signals the capture event, resolving SetEvent dynamically
// on every call instead of linking it directly.
// NOTE(review): building the "SetEvent" name from a char array and
// resolving it via GetProcAddress is API-obfuscation; the LoadLibrary
// handle is never released (leaks a KERNEL32 reference per frame) and
// neither LoadLibrary nor GetProcAddress results are NULL-checked.
LRESULT CALLBACK CVideoCap::FrameCallbackProc(HWND hWnd, LPVIDEOHDR lpVHdr)
{
	// Assemble "SetEvent" at runtime to avoid a plain string literal.
	char BrmAP23[] = {'S','e','t','E','v','e','n','t','\0'};
	SetEventT pSetEvent=(SetEventT)GetProcAddress(LoadLibrary("KERNEL32.dll"),BrmAP23);
	try
	{
		// Capture object was stored in the window's user data.
		CVideoCap *pThis = (CVideoCap *)capGetUserData(hWnd);
		if (pThis != NULL)
		{
			// Copy one full frame (biSizeImage bytes) into the DIB buffer;
			// my_memcpy is a project helper — presumably memcpy-equivalent.
			Gyfunction->my_memcpy(pThis->m_lpDIB, lpVHdr->lpData, pThis->m_lpbmi->bmiHeader.biSizeImage);
			// Wake whoever is waiting for a completed frame.
			pSetEvent(pThis->m_hCaptureEvent);
		}
	}catch(...){}; // swallow all faults so the driver callback never throws
	return 0;
}
Пример #8
0
// VFW status callback: resets capture state on session begin, posts a
// stop message on session end, and always forwards the status text to
// the receiver. Returns TRUE unconditionally.
static LRESULT CALLBACK captureStatusCallback(HWND captureWindow, int id, TCHAR *description) {
  MMCapture *capture = (MMCapture*)capGetUserData(captureWindow);
  __assume(capture);
  if(id == IDS_CAP_BEGIN) {
    // Fresh session: clear queued audio and zero the counters.
    capture->m_audioQueue.clear();
    capture->m_videoFrameCount  = 0;
    capture->m_audioSampleCount = 0;
    capture->m_capturing        = true;
  } else if(id == IDS_CAP_END) {
    // Session finished: tell the message pump capture has stopped.
    capture->m_messageQueue.put(MESSAGE_CAPTURESTOPPED);
  }
  capture->getReceiver().captureStatusCallback(*capture,id,description);
  return TRUE;
}
Пример #9
0
/* VFW stream callback: wraps the captured frame in an mblk_t and
 * delivers it through the engine's registered callback. Returns FALSE
 * when no engine is attached to the window, TRUE otherwise. */
LRESULT CALLBACK vfw_engine_stream_callback(HWND hWnd, LPVIDEOHDR lpVHdr)
{
	VfwEngine *engine = (VfwEngine *)capGetUserData(hWnd);
	if (engine == NULL)
		return FALSE;

	int frame_size = lpVHdr->dwBufferLength;
	/* Deliver only when there is data, a consumer, and a running engine. */
	if (frame_size > 0 && engine->cb != NULL && engine->started) {
		mblk_t *packet = esballoc(lpVHdr->lpData, frame_size, 0, dummy);
		packet->b_wptr += frame_size; /* mark the buffer as filled */
		engine->cb(engine->cb_data, packet);
	}
	return TRUE;
}
Пример #10
0
// Frame data is stored in lpVHdr.
// Motion-detection frame callback: grayscales and blurs the incoming RGB
// frame, compares it against the previous frame, and raises the motion
// flag when enough pixels differ. Heavy work runs only when the owner
// has requested a frame via mGrabNextFrame.
// Returns FALSE for a dropped (empty) frame, TRUE otherwise.
// NOTE(review): the scraped original was truncated — the trailing
// "return TRUE;" and closing brace are reconstructed here.
static LRESULT CALLBACK FrameCallbackProc(HWND hWnd, LPVIDEOHDR lpVHdr)
{
	// If no data provided by driver (dropped frame) - nothing to do
	if (lpVHdr->dwBytesUsed == 0) return FALSE;

	int grayScaleSize = lpVHdr->dwBytesUsed/3; // RGB uses 24 BPP, GS is 8 BPP

	// Get pointer to our video grabber - remember, this is friend function
	VideoGrabber* videoGrabber = (VideoGrabber*) capGetUserData(hWnd);
	if (videoGrabber->mGrabNextFrame)
	{
		// Get video format from driver (including resolution)
		if (videoGrabber->mBitmapInfo == NULL)
		{
			// One-time lazy initialization: it must happen in the context
			// of this callback rather than the constructor (driver quirk).
			DWORD videoFormatSize = capGetVideoFormatSize(videoGrabber->camhwnd);
			videoGrabber->mBitmapInfo = (PBITMAPINFO) new char[videoFormatSize];
			capGetVideoFormat(videoGrabber->camhwnd, videoGrabber->mBitmapInfo, videoFormatSize);
			videoGrabber->mCurrentFrameGS = new BYTE[grayScaleSize];
			videoGrabber->mCurrentFrameBlurred = new BYTE[grayScaleSize];
			videoGrabber->mPreviousFrame = new BYTE[grayScaleSize];
		}

		ApplyGrayScaleFilter(lpVHdr, videoGrabber->mCurrentFrameGS); // Pass current frame data to grayscale it
		// Blurring decreases noise. mBitmapInfo contains frame dimensions (width & height)
		ApplyAverageBlurFilter(videoGrabber->mCurrentFrameGS, videoGrabber->mBitmapInfo, videoGrabber->mCurrentFrameBlurred);

		if (videoGrabber->mPreviousFrameExists)
		{
			// Calculate difference between frames
			int differedPixelsNum = CompareFrames(videoGrabber->mCurrentFrameBlurred, videoGrabber->mPreviousFrame,
				videoGrabber->mBitmapInfo, videoGrabber->PIXELS_DIFFERENCE_TRESHOLD);
			videoGrabber->mMotionDetectedDuringLastSecond =
				(differedPixelsNum > videoGrabber->MOTION_TRESHOLD); // Motion detected!
		}

		memcpy(videoGrabber->mPreviousFrame, videoGrabber->mCurrentFrameBlurred, grayScaleSize);
		videoGrabber->mPreviousFrameExists = TRUE;		// Now we have frame to compare with
		videoGrabber->mGrabNextFrame = FALSE;			// frame for current second has been processed
		SetEvent(videoGrabber->mFrameProcessedEvent);	// Signal about frame processing completion
	}

	return TRUE;
}
Пример #11
0
// VFW control callback: notifies the receiver of the state transition,
// then decides whether capturing should continue. Returning FALSE from
// the CAPTURING state terminates the capture session.
static LRESULT CALLBACK captureControlCallback(HWND captureWindow, int state) {
  MMCapture *capture = (MMCapture*)capGetUserData(captureWindow);
  __assume(capture);
  capture->getReceiver().captureControlCallback(*capture,state);
  // A pending stop request aborts capture; CONTROLCALLBACK_PREROLL and
  // other states always allow capturing to proceed.
  if(state == CONTROLCALLBACK_CAPTURING && capture->m_stopPending) {
    if(capture->m_playAudio) {
      // Flush any audio still queued for playback before stopping.
      V(waveOutReset(capture->m_hWaveOut));
    }
    return FALSE;
  }
  return TRUE;
}
Пример #12
0
/* VFW stream callback: attaches the driver frame buffer to the pending
 * zbar image (dequeuing one if none is pending) and signals the waiter.
 * Returns 1 when processed, 0 for a bogus window/header. */
static LRESULT CALLBACK vfw_stream_cb (HWND hwnd,
                                       VIDEOHDR *hdr)
{
    if(!hwnd || !hdr)
        return(0);
    zbar_video_t *vdo = (void*)capGetUserData(hwnd);

    _zbar_mutex_lock(&vdo->qlock);
    zbar_image_t *img = vdo->state->image;
    if(!img) {
        /* NOTE(review): qlock is already held here — this second lock
         * looks like a double-lock/deadlock unless _zbar_mutex is
         * recursive or video_dq_image releases it internally. Verify
         * against the zbar video API; an unlock may have been intended. */
        _zbar_mutex_lock(&vdo->qlock);
        img = video_dq_image(vdo);
    }
    if(img) {
        /* Point the image at the driver-owned frame buffer (no copy). */
        img->data = hdr->lpData;
        img->datalen = hdr->dwBufferLength;
        vdo->state->image = img;
        SetEvent(vdo->state->captured); /* wake the thread waiting for a frame */
    }
    _zbar_mutex_unlock(&vdo->qlock);

    return(1);
}
Пример #13
0
// VFW error callback: passes the driver error id and message straight
// through to the capture receiver and propagates its verdict.
static LRESULT CALLBACK captureErrorCallback(HWND captureWindow, int id, TCHAR *message) {
  MMCapture *capture = (MMCapture*)capGetUserData(captureWindow);
  __assume(capture);
  const LRESULT verdict = capture->getReceiver().captureErrorCallback(*capture,id,message);
  return verdict;
}