// Prepare the VideoWriter for recording the camera stream
bool ProcessingThread::startRecord(std::string filepath, bool captureOriginal)
{
    // Release any recording that is still open from a previous run
    releaseCapture();

    // Initial parameters for the VideoWriter
    // Frame size
    int w = (int)currentROI.width;
    int h = (int)currentROI.height;
    // Codec. WATCH OUT: not every codec is available on every PC;
    // MP4V was chosen because it is widely supported across systems
    //int codec = CV_FOURCC('M','P','4','V');
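    // Note: with OpenCV 3+ the equivalent of the legacy C macro above would be
    //       int codec = cv::VideoWriter::fourcc('M','P','4','V');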
    // Check whether grayscale mode is on (or the camera only captures grayscale)
    bool isColor = !((imgProcFlags.grayscaleOn)||(currentFrame.channels() == 1));
    // Frame width is doubled if the original frame should be captured alongside the processed one
    Size s = captureOriginal ? Size(w*2, h) : Size(w, h);
 
    bool opened = false;
    output = VideoWriter();
    opened = output.open(filepath, savingCodec, statsData.averageFPS, s, isColor);
    recordingFramerate = statsData.averageFPS;

    if(opened) {
        this->doRecord = true;
        this->captureOriginal = captureOriginal;
    }

    return opened;
}
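// The doubled-width Size above implies that, when captureOriginal is true, each
// written frame carries the original and the processed image side by side. A
// minimal sketch of how such a frame could be assembled with cv::hconcat before
// being handed to the VideoWriter is shown below; the function name and frame
// arguments are assumptions and are not part of the original ProcessingThread code.
#include <opencv2/opencv.hpp>

static void writeSideBySide(cv::VideoWriter &writer,
                            const cv::Mat &original, const cv::Mat &processed)
{
    cv::Mat combined;
    // Both frames must share the same height and type for hconcat to succeed
    cv::hconcat(original, processed, combined);
    writer.write(combined);
}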
Example #2
void CIrrWindow::_OnRButtonUp	( uiMouseEvent mouseEvent, int x, int y )
{
	releaseCapture();

	SEvent irrEvent;
	memset(&irrEvent, 0, sizeof(SEvent));

	irrEvent.EventType			= irr::EET_MOUSE_INPUT_EVENT;
	irrEvent.MouseInput.Event	= irr::EMIE_RMOUSE_LEFT_UP;
	irrEvent.MouseInput.X		= (short)x;
	irrEvent.MouseInput.Y		= (short)y;
	irrEvent.MouseInput.Shift	= mouseEvent.isShiftKeyDown();
	irrEvent.MouseInput.Control = mouseEvent.isCtrlKeyDown();
	irrEvent.MouseInput.Alt		= mouseEvent.isAltKeyDown();
	irrEvent.MouseInput.ButtonStates |= irr::EMBSM_LEFT;

	// post event
	m_device->postEventFromUser(irrEvent);

	// controller event
	// CControllerManager::getInstance()->getCurrentController()->onRMouseUp( x, y );

	//  set focus
	uiApplication::getRoot()->setFocus();
}
Example #3
static void dispose_ane()
{
    int32_t i;
    CCapture* cap;
    for(i = 0; i < MAX_ACTIVE_CAMS; i++)
    {
        cap = active_cams[i];

        if(cap)
        {
            releaseCapture(cap);

            active_cams[i] = 0;
        }
    }
    active_cams_count = 0;

    if(resize_buf_size)
    {
        cvFree(resize_buf);
        resize_buf_size = 0;
    }
    
    if(_cam_shot_data) free(_cam_shot_data);
    _ctx = NULL;
}
Example #4
void ATOM_CurveEditor::onMButtonUp (ATOM_WidgetMButtonUpEvent *event)
{
	if (_enableScroll)
	{
		_dragTarget = DRAG_NONE;
		releaseCapture ();
	}
}
Example #5
// Destructor
ProcessingThread::~ProcessingThread()
{
    doStopMutex.lock();
    doStop = true;
    if(releaseCapture())
        qDebug() << "Released Capture";

    processingBuffer.clear();
    doStopMutex.unlock();
    wait();
}
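// For reference, a releaseCapture() used the way the destructor above uses it
// typically just closes the underlying cv::VideoCapture and reports whether
// anything was actually open. The sketch below assumes a cv::VideoCapture
// member named cap; that name is hypothetical and not taken from the original
// ProcessingThread source.
bool ProcessingThread::releaseCapture()
{
    if (cap.isOpened()) {
        cap.release();   // free the camera or file handle
        return true;     // triggers the "Released Capture" debug output above
    }
    return false;
}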
Example #6
void ATOM_CurveEditor::onLButtonUp (ATOM_WidgetLButtonUpEvent *event)
{
	if (_dragTarget == DRAG_POINT)
	{
		notifyChange ();
	}

	_dragTarget = DRAG_NONE;

	releaseCapture ();
}
Example #7
void ATOM_CellData::onLButtonUp (ATOM_WidgetLButtonUpEvent *event)
{
	ATOM_STACK_TRACE(ATOM_CellData::onLButtonUp);

	if(_clientDragging)
	{
		moveTo(_oldPosition.x, _oldPosition.y);
	}

	if (getRenderer()->getCapture() == this)
	{
		setBorderMode (ATOM_Widget::Raise);
		releaseCapture ();
		ATOM_Widget *parent = getParent ();
		if(parent)
		{
			ATOM_Widget* hover = getRenderer()->getHover();
			if(hover && hover->getType() == WT_CELLDATA && hover->getParent())
			{
				ATOM_CellLButtonUpEvent e(hover->getId(), 0, 0, this, 0, 0);
				hover->getParent()->handleEvent(&e);
			}
			else if(hover && hover->getType() == WT_REALTIMECTRL)
			{
				ATOM_Cell* c = dynamic_cast<ATOM_Cell*>(getParent());
				if(c)
				{
					int xSize, ySize;
					c->getCellSize(xSize, ySize);
					ATOM_CellLButtonUpEvent e(hover->getId(), 0, 0, this, getId()%xSize, getId()/xSize);
					hover->handleEvent(&e);
				}
				else
				{
					ATOM_CellLButtonUpEvent e(hover->getId(), 0, 0, this, 0, 0);
					hover->handleEvent(&e);
				}
			}
			else
			{
				ATOM_CellLButtonUpEvent e(getId(), 0, 0, NULL, 0, 0);
				parent->handleEvent(&e);
			}

			if (isMouseHover())
			{
				if (parent && !_clientDragging)
				{
					parent->queueEvent (ATOM_NEW(ATOM_CellLeftClickEvent, getId(), 0, 0), ATOM_APP);
				}
			}
		}
	}
}
Example #8
 virtual LRESULT handleMessage(UINT uMsg, WPARAM wParam, LPARAM lParam)
 {
     Vector p = vectorFromLParam(lParam);
     switch (uMsg) {
         case WM_LBUTTONDOWN:
             _lButtonDown = true;
             break;
         case WM_LBUTTONUP:
             _lButtonDown = false;
             releaseCapture();
             break;
         case WM_MBUTTONDOWN:
             _mButtonDown = true;
             break;
         case WM_MBUTTONUP:
             _mButtonDown = false;
             releaseCapture();
             break;
         case WM_RBUTTONDOWN:
             _rButtonDown = true;
             break;
         case WM_RBUTTONUP:
             _rButtonDown = false;
             releaseCapture();
             break;
         case WM_MOUSEMOVE:
             break;
         case WM_CHAR:
             break;
         case WM_KILLFOCUS:
             _capture = false;
             ReleaseCapture();
             break;
     }
     return WindowsWindow::handleMessage(uMsg, wParam, lParam);
 }
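// The handler above only releases mouse capture; the matching SetCapture call
// is assumed to live in the corresponding button-down path of the real code.
// Below is a minimal sketch of the usual Win32 pairing; the function names are
// hypothetical and not taken from the snippet above.
#include <windows.h>

static void onAnyButtonDown(HWND hwnd)
{
    SetCapture(hwnd);     // route all subsequent mouse input to this window
}

static void onAnyButtonUp()
{
    ReleaseCapture();     // return mouse input to the window under the cursor
}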
Example #9
void ATOM_Button::onRButtonUp (ATOM_WidgetRButtonUpEvent * event)
{
	ATOM_STACK_TRACE(ATOM_Button::onRButtonUp);

	if (getRenderer()->getCapture() == this)
	{
		setBorderMode (ATOM_Widget::Raise);
		releaseCapture ();

		if (isMouseHover ())
		{
			onRClicked (event->shiftState);
		}
	}
}
Example #10
void ATOM_CellData::onRButtonUp (ATOM_WidgetRButtonUpEvent *event)
{
	ATOM_STACK_TRACE(ATOM_CellData::onRButtonUp);

	if (getRenderer()->getCapture() == this)
	{
		setBorderMode (ATOM_Widget::Raise);
		releaseCapture ();

		if (isMouseHover ())
		{
			ATOM_Widget *parent = getParent ();
			if (parent)
			{
				parent->queueEvent (ATOM_NEW(ATOM_CellRightClickEvent, getId(), 0, 0), ATOM_APP);
			}
		}
	}
}
Example #11
LRESULT Dispatcher::onLButtonUp( UINT uMsg, WPARAM wParam, LPARAM lParam, BOOL& bHandled )
{
	Point pt(lParam);
	Widget* pWid = NULL;
	LRESULT lResult = 0;
	pWid = getObject(m_h2oCaptured);
	if (pWid == NULL)
	{
		pWid = getWidPt(pt);
	}
	if (m_h2oCaptured.first != INVALID_HWID)
	{
		releaseCapture();
		clearH2O(m_h2oCaptured);
	}
	if (pWid != NULL)
	{
		lResult = pWid->sendMessage(uMsg,  wParam, lParam);
		Widget* pLastMM = getObject(m_h2oLastMouseMove);
		if (pLastMM != NULL)
		{
			pLastMM->sendMessage(WM_MOUSELEAVE);
		}
		clearH2O(m_h2oLastMouseMove);
	}
	pWid = getWidPt(pt);
	if (pWid != NULL
		&& pWid->isShow()
		&& m_h2oLButtonDown.first == pWid->getHwid()
		&& m_h2oLButtonDown.second == pWid)
	{
		pWid->sendMessage(WUM_LBUTTONCLICK, wParam, lParam);
	}
	clearH2O(m_h2oLButtonDown);
	return lResult;
}
Example #12
FREObject delCapture(FREContext ctx, void* funcData, uint32_t argc, FREObject argv[])
{
    int32_t _id;
    FREGetObjectAsInt32(argv[0], &_id);


    if(_id < 0 || _id >= MAX_ACTIVE_CAMS)
    {
        return NULL;
    }

    CCapture* cap;
    cap = active_cams[_id];

    if(cap)
    {
        releaseCapture(cap);

        active_cams[_id] = 0;
        active_cams_count--;
    }

    return NULL;
}
void ZSplitter::OnRButtonUp( UINT nFlags, CPoint point )
{
  releaseCapture();
  CWnd::OnRButtonUp( nFlags, point );
}
void ProcessingThread::stop()
{
    QMutexLocker locker(&doStopMutex);
    releaseCapture();
    doStop=true;
}
/*=====================app_main===========================*/
int app_main()
{
	int i = 0;
	void *capturebuffer0;
	void *displaybuffer;
	int counter = 0;
	int ret = 0;
	struct v4l2_format capture_fmt;
	struct v4l2_format display_fmt;
	int capture_chroma_offset, display_chroma_offset;
	int capture_size;
	int capture_fd, display_fd;
	char outputname[15];
	char stdname[15];
	int capture_numbuffers = MAX_BUFFER, display_numbuffers = MAX_BUFFER;

	for (i = 0; i < MAX_BUFFER; i++) {
		capture_buff_info[i].start = NULL;
		display_buff_info[i].start = NULL;
	}

	/* STEP1:
	 * Initialization section
	 * Initialize the capture and display devices.
	 * Here one capture channel is opened.
	 * The display channel is opened with the same standard that is
	 * detected on the capture channel, and with the same output name
	 * as the input.
	 * */

	/* open capture channel 0 */
	ret = initCapture(&capture_fd, &capture_numbuffers, &capture_fmt);
	if (ret < 0) {
		printf("Error in opening capture device for channel 0\n");
		return ret;
	}

	printf(" Capture initialized\n");
	/* open display channel */
	if (display_enable) {
		ret = initDisplay(&display_fd, &display_numbuffers, &display_fmt);
		if (ret < 0) {
			printf("Error in opening display device\n");
			return ret;
		}
		printf(" Display initialized\n");
		/* run section
		 * STEP2:
		 * Here the display and capture channels are started for streaming.
		 * After this the capture device starts capturing frames into the
		 * enqueued buffers and the display device starts displaying
		 * buffers from the enqueued buffers */

		/* start display */
		ret = startDisplay(&display_fd);
		if (ret < 0) {
			printf("Error in starting display\n");
			return ret;
		}
		printf(" display started \n");
	}
	/* start capturing for channel 0 */
	ret = startCapture(&capture_fd);
	if (ret < 0) {
		printf("Error in starting capturing for channel 0\n");
		return ret;
	}

	printf(" capture started \n");

	/* calculate the offset at which chroma data will be stored for
	 * both capture and display */
	capture_chroma_offset = kernel_buf_size/2;
	//display_chroma_offset = display_fmt.fmt.pix.sizeimage / 2;
	display_chroma_offset = kernel_buf_size/2;
	capture_size = capture_fmt.fmt.pix.width * capture_fmt.fmt.pix.height;

	/* One buffer is dequeued from each of the display and capture channels.
	 * The capture buffer is copied into the display buffer.
	 * Both buffers are then put back on their respective channels.
	 * This sequence is repeated in a loop; after the loop completes,
	 * the channels are stopped.
	 * */
	printf("Going into loopback\n");

#if 0
	sleep(10);
#else
	while (1) {
		/* get capturing buffer for channel 0 */
		capturebuffer0 = getCaptureBuffer(&capture_fd);
		if (NULL == capturebuffer0) {
			printf("Error in get capture buffer for channel 0\n");
			return ret;
		}

		/* get display buffer */
		if (display_enable) {
			displaybuffer = getDisplayBuffer(&display_fd);
			if (NULL == displaybuffer) {
				printf("Error in get display buffer\n");
				return ret;
			}

			/* Copy Luma data from capture buffer to display buffer */
			memcpy(displaybuffer, capturebuffer0, capture_size);
			/* Copy chroma data from the capture buffer to the display
			 * buffer, using the appropriate chroma offsets in each
			 * buffer */
			memcpy(displaybuffer + display_chroma_offset,
				capturebuffer0 + capture_chroma_offset,
				capture_size);

			/* put output buffer into display queue */
			ret = putDisplayBuffer(&display_fd, display_numbuffers,
					       displaybuffer);
			if (ret < 0) {
				printf("Error in put display buffer\n");
				return ret;
			}
		}
		if (save_frame && counter == 100) {
			fwrite(capturebuffer0, 1, capture_size,
				file_fp);
			fwrite(capturebuffer0 + capture_chroma_offset,
				1, capture_size,
				file_fp); 
			fclose(file_fp);
		}

		/* put buffers in capture channels */
		ret = putCaptureBuffer(&capture_fd, capture_numbuffers,
				       capturebuffer0);
		if (ret < 0) {
			printf("Error in put capture buffer for channel 0\n");
			return ret;
		}
		counter++;


		if (print_fn)
			printf("time:%lu    frame:%u\n", (unsigned long)time(NULL), counter);

		if (stress_test && counter >= MAXLOOPCOUNT)
			break;
	}
#endif

	printf("After sleep, stop capture/display\n");
	/* stop display */
	if (display_enable) {
		ret = stopDisplay(&display_fd);
		if (ret < 0) {
			printf("Error in stopping display\n");
			return ret;
		}
	}
	/* stop capturing for channel 0 */
	ret = stopCapture(&capture_fd);
	if (ret < 0) {
		printf("Error in stopping capturing for channel 0\n");
		return ret;
	}

	/* close capture channel 0 */
	ret = releaseCapture(&capture_fd, capture_numbuffers);
	if (ret < 0) {
		printf("Error in closing capture device\n");
		return ret;
	}
	/* Free section
	 * Here the capture and display channels are closed.
	 * */
	/* close display channel */
	if (display_enable) {
		ret = releaseDisplay(&display_fd, display_numbuffers);
		if (ret < 0) {
			printf("Error in closing display device\n");
			return ret;
		}
	}
	return ret;
}
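// For completeness, the releaseCapture(&capture_fd, capture_numbuffers) call in
// app_main above is expected to unmap the memory-mapped capture buffers and
// close the device node. The sketch below shows that typical V4L2 cleanup; it
// reuses the capture_buff_info and kernel_buf_size globals from the example and
// is an assumption about the implementation, not the original function.
#include <sys/mman.h>
#include <unistd.h>

static int releaseCaptureSketch(int *capture_fd, int numbuffers)
{
	int i;
	for (i = 0; i < numbuffers; i++) {
		if (capture_buff_info[i].start) {
			/* undo the mmap done during buffer setup */
			munmap(capture_buff_info[i].start, kernel_buf_size);
			capture_buff_info[i].start = NULL;
		}
	}
	/* close the capture device node */
	return close(*capture_fd);
}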