Example #1
void MainWindow::initialize()
{
    HookKeyboard::Instance()->unregisterHotkey();
    server_source->stopServer();

    HookKeyboard::Instance()->registerHotkey();

    if (Settings::Instance()->remoteEnable()) {
        server_source->startServer();
    }

    disconnect(HookKeyboard::Instance(), SIGNAL(capturePress()), this, SLOT(startCapture()));
    disconnect(server_source, SIGNAL(ImageRecieved(QPixmap *)), this, SLOT(capture(QPixmap *)));
    connect(HookKeyboard::Instance(), SIGNAL(capturePress()), this, SLOT(startCapture()));
    connect(server_source, SIGNAL(ImageRecieved(QPixmap *)), this, SLOT(capture(QPixmap *)));
}
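
For comparison, the same wiring in Qt 5's pointer-to-member connect syntax is checked at compile time, so a mismatched signature fails the build instead of failing silently at runtime. A sketch assuming the signals and slots declared by the macros above (the misspelled ImageRecieved signal name is kept verbatim from the example):

connect(HookKeyboard::Instance(), &HookKeyboard::capturePress,
        this, &MainWindow::startCapture, Qt::UniqueConnection);
connect(server_source, &ServerSource::ImageRecieved,
        this, &MainWindow::capture, Qt::UniqueConnection);

With Qt::UniqueConnection a repeated connect of the same pair is a no-op, which makes the up-front disconnect calls unnecessary.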
Example #2
void Cam::setup(bool isCapture1080, float cvRatio){
    
    this->isCapture1080 = isCapture1080;
    
    // Start capturing first to work out the camera resolution
    startCapture();
    
    ofLogNotice() << "Setting up camera textures at " << camWidth << "x" << camHeight << ", cvRatio: " << cvRatio;
    this->cvRatio = cvRatio;
    cvWidth = camWidth*cvRatio;
    cvHeight = camHeight*cvRatio;
    
    // CV images
    colorImage.allocate(camWidth,camHeight);
    thisGrayImage.allocate(camWidth,camHeight);
    lastGrayImage.allocate(camWidth,camHeight);
    
    delayMap.allocate(camWidth, camHeight);
    
    // optical flow
    opticalFlowLk.allocate(cvWidth, cvHeight);
    flowX.allocate(cvWidth, cvHeight);
    flowY.allocate(cvWidth, cvHeight);
    flowCombined.allocate(cvWidth, cvHeight);
    flow.allocate(camWidth, camHeight);
    
}
Example #3
void Preview::start(unsigned int format, QSize resolution, int framerate)
{
    if (!wc->previewFormatMap.contains(format)) return;
    Q_UNUSED(framerate);
    if (preview_active) return;

    pixelformat = qFourccToVideoFormat(format);
    qDebug()<<"pixelformat: "<<pixelformat;

    if (pixelformat == QVideoFrame::Format_Invalid) {
        qWarning()<<"v4lwebcam: pixel format not understood by video library";
        return;
    }

    wc->open();
    setupCapture(current_format = format, current_resolution = resolution);


    notifier = new QSocketNotifier(wc->fd, QSocketNotifier::Read , this);
    notifier->setEnabled(false);
    connect(notifier, SIGNAL(activated(int)), this, SLOT(captureFrame()));

    startCapture();
    preview_active = true;
}
Example #4
/*
 * HTTP request handler: capture and send an image from the camera,
 * using the current settings set by the arduCamCommand handler
 */
void onCapture(HttpRequest &request, HttpResponse &response) {

	Serial.printf("perform onCapture()\r\n");

	// TODO: use request parameters to overwrite camera settings
	// setupCamera(camSettings);
	myCAM.clear_fifo_flag();
	myCAM.write_reg(ARDUCHIP_FRAMES, 0x00);

	// get the picture
	startTime = millis();
	startCapture();
	Serial.printf("onCapture() startCapture() %d ms\r\n", millis() - startTime);

	ArduCAMStream *stream = new ArduCAMStream(&myCAM);

	const char * contentType = arduCamCommand.getContentType();

	if (stream->dataReady()) {
		response.setHeader("Content Lenght", String(stream->available()));
		response.sendDataStream(stream, contentType);
	}

	Serial.printf("onCapture() process Stream %d ms\r\n", millis() - startTime);
}
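
The handler above still needs a route; a hypothetical registration sketch, assuming the older Sming HttpServer API that the HttpRequest&/HttpResponse& signature suggests (server name and port are illustrative):

HttpServer server; // assumed global, as in the Sming samples

void startWebServer() {
	server.listen(80);
	server.addPath("/capture", onCapture); // serve captured images here
}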
Example #5
        bool Private_Impl_Still::takePicture ( unsigned char * preallocated_data, unsigned int length ) {
            initialize();
            int ret = 0;
            sem_t mutex;
            sem_init ( &mutex, 0, 0 );
            RASPICAM_USERDATA * userdata = new RASPICAM_USERDATA();
            userdata->cameraBoard = this;
            userdata->encoderPool = encoder_pool;
            userdata->mutex = &mutex;
            userdata->data = preallocated_data;
            userdata->bufferPosition = 0;
            userdata->offset = 0;
            userdata->startingOffset = 0;
            userdata->length = length;
            userdata->imageCallback = NULL;
            encoder_output_port->userdata = ( struct MMAL_PORT_USERDATA_T * ) userdata;
            if ( ( ret = startCapture() ) != 0 ) {
                sem_destroy ( &mutex ); // release the semaphore on the failure path too
                delete userdata;
                return false;
            }
            sem_wait ( &mutex );
            sem_destroy ( &mutex );
            stopCapture();
            delete userdata;

            return true;
        }
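
The unnamed semaphore is what makes takePicture synchronous: the MMAL encoder callback receives it through userdata->mutex and posts it when the image is complete, releasing the sem_wait. A stripped-down sketch of the same pattern, with hypothetical begin_async_capture/on_frame_done names standing in for the MMAL plumbing:

#include <semaphore.h>

// Hypothetical: some API that invokes cb from another thread once a frame is ready.
void begin_async_capture(void (*cb)());

static sem_t done;

static void on_frame_done() {
    sem_post(&done);       // callback thread: signal completion
}

bool capture_blocking() {
    sem_init(&done, 0, 0); // count starts at 0, so the wait below blocks
    begin_async_capture(on_frame_done);
    sem_wait(&done);       // sleep until on_frame_done() posts
    sem_destroy(&done);
    return true;
}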
Example #6
bool CvCaptureCAM_Aravis::open( int index )
{
    if(create(index)) {
        // fetch properties bounds
        pixelFormats = arv_camera_get_available_pixel_formats(camera, &pixelFormatsCnt);

        arv_camera_get_width_bounds(camera, &widthMin, &widthMax);
        arv_camera_get_height_bounds(camera, &heightMin, &heightMax);
        arv_camera_set_region(camera, 0, 0, widthMax, heightMax);

        if( (fpsAvailable = arv_camera_is_frame_rate_available(camera)) )
            arv_camera_get_frame_rate_bounds(camera, &fpsMin, &fpsMax);
        if( (gainAvailable = arv_camera_is_gain_available(camera)) )
            arv_camera_get_gain_bounds (camera, &gainMin, &gainMax);
        if( (exposureAvailable = arv_camera_is_exposure_time_available(camera)) )
            arv_camera_get_exposure_time_bounds (camera, &exposureMin, &exposureMax);

        // get initial values
        pixelFormat = arv_camera_get_pixel_format(camera);
        exposure = exposureAvailable ? arv_camera_get_exposure_time(camera) : 0;
        gain = gainAvailable ? arv_camera_get_gain(camera) : 0;
        fps = arv_camera_get_frame_rate(camera);

        return startCapture();
    }
    return false;
}
Example #7
 void OpenNICamera::setCaptureMode( CaptureMode mode )
 {
     if( mode == _captureMode )
         return;
     stopCapture();
     _captureMode = mode;
     startCapture();
 }
Example #8
void CaptureInterfacesDialog::on_bStart_clicked()
{
    qDebug("Starting capture");

    emit startCapture();

    this->close();
}
Example #9
void CaptureFilterEdit::applyCaptureFilter()
{
    if (syntaxState() == Invalid) {
        return;
    }

    emit startCapture();
}
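
In the two snippets above, startCapture is a Qt signal rather than a member function call: emit only notifies whatever slots are connected, and the actual capture starts elsewhere. A minimal sketch of the declaring side (base class and exact layout are assumptions):

class CaptureInterfacesDialog : public QDialog {
    Q_OBJECT

signals:
    void startCapture(); // emitted on bStart click; a connected slot does the work
};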
Example #10
bool camera_jai::changeAcquitionModel(int trigSetting)
{
	m_TrigSetting = trigSetting;
	stopCapture();
	settingCamera();
	startCapture();

	return true;
}
Example #11
MainWindow::MainWindow(QWidget *parent, int cameratype) : QMainWindow(parent), ui(new Ui::MainWindow)
{

    ui->setupUi(this);

    // set the desired camera type (currently webcam and DSLR)
    this->cameraType = cameratype;
    switch (cameratype)
    {
        case CAMERA_WEBCAM: {
            this->webcam = WebCam(ui->frame);
            setupUIWebcam();
            bool ret = this->webcam.openCamera();
            if (ret == false) {
                QMessageBox msgBox;
                msgBox.setIcon(QMessageBox::Critical);
                msgBox.setText("The camera cannot be opened, check device number or connection.");
                msgBox.exec();
                ui->frame->setText("error opening camera, check device number or if any other program is accessing the camera.");
                return;
            }
        }
        break;

        case CAMERA_DSLR: {
            this->dslr = DSLR(ui);
            setupUIDSLR();
            bool ret = this->dslr.openCamera(&this->dslr);
            if (ret == false) {
                QMessageBox msgBox;
                msgBox.setIcon(QMessageBox::Critical);
                msgBox.setText("The camera cannot be opened! Please check if camera is in PTP mode and unmount memory card");
                msgBox.exec();
                ui->frame->setText("error opening camera, please check if camera is setup to PTP mode and unmount memory card from the system. Quit any application that browses the memory card. In doubt remove flash card and try again.");
                return;
            }
        }
        break;

        default:
            exit(1);
    }
    resizeFrame = false;
    resizeFrame100Percent = false;

    tmrTimer = new QTimer(this);

    // Start the main "loop" that processes the frame update
    connect(tmrTimer, SIGNAL(timeout()), this, SLOT(processFrameAndUpdateGUI()));

    startCapture();

}
Example #12
 int Private_Impl_Still::startCapture ( imageTakenCallback userCallback, unsigned char * preallocated_data, unsigned int offset, unsigned int length ) {
     RASPICAM_USERDATA * userdata = new RASPICAM_USERDATA();
     userdata->cameraBoard = this;
     userdata->encoderPool = encoder_pool;
     userdata->mutex = NULL;
     userdata->data = preallocated_data;
     userdata->bufferPosition = 0;
     userdata->offset = offset;
     userdata->startingOffset = offset;
     userdata->length = length;
     userdata->imageCallback = userCallback;
     encoder_output_port->userdata = ( struct MMAL_PORT_USERDATA_T * ) userdata;
     return startCapture(); // the function is declared int, so propagate the result
 }
Example #13
//Handle when the captureButton is clicked
void MainWindow::on_captureButton_clicked()
{
    if(ui->statusLabel->text() == "Status: IDLE"){
        ui->statusLabel->setText("Status: CAPTURING");
        ui->captureButton->setText("Stop");
        //INITIATE CAPTURE HERE
        startCapture();
    } else {
        ui->statusLabel->setText("Status: IDLE");
        ui->captureButton->setText("Capture");
        //STOP CAPTURE HERE
        stopCapture();
    }
}
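
Branching on the label text works but is fragile; it breaks the moment the strings are translated or renamed. A variant keyed on an explicit state flag (hypothetical capturing_ member, same slots as above) could look like:

void MainWindow::on_captureButton_clicked()
{
    if (!capturing_) {
        ui->statusLabel->setText("Status: CAPTURING");
        ui->captureButton->setText("Stop");
        startCapture();
    } else {
        ui->statusLabel->setText("Status: IDLE");
        ui->captureButton->setText("Capture");
        stopCapture();
    }
    capturing_ = !capturing_; // single source of truth for the capture state
}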
Example #14
bool CvCaptureCAM_Aravis::setProperty( int property_id, double value )
{
    switch ( property_id ) {
        case CV_CAP_PROP_EXPOSURE:
            if(exposureAvailable) {
                /* exposure time in seconds, like 1/100 s */
                value *= 1e6; // -> from s to us
                arv_camera_set_exposure_time(camera, exposure = BETWEEN(value, exposureMin, exposureMax));
                break;
            } else return false;

        case CV_CAP_PROP_FPS:
            if(fpsAvailable) {
                arv_camera_set_frame_rate(camera, fps = BETWEEN(value, fpsMin, fpsMax));
                break;
            } else return false;

        case CV_CAP_PROP_GAIN:
            if(gainAvailable) {
                arv_camera_set_gain(camera, gain = BETWEEN(value, gainMin, gainMax));
                break;
            } else return false;

        case CV_CAP_PROP_FOURCC:
            {
                ArvPixelFormat newFormat = pixelFormat;
                switch((int)value) {
                    case MODE_GRAY8:
                        newFormat = ARV_PIXEL_FORMAT_MONO_8;
                        break;
                    case MODE_GRAY12:
                        newFormat = ARV_PIXEL_FORMAT_MONO_12;
                        break;
                }
                if(newFormat != pixelFormat) {
                    stopCapture();
                    arv_camera_set_pixel_format(camera, pixelFormat = newFormat);
                    startCapture();
                }
            }
            break;

        default:
            return false;
    }

    return true;
}
Example #15
//--------------------------------------------------------------
void ofApp::setup(){
	ofSetVerticalSync(true); // this seems to have no effect on windows/64/glfw currently
	ofSetFrameRate(60);

	gui.init();
	auto rootGrid = gui.addGrid();

	auto deviceList = ofxBlackmagic::Iterator::getDeviceList();

	for(auto device : deviceList) {
		auto input = shared_ptr<ofxBlackmagic::Input>(new ofxBlackmagic::Input());

		static int index = 0;
		auto mode = bmdModeHD1080p30; // switch this mode to match the resolution/refresh of your input stream
		input->startCapture(device, mode);
		this->inputs.push_back(input);

		auto panel = make_shared<ofxCvGui::Panels::Draws>(*input);
		panel->setCaption(input->getDevice().modelName);

		panel->onDraw += [input] (ofxCvGui::DrawArguments&) {
			if (input->isFrameNew()) {
				ofxCvGui::Utils::drawText("New frame", 30, 90);
			}
		};

		rootGrid->add(panel);
	}

	gui.addInspector();


	ofxCvGui::InspectController::X().onClear += [this](ofxCvGui::InspectArguments & args) {
		args.inspector->add(ofxCvGui::Widgets::LiveValueHistory::make("FPS", []() {
			return ofGetFrameRate();
		}));

		for (auto input : this->inputs) {
			args.inspector->add(ofxCvGui::Widgets::LiveValue<string>::make("Timecode", [input]() {
				stringstream ss;
				ss << input->getFrame().getTimecode();
				return ss.str();
			}));
		}
	};

	ofxCvGui::InspectController::X().clear();
}
Example #16
MainWindow::MainWindow(QWidget *parent)
    : QMainWindow(parent)
{
    captureForm = 0;

    Settings::Instance()->setParent(this);

    icon = new TrayIcon(this);
    connect(icon, SIGNAL(DoubleClick()), this, SLOT(startCapture()));

    HookKeyboard::Instance()->registerHotkey();
    server_source = new ServerSource(this);

    initialize();

}
Example #17
        bool  Private_Impl::open ( bool StartCapture ) {
            if ( _isOpened ) return false; //already opened
            // create camera
            if ( ! create_camera_component ( &State ) ) {
                cerr<<__func__<<" Failed to create camera component"<<__FILE__<<" "<<__LINE__<<endl;
                return false;
            }
            commitParameters();
            camera_video_port   = State.camera_component->output[MMAL_CAMERA_VIDEO_PORT];
            callback_data.pstate = &State;
            // assign data to use for callback
            camera_video_port->userdata = ( struct MMAL_PORT_USERDATA_T * ) &callback_data;

            _isOpened=true;
            if ( StartCapture ) return startCapture();
            else return true;
        }
Example #18
void MainWindow::on_btnPauseOrResume_clicked()
{
    if (tmrTimer->isActive() == true) {
        stopCapture();
        ui->btnPauseOrResume->setText("Resume");
        ui->btnPauseOrResume->setIcon(QPixmap(":/appimages/led-icons/control_play.png"));
    }else{
        startCapture();
        ui->btnPauseOrResume->setText("Pause");
        ui->btnPauseOrResume->setIcon(QPixmap(":/appimages/led-icons/control_pause.png"));
    }

    // remove still if we enter live view again
    if (cameraType == CAMERA_DSLR) {
        this->dslr.removeStill();
    }
}
Example #19
 void VideoCaptureDisplay::updateStartCapture()
 {
   ROS_INFO("updateStartCapture");
   if (first_time_) {
     ROS_WARN("ignore first time capture enabling");
   }
   else {
     // start capture!
     if (start_capture_property_->getBool()) {
       capturing_ = true;
       startCapture();
     }
     else {
       capturing_ = false;
       stopCapture();
     }
   }
 }
Example #20
void MainWindow::on_btnSave_clicked()
{

    //QImage qCaptureImg((uchar*)cvmCurrentFrame.data, cvmCurrentFrame.cols, cvmCurrentFrame.rows, cvmCurrentFrame.step, QImage::Format_Indexed8);

    QImage qCaptureImg = MatToQImage(cvmCurrentFrame);

    stopCapture();

    if (filePath.isNull()) {
        filePath = QDir::currentPath();
    }

    QString filename = QFileDialog::getSaveFileName(
        this,
        tr("Save Capture"),
        filePath,
        tr("PNG (*.png);;TIF (*.tif);;JPG (*.jpg)"),
        &fileExtension);

    if( !filename.isNull() )
    {
        // save current path to set selected path in next save action
        QFileInfo fi = QFileInfo(filename);
        filePath = fi.absoluteDir().absolutePath();

        // Generate file path + file name without extension
        // this is done because the extension is set below
        // if not it could save file.png.png if a previous
        // file should be overwritten
        filename = filePath + QDir::separator() + fi.baseName();

        if (fileExtension == "PNG (*.png)") {
            filename += ".png";
        }else if(fileExtension == "TIF (*.tif)") {
            filename += ".tif";
        }else{
            filename += ".jpg";
        }
        qCaptureImg.save(filename);
    }
    startCapture();
}
Example #21
int main()
{
	const int WIDTH = 1280, HEIGHT = 960;

	if (!openCamera(0))
	{
		std::cout << "openCamera failed!" << std::endl;
		return true;
	}

	if (!initCamera())
	{
		std::cout << "initCamera failed!" << std::endl;
		return true;
	}

	bool autov;
	setImageFormat(WIDTH, HEIGHT, 1, IMG_RAW8);
	setValue(CONTROL_EXPOSURE, 400, true);
	setValue(CONTROL_GAIN, 35, false);

	//int exposure_us = getValue(CONTROL_EXPOSURE, &autov);
	//int gain = getValue(CONTROL_GAIN, &autov);
	//int max_gain = getMax(CONTROL_GAIN);
	//std::cout << exposure_us << ", " << gain << ", " << max_gain << std::endl;

	IplImage *buffer = cvCreateImage(cvSize(WIDTH, HEIGHT), IPL_DEPTH_8U, 1);
	startCapture();

	bool captured = false;
	do
	{
		std::this_thread::sleep_for(std::chrono::milliseconds(10)); // throttle polling between attempts
		captured = getImageData((unsigned char*)buffer->imageData, buffer->imageSize, -1);
	} while (!captured);

	cvSaveImage("sun_cam_2.jpg", buffer);
	stopCapture();
	closeCamera();

	return false;
}
Example #22
bool EncodeInputCamera::init(const char* cameraPath, uint32_t fourcc, int width, int height)
{
    bool ret = true;
    if (!width || !height) {
        return false;
    }
    m_width = width;
    m_height = height;
    m_fourcc = fourcc;
    if (!m_fourcc)
        m_fourcc = VA_FOURCC_YUY2;

    ret = initDevice(cameraPath);
    ASSERT(ret);
    ret = startCapture();
    ASSERT(ret);
    m_dataMode = CAMERA_DATA_MODE_MMAP;

    return true;
}
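
Depending on how ASSERT is defined, it may compile to a no-op in release builds, in which case a failed initDevice or startCapture would be silently ignored and init would still return true. A sketch of a variant that propagates the failure (same helpers as above):

    ret = initDevice(cameraPath);
    if (!ret)
        return false; // report the failure instead of only asserting
    ret = startCapture();
    if (!ret)
        return false;
    m_dataMode = CAMERA_DATA_MODE_MMAP;
    return true;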
Example #23
//-----------------------------------------------
void captureApp::startFadeIn(){
	light.lightOff();
	
	if( bEnableOsc ){
		printf("----------sending osc message (capture start)\n");
		ofxOscMessage m;
		m.addStringArg("FadeInStarted");
		m.addStringArg(""); //folder transferred eg: decode-NYC-12939327117
		m.addStringArg("");	//just the timestamp as a string eg: 12939327117
		m.addIntArg(0);		//num images to be transfered
		oscTx.sendMessage(m);
	}	

	if( panel.getValueF("fadeInTime") > 0 ){
		state		= CAP_STATE_FADEIN;
		fadeInStartTime  = ofGetElapsedTimef();
	}else{
		startCapture();
	}
}
Example #24
bool CvCaptureCAM_Aravis::setProperty( int property_id, double value )
{
    switch(property_id) {
        case CV_CAP_PROP_AUTO_EXPOSURE:
            if(exposureAvailable || gainAvailable) {
                if( (controlExposure = (bool)(int)value) ) {
                    exposure = exposureAvailable ? arv_camera_get_exposure_time(camera) : 0;
                    gain = gainAvailable ? arv_camera_get_gain(camera) : 0;
                }
            }
            break;
        case CV_CAP_PROP_BRIGHTNESS:
            exposureCompensation = CLIP(value, -3., 3.);
            break;

        case CV_CAP_PROP_EXPOSURE:
            if(exposureAvailable) {
                /* exposure time in seconds, like 1/100 s */
                value *= 1e6; // -> from s to us

                arv_camera_set_exposure_time(camera, exposure = CLIP(value, exposureMin, exposureMax));
                break;
            } else return false;

        case CV_CAP_PROP_FPS:
            if(fpsAvailable) {
                arv_camera_set_frame_rate(camera, fps = CLIP(value, fpsMin, fpsMax));
                break;
            } else return false;

        case CV_CAP_PROP_GAIN:
            if(gainAvailable) {
                if ( (autoGain = (-1 == value) ) )
                    break;

                arv_camera_set_gain(camera, gain = CLIP(value, gainMin, gainMax));
                break;
            } else return false;

        case CV_CAP_PROP_FOURCC:
            {
                ArvPixelFormat newFormat = pixelFormat;
                switch((int)value) {
                    case MODE_GREY:
                    case MODE_Y800:
                        newFormat = ARV_PIXEL_FORMAT_MONO_8;
                        targetGrey = 128;
                        break;
                    case MODE_Y12:
                        newFormat = ARV_PIXEL_FORMAT_MONO_12;
                        targetGrey = 2048;
                        break;
                    case MODE_Y16:
                        newFormat = ARV_PIXEL_FORMAT_MONO_16;
                        targetGrey = 32768;
                        break;
                    case MODE_GRBG:
                        newFormat = ARV_PIXEL_FORMAT_BAYER_GR_8;
                        targetGrey = 128;
                        break;
                }
                if(newFormat != pixelFormat) {
                    stopCapture();
                    arv_camera_set_pixel_format(camera, pixelFormat = newFormat);
                    startCapture();
                }
            }
            break;

        case CV_CAP_PROP_BUFFERSIZE:
            {
                int x = (int)value;
                if((x > 0) && (x != num_buffers)) {
                    stopCapture();
                    num_buffers = x;
                    startCapture();
                }
            }
            break;


        default:
            return false;
    }

    return true;
}
Example #25
bool CvCaptureCAM_DC1394_v2_CPP::grabFrame()
{
    dc1394capture_policy_t policy = DC1394_CAPTURE_POLICY_WAIT;
    bool code = false, isColor;
    dc1394video_frame_t *dcFrame = 0, *fs = 0;
    int i, nch;

    if (!dcCam || (!started && !startCapture()))
        return false;

    dc1394_capture_dequeue(dcCam, policy, &dcFrame);

    if (!dcFrame)
        return false;

    if (/*dcFrame->frames_behind > 1 ||*/ dc1394_capture_is_frame_corrupt(dcCam, dcFrame) == DC1394_TRUE)
    {
        goto _exit_;
    }

    isColor = dcFrame->color_coding != DC1394_COLOR_CODING_MONO8 &&
              dcFrame->color_coding != DC1394_COLOR_CODING_MONO16 &&
              dcFrame->color_coding != DC1394_COLOR_CODING_MONO16S;

    if (nimages == 2)
    {
        fs = (dc1394video_frame_t*)calloc(1, sizeof(*fs));

        //dc1394_deinterlace_stereo_frames(dcFrame, fs, DC1394_STEREO_METHOD_INTERLACED);
        dc1394_deinterlace_stereo_frames_fixed(dcFrame, fs, DC1394_STEREO_METHOD_INTERLACED);

        dc1394_capture_enqueue(dcCam, dcFrame); // release the captured frame as soon as possible
        dcFrame = 0;
        if (!fs->image)
            goto _exit_;
        isColor = colorStereo;
    }
    nch = isColor ? 3 : 1;

    for (i = 0; i < nimages; i++)
    {
        IplImage fhdr;
        dc1394video_frame_t f = fs ? *fs : *dcFrame, *fc = &f;
        f.size[1] /= nimages;
        f.image += f.size[0] * f.size[1] * i; // TODO: make it more universal
        if (isColor)
        {
            if (!frameC)
                frameC = (dc1394video_frame_t*)calloc(1, sizeof(*frameC));
            frameC->color_coding = nch == 3 ? DC1394_COLOR_CODING_RGB8 : DC1394_COLOR_CODING_MONO8;
            if (nimages == 1)
            {
                dc1394_convert_frames(&f, frameC);
                dc1394_capture_enqueue(dcCam, dcFrame);
                dcFrame = 0;
            }
            else
            {
                f.color_filter = bayerFilter;
                dc1394_debayer_frames(&f, frameC, bayer);
            }
            fc = frameC;
        }
        if (!img[i])
            img[i] = cvCreateImage(cvSize(fc->size[0], fc->size[1]), 8, nch);
        cvInitImageHeader(&fhdr, cvSize(fc->size[0], fc->size[1]), 8, nch);
        cvSetData(&fhdr, fc->image, fc->size[0]*nch);

        // Swap R&B channels:
        if (nch==3)
            cvConvertImage(&fhdr,&fhdr,CV_CVTIMG_SWAP_RB);

        if( rectify && cameraId == VIDERE && nimages == 2 )
        {
            if( !maps[0][0] || maps[0][0]->width != img[i]->width || maps[0][0]->height != img[i]->height )
            {
                CvSize size = cvGetSize(img[i]);
                cvReleaseImage(&maps[0][0]);
                cvReleaseImage(&maps[0][1]);
                cvReleaseImage(&maps[1][0]);
                cvReleaseImage(&maps[1][1]);
                maps[0][0] = cvCreateImage(size, IPL_DEPTH_16S, 2);
                maps[0][1] = cvCreateImage(size, IPL_DEPTH_16S, 1);
                maps[1][0] = cvCreateImage(size, IPL_DEPTH_16S, 2);
                maps[1][1] = cvCreateImage(size, IPL_DEPTH_16S, 1);
                char buf[4*4096];
                if( getVidereCalibrationInfo( buf, (int)sizeof(buf) ) &&
                        initVidereRectifyMaps( buf, maps[0], maps[1] ))
                    ;
                else
                    rectify = false;
            }
            cvRemap(&fhdr, img[i], maps[i][0], maps[i][1]);
        }
        else
            cvCopy(&fhdr, img[i]);
    }

    code = true;

_exit_:
    if (dcFrame)
        dc1394_capture_enqueue(dcCam, dcFrame);
    if (fs)
    {
        if (fs->image)
            free(fs->image);
        free(fs);
    }

    return code;
}
Example #26
void MouseToolHandler::onGLMouseButtonPress(wxMouseEvent& ev)
{
    // Filter out the button that was triggering this event
    unsigned int state = wxutil::MouseButton::GetButtonStateChangeForMouseEvent(ev);

    // For the active tool mapping, we need just the mouse button without modifiers
    unsigned int button = wxutil::MouseButton::GetButtonStateChangeForMouseEvent(ev) & wxutil::MouseButton::ALL_BUTTON_MASK;

    ui::MouseToolPtr activeToolToBeCleared;

    if (_activeMouseTools.find(button) != _activeMouseTools.end())
    {
        // Send the click event to the currently active tool. Some tools stay active after
        // mouse up and might choose to end their action along with the mouse down event
        // The FreeMoveTool with toggle mode activated is such an example.
        ui::MouseToolPtr tool = _activeMouseTools[button];

        switch (processMouseDownEvent(tool, Vector2(ev.GetX(), ev.GetY())))
        {
        case ui::MouseTool::Result::Finished:
            // Tool is done
            activeToolToBeCleared = tool;
            break;

        case ui::MouseTool::Result::Activated:
        case ui::MouseTool::Result::Continued:
            handleViewRefresh(tool->getRefreshMode());
            break;

        case ui::MouseTool::Result::Ignored:
            break;
        };
    }

    // Now consider all the available tools and send the event
    // Currently active tools are handled above, so don't send the event again

    // Get all mouse tools mapped to this button/modifier combination
    ui::MouseToolStack toolStack = GlobalMouseToolManager().getMouseToolsForEvent(_type, state);

    // Remove all active mouse tools from this stack
    toolStack.remove_if(std::bind(&MouseToolHandler::toolIsActive, this, std::placeholders::_1));

    // The candidates have been trimmed, so let's clear out any pending tools
    if (activeToolToBeCleared)
    {
        clearActiveMouseTool(activeToolToBeCleared);
        activeToolToBeCleared.reset();
    }

    // Check which one of the candidates responds to the mousedown event
    ui::MouseToolPtr activeTool;

    for (ui::MouseToolPtr tool : toolStack)
    {
        // Ask each tool to handle the event
        ui::MouseTool::Result result = processMouseDownEvent(tool, Vector2(ev.GetX(), ev.GetY()));

        if (result != ui::MouseTool::Result::Ignored && result != ui::MouseTool::Result::Finished)
        {
            // This tool is now activated
            activeTool = tool;
            break;
        }
    }

    if (!activeTool)
    {
        return;
    }

    // Store this tool in our map
    _activeMouseTools[button] = activeTool;

    unsigned int pointerMode = activeTool->getPointerMode();

    // Check if the mousetool requires pointer freeze
    if ((pointerMode & ui::MouseTool::PointerMode::Capture) != 0)
    {
        startCapture(activeTool);
    }

    if (!_escapeListener)
    {
        // Register a hook to capture the ESC key during the active phase
        _escapeListener.reset(new KeyEventFilter(WXK_ESCAPE,
            std::bind(&MouseToolHandler::handleEscapeKeyPress, this)));
    }
}
Example #27
	void Input::startCapture()
	{
		startCapture("img");
	}
Example #28
/*=====================app_main===========================*/
int app_main()
{
	int i = 0;
	void *capturebuffer0;
	void *displaybuffer;
	int counter = 0;
	int ret = 0;
	struct v4l2_format capture_fmt;
	struct v4l2_format display_fmt;
	int capture_chroma_offset, display_chroma_offset;
	int capture_size;
	int capture_fd, display_fd;
	char outputname[15];
	char stdname[15];
	int capture_numbuffers = MAX_BUFFER, display_numbuffers = MAX_BUFFER;

	for (i = 0; i < MAX_BUFFER; i++) {
		capture_buff_info[i].start = NULL;
		display_buff_info[i].start = NULL;
	}

	/* STEP1:
	 * Initialization section.
	 * Initialize the capture and display devices.
	 * Here one capture channel is opened; the display channel is opened
	 * with the same standard that is detected at the capture channel,
	 * and with the same output name as the input.
	 * */

	/* open capture channel 0 */
	ret = initCapture(&capture_fd, &capture_numbuffers, &capture_fmt);
	if (ret < 0) {
		printf("Error in opening capture device for channel 0\n");
		return ret;
	}

	printf(" Capture initialized\n");
	/* open display channel */
	if (display_enable) {
		ret = initDisplay(&display_fd, &display_numbuffers, &display_fmt);
		if (ret < 0) {
			printf("Error in opening display device\n");
			return ret;
		}
		printf(" Display initialized\n");
		/* Run section
		 * STEP2:
		 * Here the display and capture channels are started for streaming.
		 * After this, the capture device starts capturing frames into the
		 * enqueued buffers and the display device starts displaying buffers
		 * from the enqueued buffers */

		/* start display */
		ret = startDisplay(&display_fd);
		if (ret < 0) {
			printf("Error in starting display\n");
			return ret;
		}
		printf(" display started \n");
	}
	/* start capturing for channel 0 */
	ret = startCapture(&capture_fd);
	if (ret < 0) {
		printf("Error in starting capturing for channel 0\n");
		return ret;
	}

	printf(" capture started \n");

	/* calculate the offset from where chroma data will be stored for 
	 * both capture and display */
	capture_chroma_offset = kernel_buf_size/2;
	//display_chroma_offset = display_fmt.fmt.pix.sizeimage / 2;
	display_chroma_offset = kernel_buf_size/2;
	capture_size = capture_fmt.fmt.pix.width * capture_fmt.fmt.pix.height;

	/* One buffer is dequeued from each of the capture and display channels.
	 * The capture buffer is copied to the display buffer.
	 * Both buffers are then put back to their respective channels.
	 * This sequence is repeated in a loop.
	 * After completion of the loop, the channels are stopped.
	 * */
	printf("Going into loopback\n");

#if 0
	sleep(10);
#else
	while (1) {
		/* get capturing buffer for channel 0 */
		capturebuffer0 = getCaptureBuffer(&capture_fd);
		if (NULL == capturebuffer0) {
			printf("Error in get capture buffer for channel 0\n");
			return ret;
		}

		/* get display buffer */
		if (display_enable) {
			displaybuffer = getDisplayBuffer(&display_fd);
			if (NULL == displaybuffer) {
				printf("Error in get display buffer\n");
				return ret;
			}

			/* Copy Luma data from capture buffer to display buffer */
			memcpy(displaybuffer, capturebuffer0, capture_size);
			/* Copy chroma data from capture buffer to display buffer
			 * from the appropriate offsets in capture buffer and 
			 * display buffer */
			memcpy(displaybuffer + display_chroma_offset,
				capturebuffer0 + capture_chroma_offset,
				capture_size);

			/* put output buffer into display queue */
			ret = putDisplayBuffer(&display_fd, display_numbuffers,
					       displaybuffer);
			if (ret < 0) {
				printf("Error in put display buffer\n");
				return ret;
			}
		}
		if (save_frame && counter == 100) {
			fwrite(capturebuffer0, 1, capture_size,
				file_fp);
			fwrite(capturebuffer0 + capture_chroma_offset,
				1, capture_size,
				file_fp); 
			fclose(file_fp);
		}

		/* put buffers in capture channels */
		ret = putCaptureBuffer(&capture_fd, capture_numbuffers,
				       capturebuffer0);
		if (ret < 0) {
			printf("Error in put capture buffer for channel 0\n");
			return ret;
		}
		counter++;


		if (print_fn)
			printf("time:%lu    frame:%u\n", (unsigned long)time(NULL), counter);

		if (stress_test && counter >= MAXLOOPCOUNT)
			break;
	}
#endif

	printf("After sleep, stop capture/display\n");
	/* stop display */
	if (display_enable) {
		ret = stopDisplay(&display_fd);
		if (ret < 0) {
			printf("Error in stopping display\n");
			return ret;
		}
	}
	/* stop capturing for channel 0 */
	ret = stopCapture(&capture_fd);
	if (ret < 0) {
		printf("Error in stopping capturing for channel 0\n");
		return ret;
	}

	/* close capture channel 0 */
	ret = releaseCapture(&capture_fd, capture_numbuffers);
	if (ret < 0) {
		printf("Error in closing capture device\n");
		return ret;
	}
	/* Free section
	 * Here the channels for capture and display are closed.
	 * */
	/* close display channel */
	if (display_enable) {
		ret = releaseDisplay(&display_fd, display_numbuffers);
		if (ret < 0) {
			printf("Error in closing display device\n");
			return ret;
		}
	}
	return ret;
}
Example #29
void V4L1Preview::start(unsigned int format, QSize size, int framerate)
{
    Q_UNUSED(format);
    Q_UNUSED(size);
    Q_UNUSED(framerate);

    wc->frames = 0;
    wc->currentFrame = 0;

    if (!wc->open())
        emit cameraError(QtopiaCamera::FatalError, QString("cannot open device"));

    notifier = new QSocketNotifier(wc->fd, QSocketNotifier::Read , this);
    notifier->setEnabled(false);
    connect(notifier, SIGNAL(activated(int)), this, SLOT(captureFrame()));

    struct video_picture pict;
    memset( &pict, 0, sizeof( pict ) );
    ioctl( wc->fd, VIDIOCGPICT, &pict );
    pict.palette = VIDEO_PALETTE_RGB24;
    if ( ioctl( wc->fd, VIDIOCSPICT, &pict ) < 0 ) {
        qWarning( "%s: could not set the picture mode", V4L_VIDEO_DEVICE );
        emit cameraError(QtopiaCamera::FatalError, QString("VIDIOCSPICT"));
        wc->close( );
        return;
    }

    current_format = QtopiaCamera::RGB32;
    // Set the new capture window.
    struct video_window wind;
    memset( &wind, 0, sizeof( wind ) );

    wind.x = 0;
    wind.y = 0;
    wind.width = wc->current_resolution.width();
    wind.height = wc->current_resolution.height();
    if ( ioctl( wc->fd, VIDIOCSWIN, &wind ) < 0 ) {
        emit cameraError(QtopiaCamera::FatalError, QString("could not set the capture window"));
        wc->close();
        return;
    }

    // Enable mmap-based access to the camera.
    memset( &wc->mbuf, 0, sizeof( wc->mbuf ) );
    if ( ioctl( wc->fd, VIDIOCGMBUF, &wc->mbuf ) < 0 ) {
        emit cameraError(QtopiaCamera::FatalError, QString("mmap-based camera access is not available"));
        wc->close( );
        return;
    }

    // Mmap the designated memory region.
    wc->frames = (unsigned char *)mmap( 0, wc->mbuf.size, PROT_READ | PROT_WRITE,
                                    MAP_SHARED, wc->fd, 0 );
    if ( !wc->frames || wc->frames == (unsigned char *)(long)(-1) ) {
        emit cameraError(QtopiaCamera::FatalError, QString("could not mmap the device"));
        wc->close( );
        return;
    }
    preview_buffer_bytesused = wc->mbuf.size;
    startCapture();
    preview_active = true;
}
Example #30
int  main()
{
	int width;
	const char* bayer[] = {"RG","BG","GR","GB"};
	const char* controls[MAX_CONTROL] = {"Exposure", "Gain", "Gamma", "WB_R", "WB_B", "Brightness", "USB Traffic"};

	int height;
	int i;
	char c;
	bool bresult;

	int time1,time2;
	int count=0;

	char buf[128]={0};

	int CamNum=0;
	
	///long exposure, exp_min, exp_max, exp_step, exp_flag, exp_default;
	//long gain, gain_min, gain_max,gain_step, gain_flag, gain_default;

	IplImage *pRgb;


	int numDevices = getNumberOfConnectedCameras();
	if(numDevices <= 0)
	{
		printf("no camera connected, press any key to exit\n");
		getchar();
		return -1;
	}
	else
		printf("attached cameras:\n");

	for(i = 0; i < numDevices; i++)
		printf("%d %s\n",i, getCameraModel(i));

	printf("\nselect one to privew\n");
	scanf("%d", &CamNum);


	bresult = openCamera(CamNum);
	if(!bresult)
	{
		printf("OpenCamera error,are you root?,press any key to exit\n");
		getchar();
		return -1;
	}

	printf("%s information\n",getCameraModel(CamNum));
	int iMaxWidth, iMaxHeight;
	iMaxWidth = getMaxWidth();
	iMaxHeight =  getMaxHeight();
	printf("resolution:%dX%d\n", iMaxWidth, iMaxHeight);
	if(isColorCam())
		printf("Color Camera: bayer pattern:%s\n",bayer[getColorBayer()]);
	else
		printf("Mono camera\n");
	
	for( i = 0; i < MAX_CONTROL; i++)
	{
			if(isAvailable((Control_TYPE)i))
				printf("%s support:Yes\n", controls[i]);
			else
				printf("%s support:No\n", controls[i]);
	}

	printf("\nPlease input the <width height bin image_type> with one space, ie. 640 480 2 0. use max resolution if input is 0. Press ESC when video window is focused to quit capture\n");
	int bin = 1, Image_type;
	scanf("%d %d %d %d", &width, &height, &bin, &Image_type);
	if(width == 0 || height == 0)
	{
		width = iMaxWidth;
		height = iMaxHeight;
	}

	initCamera(); // this must be called before any camera operation, and it only needs to be called once
	printf("sensor temperature:%02f\n", getSensorTemp());

//	IMG_TYPE image_type;
	
	while(!setImageFormat(width, height, bin, (IMG_TYPE)Image_type))//IMG_RAW8
	{
		printf("Set format error, please check the width and height\n ASI120's data size(width*height) must be integer multiple of 1024\n");
		printf("Please input the width and height again£¬ie. 640 480\n");
		scanf("%d %d %d %d", &width, &height, &bin, &Image_type);
	}
	printf("\nset image format %d %d %d %d success, start privew, press ESC to stop \n", width, height, bin, Image_type);

	
	if(Image_type == IMG_RAW16)
		pRgb=cvCreateImage(cvSize(getWidth(),getHeight()), IPL_DEPTH_16U, 1);
	else if(Image_type == IMG_RGB24)
		pRgb=cvCreateImage(cvSize(getWidth(),getHeight()), IPL_DEPTH_8U, 3);
	else
		pRgb=cvCreateImage(cvSize(getWidth(),getHeight()), IPL_DEPTH_8U, 1);

	setValue(CONTROL_EXPOSURE, 100*1000, false); //ms//auto
	setValue(CONTROL_GAIN,getMin(CONTROL_GAIN), false); 
	setValue(CONTROL_BANDWIDTHOVERLOAD, getMin(CONTROL_BANDWIDTHOVERLOAD), false); //low transfer speed

	setValue(CONTROL_WB_B, 90, false);
 	setValue(CONTROL_WB_R, 48, false);
  	setAutoPara(getMax(CONTROL_GAIN)/2,10,150); //max auto gain and exposure and target brightness
//	EnableDarkSubtract("dark.bmp"); //dark subtract will be disabled when exposure set auto and exposure below 500ms
	startCapture(); // start preview


	

	bDisplay = 1;
#ifdef _LIN
	pthread_t thread_display;
	pthread_create(&thread_display, NULL, Display, (void*)pRgb);
#elif defined _WINDOWS
	HANDLE thread_setgainexp;
	thread_setgainexp = (HANDLE)_beginthread(Display,  NULL, (void*)pRgb);
#endif

	time1 = GetTickCount();
	int iStrLen = 0, iTextX = 40, iTextY = 60;
	void* retval;
//	int time0, iWaitMs = -1;
//	bool bGetImg;
	while(bMain)
	{

//		time0 = GetTickCount();
		getImageData((unsigned char*)pRgb->imageData, pRgb->imageSize, 200);

//		bGetImg = getImageData((unsigned char*)pRgb->imageData, pRgb->imageSize, iWaitMs);
		time2 = GetTickCount();
//		printf("waitMs%d, deltaMs%d, %d\n", iWaitMs, time2 - time0, bGetImg);
		count++;
		
		if(time2-time1 > 1000 )
		{
			sprintf(buf, "fps:%d dropped frames:%lu ImageType:%d",count, getDroppedFrames(), (int)getImgType());

			count = 0;
			time1=GetTickCount();	
			printf("%s\n", buf); // pass buf as an argument, never as the format string

		}
		if(Image_type != IMG_RGB24 && Image_type != IMG_RAW16)
		{
			iStrLen = strlen(buf);
			CvRect rect = cvRect(iTextX, iTextY - 15, iStrLen* 11, 20);
			cvSetImageROI(pRgb , rect);
			cvSet(pRgb, CV_RGB(180, 180, 180)); 
			cvResetImageROI(pRgb);
		}
		cvText(pRgb, buf, iTextX,iTextY );

		if(bChangeFormat)
		{
			bChangeFormat = 0;
			bDisplay = false;
			pthread_join(thread_display, &retval);
			cvReleaseImage(&pRgb);
			stopCapture();
			
			switch(change)
			{
				 case change_imagetype:
					Image_type++;
					if(Image_type > 3)
						Image_type = 0;
					
					break;
				case change_bin:
					if(bin == 1)
					{
						bin = 2;
						width/=2;
						height/=2;
					}
					else 
					{
						bin = 1;
						width*=2;
						height*=2;
					}
					break;
				case change_size_smaller:
					if(width > 320 && height > 240)
					{
						width/= 2;
						height/= 2;
					}
					break;
				
				case change_size_bigger:
				
					if(width*2*bin <= iMaxWidth && height*2*bin <= iMaxHeight)
					{
						width*= 2;
						height*= 2;
					}
					break;
			}
			setImageFormat(width, height, bin, (IMG_TYPE)Image_type);
			if(Image_type == IMG_RAW16)
				pRgb=cvCreateImage(cvSize(getWidth(),getHeight()), IPL_DEPTH_16U, 1);
			else if(Image_type == IMG_RGB24)
				pRgb=cvCreateImage(cvSize(getWidth(),getHeight()), IPL_DEPTH_8U, 3);
			else
				pRgb=cvCreateImage(cvSize(getWidth(),getHeight()), IPL_DEPTH_8U, 1);
			bDisplay = 1;
			pthread_create(&thread_display, NULL, Display, (void*)pRgb);
			startCapture(); // start preview
		}
	}
END:
	
	if(bDisplay)
	{
		bDisplay = 0;
#ifdef _LIN
   		pthread_join(thread_display, &retval);
#elif defined _WINDOWS
		Sleep(50);
#endif
	}
	
	stopCapture();
	closeCamera();
	cvReleaseImage(&pRgb);
	printf("main function over\n");
	return 1;
}