Exemple #1
0
// Shut down the PvAPI capture: halt acquisition, release the camera
// handle, then tear down the PvAPI library state. Order matters: the
// stream must be stopped before the handle is closed.
void CvCaptureCAM_PvAPI::close()
{
    // Stop the acquisition & free the camera
    stopCapture();
    PvCameraClose(Camera.Handle);   // release the camera handle
    PvUnInitialize();               // uninitialize the PvAPI library
}
// Close the V4L2 device: stop streaming, release memory-mapped buffers
// and close the file descriptor.
// Returns 1 on success, 0 when the device was not open or uses an
// unsupported I/O method.
int VideoCaptureV4L2::closeDevice() {
  if(!is_opened) {
    return 0;
  }

  // Turn off streaming, i/o
  stopCapture();

  // Unmap mapped memory
  if(io_method == LINCAP_IO_METHOD_MMAP) {
    // size_t index: avoids the signed/unsigned comparison against
    // buffers.size() the original int counter produced.
    for(size_t i = 0; i < buffers.size(); ++i) {
      printf("Unmapping MMAP buffer: %zu/%zu\n", i, buffers.size());
      LinuxCaptureBuffer* b = buffers[i];
      if(munmap(b->start, b->length) == -1) {
        printf("ERROR: cannot unmap buffer: %zu\n", i);
      }
      delete b;
    }
    buffers.clear();
  }
  else {
    printf("ERROR: cannot close device because it's using an I/O method we haven't programmed yet.\n");
    return 0;
  }

  printf("Closing capture file descriptor.\n");
  if(close(fd) == -1) {
    printf("ERROR: cannot close device.\n");
  }

  is_opened = false;
  fd = 0; // NOTE(review): 0 is a valid fd (stdin); -1 would be a safer sentinel — confirm callers before changing
  return 1;
}
Exemple #3
0
 // Stop the capture thread, then the capture itself.
 // Returns true once capture has been stopped.
 bool Camera::close() {
     stopThread();
     // for some reason waiting for the thread keeps it from
     // completing, but sleeping then stopping capture is ok.
     ofSleepMillis(100);
     stopCapture();
     // fix: the function is declared bool but had no return statement —
     // flowing off the end of a value-returning function is UB in C++.
     return true;
 }
// Capture a single still image synchronously into preallocated_data.
// Blocks on a semaphore until the encoder callback signals completion.
// Returns false when the capture could not be started.
bool Private_Impl_Still::takePicture ( unsigned char * preallocated_data, unsigned int length ) {
    initialize();
    int ret = 0;
    // Binary semaphore: posted by the encoder callback when the image is done.
    sem_t mutex;
    sem_init ( &mutex, 0, 0 );
    RASPICAM_USERDATA * userdata = new RASPICAM_USERDATA();
    userdata->cameraBoard = this;
    userdata->encoderPool = encoder_pool;
    userdata->mutex = &mutex;
    userdata->data = preallocated_data;
    userdata->bufferPosition = 0;
    userdata->offset = 0;
    userdata->startingOffset = 0;
    userdata->length = length;
    userdata->imageCallback = NULL;
    encoder_output_port->userdata = ( struct MMAL_PORT_USERDATA_T * ) userdata;
    if ( ( ret = startCapture() ) != 0 ) {
        // fix: the original leaked the semaphore here and left the port's
        // userdata pointing at the freed struct.
        sem_destroy ( &mutex );
        encoder_output_port->userdata = NULL;
        delete userdata;
        return false;
    }
    sem_wait ( &mutex );
    sem_destroy ( &mutex );
    stopCapture();
    encoder_output_port->userdata = NULL; // avoid dangling pointer to freed userdata
    delete userdata;

    return true;
}
Exemple #5
0
 // Switch the camera to a new capture mode. No-op when the requested mode
 // is already active; otherwise capture is restarted around the change,
 // since the mode cannot be altered while streaming.
 void OpenNICamera::setCaptureMode( CaptureMode mode )
 {
     if( mode != _captureMode ) {
         stopCapture();
         _captureMode = mode;
         startCapture();
     }
 }
Exemple #6
0
// Main capture-session thread: prepares output directories and worker
// threads, records for the configured lecture duration, then shuts the
// capture pipeline down and signals completion to the application.
void CommandLineThread::run() {
    qDebug("Starting main thread");

    // Make directories
    makeDirectories();
    // Write information file
    writeInfoFile();

    // Initialize the threads and the FFmpeg QProcess
    createThreadsFromConfigs();

    // Connect stopCapture signal to processing threads
    for(unsigned int i = 0; i < procThreads.size(); i++) {
        connect(this, SIGNAL(stopCapture()), procThreads[i], SLOT(onQuitProcessing()));
    }

    // Start capturing from PAOL threads and FFmpeg
    ffmpegProcess->start(ffmpegCommand.c_str());//tried moving this after cameras start since video working while cameras failing
    // Give FFmpeg a head start before launching the processing threads.
    sleep(5);
    for(unsigned int i = 0; i < procThreads.size(); i++) {
        procThreads[i]->start();
    }

    // Wait for the duration of the lecture, then signal threads to finish
    sleep(lectureDuration);
    emit stopCapture();
    // Stop FFmpeg
    //ffmpegProcess->write("q");
    //Kill gst process through system - send SIGINT
    system("ps -ef | awk '/[g]st-launch-1.0/ {print $2}' | xargs kill -INT");
    // Let the pipeline flush before closing FFmpeg's stdin.
    sleep(3);
    ffmpegProcess->closeWriteChannel();

    // Wait for FFmpeg and processing threads to finish
    ffmpegProcess->waitForFinished();
    for(unsigned int i = 0; i < procThreads.size(); i++) {
        procThreads[i]->wait();
    }

    //qDebug("Uploading All captured lectures");
    //system("~/paol-code/scripts/upload/uploadAll.sh");
    // Let the main application know that this thread finished
    qDebug("***Finishing main thread");
    emit finished();
}
Exemple #7
0
// Apply a new trigger setting by restarting acquisition: capture must be
// stopped before the camera is reconfigured. Always returns true.
// NOTE(review): "Acquition" is a typo for "Acquisition" in the method name;
// renaming would break callers, so it is left as-is.
bool camera_jai::changeAcquitionModel(int trigSetting)
{
	m_TrigSetting = trigSetting;
	stopCapture();
	settingCamera();   // pushes the new trigger setting to the device
	startCapture();

	return true;
}
// Tear down the camera input: stop streaming first, then release the
// device. Both steps are expected to succeed; a failure trips an assert.
EncodeInputCamera::~EncodeInputCamera()
{
    bool stopped = stopCapture();
    ASSERT(stopped);

    bool released = uninitDevice();
    ASSERT(released);
}
Exemple #9
0
// Release the Aravis camera object if one is open; halts the capture
// stream first. Safe to call when the camera is already closed.
void CvCaptureCAM_Aravis::close()
{
    if(!camera)
        return;

    stopCapture();
    g_object_unref(camera);
    camera = NULL;
}
Exemple #10
0
// Shut down the IIDC camera: halt transmission — via the high-level path
// when a capture is active, via the raw dc1394 call otherwise — then
// release the camera and the dc1394 library context.
CameraIIDC::~CameraIIDC(){
    if (!capturing) {
        dc1394_capture_stop(cam);
    } else {
        stopCapture();
    }

    // Gracefully destruct the camera
    dc1394_camera_free(cam);
    dc1394_free(context);
}
Exemple #11
0
// Stop a running preview: end capture, drop the event notifier and close
// the preview window. Does nothing when no preview is active.
void Preview::stop()
{
    if (!preview_active)
        return;

    stopCapture();

    // Tear down the notifier before closing the window.
    disconnect(notifier);
    delete notifier;
    notifier = 0;

    wc->close();
    preview_active = false;
}
Exemple #12
0
// Release the V4L device: stop an active capture, free the buffers and
// close the file descriptor. A camera that was never opened needs no
// cleanup at all.
CameraV4L::~CameraV4L()
{
    if (!opened)
        return;

    if (capturing)
        stopCapture();

    uninit_device(m_io, m_buffers, m_numBuffers);

    if (close(m_fd) == -1)
        cerr << "Error closing device." << endl;
}
//Handle when the captureButton is clicked
// Toggles between idle and capturing based on the current status label,
// keeping label and button text in sync with the capture state.
void MainWindow::on_captureButton_clicked()
{
    if(ui->statusLabel->text() != "Status: IDLE"){
        // Currently capturing -> back to idle
        ui->statusLabel->setText("Status: IDLE");
        ui->captureButton->setText("Capture");
        //STOP CAPTURE HERE
        stopCapture();
    } else {
        // Currently idle -> start capturing
        ui->statusLabel->setText("Status: CAPTURING");
        ui->captureButton->setText("Stop");
        //INITIATE CAPTURE HERE
        startCapture();
    }
}
Exemple #14
0
// Set a camera property through the Aravis API. Values are clamped (via
// BETWEEN) to the range previously read from the camera, and the clamped
// value is cached in the corresponding member. Returns false when the
// property is unsupported or unavailable, true once applied.
// Note the structure: each `break` inside an if exits the switch on the
// success path, while the unavailable case returns false immediately.
bool CvCaptureCAM_Aravis::setProperty( int property_id, double value )
{
    switch ( property_id ) {
        case CV_CAP_PROP_EXPOSURE:
            if(exposureAvailable) {
                /* exposure time in seconds, like 1/100 s */
                value *= 1e6; // -> from s to us
                arv_camera_set_exposure_time(camera, exposure = BETWEEN(value, exposureMin, exposureMax));
                break;
            } else return false;

        case CV_CAP_PROP_FPS:
            if(fpsAvailable) {
                arv_camera_set_frame_rate(camera, fps = BETWEEN(value, fpsMin, fpsMax));
                break;
            } else return false;

        case CV_CAP_PROP_GAIN:
            if(gainAvailable) {
                arv_camera_set_gain(camera, gain = BETWEEN(value, gainMin, gainMax));
                break;
            } else return false;

        case CV_CAP_PROP_FOURCC:
            {
                ArvPixelFormat newFormat = pixelFormat;
                switch((int)value) {
                    case MODE_GRAY8:
                        newFormat = ARV_PIXEL_FORMAT_MONO_8;
                        break;
                    case MODE_GRAY12:
                        newFormat = ARV_PIXEL_FORMAT_MONO_12;
                        break;
                }
                // Changing the pixel format requires restarting the stream.
                if(newFormat != pixelFormat) {
                    stopCapture();
                    arv_camera_set_pixel_format(camera, pixelFormat = newFormat);
                    startCapture();
                }
            }
            break;

        default:
            return false;
    }

    return true;
}
Exemple #15
0
	// Shut down the OpenAL capture device. Serialized via the capture
	// mutex: stops any active capture, closes the ALC device (firing the
	// ON_RELEASE event) and discards all buffered audio data.
	void cAudioCapture::shutdownOpenALDevice()
	{
		cAudioMutexBasicLock lock(Mutex);
		if(Capturing)
			stopCapture();

		if(CaptureDevice)
		{
			alcCaptureCloseDevice(CaptureDevice);
			CaptureDevice = NULL;
			Ready = false;
			getLogger()->logDebug("AudioCapture", "OpenAL Capture Device Closed.");
			signalEvent(ON_RELEASE);
		}
		// Check for AL errors even when no device was open.
		checkError();
		CaptureBuffer.clear();
	}
// Toggle live view: resume capture when the frame timer is stopped, pause
// it when running, and update the button's label/icon to show the action
// that the next click will perform.
void MainWindow::on_btnPauseOrResume_clicked()
{
    if (!tmrTimer->isActive()) {
        startCapture();
        ui->btnPauseOrResume->setText("Pause");
        ui->btnPauseOrResume->setIcon(QPixmap(":/appimages/led-icons/control_pause.png"));
    } else {
        stopCapture();
        ui->btnPauseOrResume->setText("Resume");
        ui->btnPauseOrResume->setIcon(QPixmap(":/appimages/led-icons/control_play.png"));
    }

    // remove still if we enter live view again
    if (cameraType == CAMERA_DSLR) {
        this->dslr.removeStill();
    }
}
// Audio-encoding thread loop: waits for full frames of cached samples,
// encodes them (AAC header first, when applicable) and enqueues the
// resulting packets. Exits when m_stop is set or on an encode failure.
void BleAudioCapture::run()
{
    m_stop = false;
    while (!m_stop) {

        // if use aac, then should send aac sequence header.
        BleAudioEncoder_AAC *aac_encoder = dynamic_cast<BleAudioEncoder_AAC*>(m_audioEncoder);
        if (!m_hasSendHeader && aac_encoder) {
            BleAudioPacket *pkt = new BleAudioPacket(Audio_Type_AAC);
            pkt->data = aac_encoder->getHeader();

            BleAVQueue::instance()->enqueue(pkt);
            m_hasSendHeader = true;
        }

        int frameSize = m_audioEncoder->getFrameSize();

        m_mutex.lock();
        // Block until at least one full frame of samples has been cached.
        while (m_bytesCache.size() < frameSize) {
            m_waitCondtion.wait(&m_mutex);
        }

        // Drain every complete frame currently cached.
        while (m_bytesCache.size() >= frameSize) {
            QByteArray frame = m_bytesCache.mid(0, frameSize);
            m_bytesCache.remove(0, frameSize);

            QByteArray outputArray;
            if (!m_audioEncoder->encode(frame, outputArray)) {
                // fix: release the mutex before bailing out — the original
                // returned with m_mutex still locked, permanently blocking
                // the producer feeding m_bytesCache.
                m_mutex.unlock();
                stopCapture();
                log_error("encode audio failed.");
                return;
            }

            if (outputArray.size() > 0) {
                BleAudioPacket *pkt = new BleAudioPacket(Audio_Type_AAC);
                pkt->data = outputArray;
                BleAVQueue::instance()->enqueue(pkt);
            }
        }

        m_mutex.unlock();
    }

    log_trace("BleAudioCapture Thread exit normally");
}
Exemple #18
0
	// Builds the point-list widget: installs a point-specific item
	// delegate on the list and wires the position/capture buttons plus
	// the capture timer. Starts with an empty selection.
	PointListWidget::PointListWidget(QWidget *parent)
		: QWidget(parent),
		ui(new Ui::PointListWidget)
	{
		ui->setupUi(this);

		setMinimumHeight(250);

		// Initialize button/selection state for an empty list.
		updateClearStatus();
		on_list_itemSelectionChanged();

		// Replace the default delegate with our point-specific one.
		// NOTE(review): the old delegate is deleted explicitly here —
		// presumably it is not owned by the view; confirm against Qt docs.
		delete ui->list->itemDelegate();
		ui->list->setItemDelegate(new PointItemDelegate(this));

		connect(ui->addPositionPushButton, SIGNAL(positionChosen(QPoint)), this, SLOT(positionChosen(QPoint)));
		connect(ui->capturePathPushButton, SIGNAL(positionChosen(QPoint)), this, SLOT(stopCapture()));
		// The timer drives repeated capture() calls while path capture runs.
		connect(&mCaptureTimer, SIGNAL(timeout()), this, SLOT(capture()));
	}
 void VideoCaptureDisplay::updateStartCapture()
 {
   ROS_INFO("updateStartCapture");
   if (first_time_) {
     ROS_WARN("ignore first time capture enabling");
   }
   else {
     // start capture!
     if (start_capture_property_->getBool()) {
       capturing_ = true;
       startCapture();
     }
     else {
       capturing_ = false;
       stopCapture();
     }
   }
 }
// Trigger DSLR autofocus: pause capture, drop any cached still, run
// autofocus, then resume live view by reusing the pause/resume handler.
// The button is disabled for the duration to prevent re-entry.
void MainWindow::on_btnAutofocus_clicked()
{
    ui->btnAutofocus->setDisabled(true);
    stopCapture();
    // remove still if we enter live view again
    if (cameraType == CAMERA_DSLR) {
        this->dslr.removeStill();
    }
    this->dslr.triggerAutofocus();
    // Reuse the pause/resume handler to restart the frame timer.
    this->on_btnPauseOrResume_clicked();
    ui->btnAutofocus->setEnabled(true);

    //stopCapture();
    //this->dslr.closeCamera();
    //this->dslr.openCamera(&this->dslr);


}
Exemple #21
0
// Main window constructor: builds the UI, preloads status icons, wires
// the audio recorder, network manager and processing signals, and
// prepares the single-shot timer that ends a capture when it fires.
MainWindow::MainWindow(QWidget *parent)
    : QMainWindow(parent), ui(new Ui::MainWindow)
    , _inProgress(false)
{
    // Preload icons for faster switching
    iconMicrophone = new QIcon(":/images/microphone.png");
    iconRecord = new QIcon(":/images/record.png");
    iconProcess = new QIcon(":/images/process.png");
    iconNetwork = new QIcon(":/images/network.png");

    // React to desktop resizes (orientation changes on Maemo).
    connect(QApplication::desktop(), SIGNAL(resized(int)),
            this, SLOT(orientationChanged()));
#ifdef Q_WS_MAEMO_5
    setAttribute(Qt::WA_Maemo5StackedWindow);
    setAttribute(Qt::WA_Maemo5AutoOrientation);
#endif
    ui->setupUi(this);

    QAction * aboutAction = new QAction(tr("About EchoPrint"), this);
    connect(aboutAction, SIGNAL(triggered()), this, SLOT(on_echoNestButton_clicked()));

    ui->menuBar->addAction(aboutAction);


    nam = new QNetworkAccessManager(this);

    // Recorder writes to DEFAULT_FILE; progress and errors are surfaced
    // through the UI slots below.
    recorder = new RecorderMobility(this, DEFAULT_FILE);
    connect(recorder, SIGNAL(durationChanged(qint64)),
            this, SLOT(updateProgress(qint64)));
    connect(recorder, SIGNAL(error(QMediaRecorder::Error)),
            this, SLOT(displayErrorMessage()));

    // Pipeline wiring: capture finished -> fingerprint -> show result.
    connect(this, SIGNAL(captureFinished()), this, SLOT(processAudio()));
    connect(this, SIGNAL(resultReady(const EchoNestSong *)),
            this, SLOT(showResult(const EchoNestSong *)));

#ifdef USE_LIBECHONEST
    Echonest::Config::instance()->setAPIKey(API_KEY.toLatin1());
#endif

    // Single-shot timer: stops the capture when the interval elapses.
    timer = new QTimer(this);
    timer->setSingleShot(true);
    connect(timer, SIGNAL(timeout()), this, SLOT(stopCapture()));
}
// Take a still photo with the DSLR: pause live capture, grab a still into
// cvmCurrentFrame and display it fitted to the screen. The pause button
// is put into its "Resume" state so the user returns to live view
// explicitly; the photo button is disabled during the operation.
void MainWindow::on_btnTakePhoto_clicked()
{
    ui->btnTakePhoto->setDisabled(true);
    stopCapture();
    // close and open the camera again, because canon dslr does not exit live view
    // and autofocus does not work
    //this->dslr.closeCamera();
    //this->dslr.openCamera(&this->dslr);
    this->dslr.captureStillImage(cvmCurrentFrame);
    ui->btnPauseOrResume->setText("Resume");
    ui->btnPauseOrResume->setIcon(QPixmap(":/appimages/led-icons/control_play.png"));
    ui->btnPauseOrResume->setChecked(true);

    // Show the captured still scaled to the window.
    ui->btnFitToScreen->setChecked(true);
    resizeFrame = true;
    updateFrameIfNecessary();
    ui->btnTakePhoto->setEnabled(true);

}
void MainWindow::on_btnSave_clicked()
{

    //QImage qCaptureImg((uchar*)cvmCurrentFrame.data, cvmCurrentFrame.cols, cvmCurrentFrame.rows, cvmCurrentFrame.step, QImage::Format_Indexed8);

    QImage qCaptureImg = MatToQImage(cvmCurrentFrame);

    stopCapture();

    if (filePath.isNull()) {
        filePath = QDir::currentPath();
    }

    QString filename = QFileDialog::getSaveFileName(
        this,
        tr("Save Capture"),
        filePath,
        tr("PNG (*.png);;TIF (*.tif);;JPG (*.jpg)"),
        &fileExtension);

    if( !filename.isNull() )
    {
        // save current path to set selected path in next save action
        QFileInfo fi = QFileInfo(filename);
        filePath = fi.absoluteDir().absolutePath();

        // Generate file path + file name without extension
        // this is done because the extension is set below
        // if not it could save file.png.png if a previous
        // file should be overwritten
        filename = filePath + QDir::separator() + fi.baseName();

        if (fileExtension == "PNG (*.png)") {
            filename += ".png";
        }else if(fileExtension == "TIF (*.tif)") {
            filename += ".tif";
        }else{
            filename += ".jpg";
        }
        qCaptureImg.save(filename);
    }
    startCapture();
}
// Display thread entry point: shows frames from the shared IplImage until
// the user quits, translating keypresses into format-change requests for
// the capture thread via the bChangeFormat/change globals.
// fix: replaced the `goto END` exit with a structured one (setting
// bDisplay false makes the loop condition fail, reaching the same
// cleanup), and corrected the misleading "//space" key comments.
void* Display(void* params)
{
	IplImage *pImg = (IplImage *)params;
	cvNamedWindow("video", 1);
	while(bDisplay)
	{
		cvShowImage("video", pImg);

		char c=cvWaitKey(1);
		switch(c)
		{
			case 27: // esc: quit both display and main loops
			bDisplay = false;
			bMain = false;
			break; // loop condition is now false, so the while exits

			case 'i': // request image-type change
			bChangeFormat = true;
			change = change_imagetype;
			break;

			case 'b': // request bin-mode change
			bChangeFormat = true;
			change = change_bin;
			break;

			case 'w': // request smaller capture size
			bChangeFormat = true;
			change = change_size_smaller;
			break;

			case 's': // request bigger capture size
			bChangeFormat = true;
			change = change_size_bigger;
			break;
		}
	}
	cvDestroyWindow("video");
	printf("Display thread over\n");
	stopCapture();
	return (void*)0;
}
Exemple #25
0
// Grab one 1280x960 RAW8 frame from camera 0 with fixed exposure/gain and
// save it to "sun_cam_2.jpg". Exit status: 0 (false) on success, 1 (true)
// when the camera cannot be opened or initialized — the bool literals
// convert to the conventional int exit codes, if confusingly.
int main()
{
	const int WIDTH = 1280, HEIGHT = 960;

	if (!openCamera(0))
	{
		std::cout << "openCamera failed!" << std::endl;
		return true;
	}

	if (!initCamera())
	{
		std::cout << "initCamera failed!" << std::endl;
		return true;
	}

	bool autov; // only referenced by the commented-out getValue() calls below
	setImageFormat(WIDTH, HEIGHT, 1, IMG_RAW8);
	// Third argument presumably toggles auto mode for the control — confirm
	// against the camera SDK before relying on it.
	setValue(CONTROL_EXPOSURE, 400, true);
	setValue(CONTROL_GAIN, 35, false);

	//int exposure_us = getValue(CONTROL_EXPOSURE, &autov);
	//int gain = getValue(CONTROL_GAIN, &autov);
	//int max_gain = getMax(CONTROL_GAIN);
	//std::cout << exposure_us << ", " << gain << ", " << max_gain << std::endl;

	IplImage *buffer = cvCreateImage(cvSize(WIDTH, HEIGHT), IPL_DEPTH_8U, 1);
	startCapture();

	bool captured = false;
	do
	{
		// BUG(review): this constructs a duration and immediately discards
		// it — no delay occurs. std::this_thread::sleep_for(...) was
		// presumably intended; fixing requires #include <thread>.
		std::chrono::milliseconds(10);
		captured = getImageData((unsigned char*)buffer->imageData, buffer->imageSize, -1);
	} while (!captured);

	cvSaveImage("sun_cam_2.jpg", buffer);
	stopCapture();
	closeCamera();

	return false;
}
Exemple #26
0
// Prepare the device for encoded capture into a ring file at 'path'
// (a NULL path means data-scanning mode with no output file) and spawn
// the capture thread. Returns false when no channel is tuned.
bool HDHRDevice::setupEncoding(char *path, size_t ringSize)
{
	pthread_t foo; // NOTE(review): handle is discarded — the thread can never be joined from here
	
	stopCapture();
	if(!mChannel) return false;
	
	// path could be NULL, e.g., for data scanning
	if(path) {
		mOutputPath = new char[strlen(path)+1];
		strcpy(mOutputPath, path);
		// NOTE(review): fopen result is unchecked; a NULL mOutputFile then
		// looks identical to scanning mode downstream — confirm intended.
		mOutputFile = fopen(mOutputPath, "wb");
	} // else leave 'em NULL
	mMaxFileSize = ringSize;
	
	// Reset channel state and hand it the (possibly NULL) output file.
	mLastMealSize = 0;
	mChannel->flush();
	mChannel->setOutputFile(mOutputFile, mMaxFileSize);
	mKillCaptureThread = false;
	pthread_create(&foo, NULL, HDHRDevice::captureThreadEntry, this);
	
	return true;
}
 // Tear down the window manager: stop capturing before releasing the
 // scene and clearing the viewer pointer.
 WindowManager::~WindowManager()
 {
   stopCapture ();
   scene_ptr_.reset();
   // Cleared without delete — presumably not owned here; confirm ownership.
   viewer_ptr_ = NULL;
 }
Exemple #28
0
// Stop-motion capture dialog. Builds the full UI: a hidden monitor used
// only to control the capture device through the monitor manager, the
// overlay-effect and configuration menus, capture-device detection
// (Video4Linux / Decklink) and the MLT capture device driving the live
// preview. The `actions` list supplies externally-created actions:
// 0 = capture frame, 1 = switch live, 2 = show overlay.
StopmotionWidget::StopmotionWidget(MonitorManager *manager, KUrl projectFolder, QList< QAction* > actions, QWidget* parent) :
    QDialog(parent)
    , Ui::Stopmotion_UI()
    , m_projectFolder(projectFolder)
    , m_captureDevice(NULL)
    , m_sequenceFrame(0)
    , m_animatedIndex(-1)
    , m_animate(false)
    , m_manager(manager)
    , m_monitor(new StopmotionMonitor(manager, this))
{
    //setAttribute(Qt::WA_DeleteOnClose);
    //HACK: the monitor widget is hidden, it is just used to control the capturedevice from monitormanager
    m_monitor->setHidden(true);
    connect(m_monitor, SIGNAL(stopCapture()), this, SLOT(slotStopCapture()));
    m_manager->appendMonitor(m_monitor);
    QAction* analyse = new QAction(i18n("Send frames to color scopes"), this);
    analyse->setCheckable(true);
    analyse->setChecked(KdenliveSettings::analyse_stopmotion());
    connect(analyse, SIGNAL(triggered(bool)), this, SLOT(slotSwitchAnalyse(bool)));

    QAction* mirror = new QAction(i18n("Mirror display"), this);
    mirror->setCheckable(true);
    //mirror->setChecked(KdenliveSettings::analyse_stopmotion());
    connect(mirror, SIGNAL(triggered(bool)), this, SLOT(slotSwitchMirror(bool)));

    addActions(actions);
    setupUi(this);
    setWindowTitle(i18n("Stop Motion Capture"));
    setFont(KGlobalSettings::toolBarFont());

    live_button->setIcon(KIcon("camera-photo"));

    // Externally supplied actions: 0 = capture frame, 1 = switch live.
    m_captureAction = actions.at(0);
    connect(m_captureAction, SIGNAL(triggered()), this, SLOT(slotCaptureFrame()));
    m_captureAction->setCheckable(true);
    m_captureAction->setChecked(false);
    capture_button->setDefaultAction(m_captureAction);

    connect(actions.at(1), SIGNAL(triggered()), this, SLOT(slotSwitchLive()));

    QAction *intervalCapture = new QAction(i18n("Interval capture"), this);
    intervalCapture->setIcon(KIcon("chronometer"));
    intervalCapture->setCheckable(true);
    intervalCapture->setChecked(false);
    capture_interval->setDefaultAction(intervalCapture);

    preview_button->setIcon(KIcon("media-playback-start"));
    capture_button->setEnabled(false);


    // Build config menu
    QMenu* confMenu = new QMenu;
    m_showOverlay = actions.at(2);
    connect(m_showOverlay, SIGNAL(triggered(bool)), this, SLOT(slotShowOverlay(bool)));
    overlay_button->setDefaultAction(m_showOverlay);
    //confMenu->addAction(m_showOverlay);

    // Overlay-effect submenu: one checkable entry per effect; the saved
    // effect index from settings is restored as checked below.
    m_effectIndex = KdenliveSettings::stopmotioneffect();
    QMenu* effectsMenu = new QMenu(i18n("Overlay effect"));
    QActionGroup* effectGroup = new QActionGroup(this);
    QAction* noEffect = new QAction(i18n("No Effect"), effectGroup);
    noEffect->setData(0);
    QAction* contrastEffect = new QAction(i18n("Contrast"), effectGroup);
    contrastEffect->setData(1);
    QAction* edgeEffect = new QAction(i18n("Edge detect"), effectGroup);
    edgeEffect->setData(2);
    QAction* brightEffect = new QAction(i18n("Brighten"), effectGroup);
    brightEffect->setData(3);
    QAction* invertEffect = new QAction(i18n("Invert"), effectGroup);
    invertEffect->setData(4);
    QAction* thresEffect = new QAction(i18n("Threshold"), effectGroup);
    thresEffect->setData(5);

    effectsMenu->addAction(noEffect);
    effectsMenu->addAction(contrastEffect);
    effectsMenu->addAction(edgeEffect);
    effectsMenu->addAction(brightEffect);
    effectsMenu->addAction(invertEffect);
    effectsMenu->addAction(thresEffect);
    QList <QAction*> list = effectsMenu->actions();
    for (int i = 0; i < list.count(); i++) {
        list.at(i)->setCheckable(true);
        if (list.at(i)->data().toInt() == m_effectIndex) {
            list.at(i)->setChecked(true);
        }
    }
    connect(effectsMenu, SIGNAL(triggered(QAction*)), this, SLOT(slotUpdateOverlayEffect(QAction*)));
    confMenu->addMenu(effectsMenu);

    QAction* showThumbs = new QAction(KIcon("image-x-generic"), i18n("Show sequence thumbnails"), this);
    showThumbs->setCheckable(true);
    showThumbs->setChecked(KdenliveSettings::showstopmotionthumbs());
    connect(showThumbs, SIGNAL(triggered(bool)), this, SLOT(slotShowThumbs(bool)));

    QAction* removeCurrent = new QAction(KIcon("edit-delete"), i18n("Delete current frame"), this);
    removeCurrent->setShortcut(Qt::Key_Delete);
    connect(removeCurrent, SIGNAL(triggered()), this, SLOT(slotRemoveFrame()));

    QAction* conf = new QAction(KIcon("configure"), i18n("Configure"), this);
    connect(conf, SIGNAL(triggered()), this, SLOT(slotConfigure()));

    confMenu->addAction(showThumbs);
    confMenu->addAction(removeCurrent);
    confMenu->addAction(analyse);
    confMenu->addAction(mirror);
    confMenu->addAction(conf);
    config_button->setIcon(KIcon("configure"));
    config_button->setMenu(confMenu);

    connect(sequence_name, SIGNAL(textChanged(const QString&)), this, SLOT(sequenceNameChanged(const QString&)));
    connect(sequence_name, SIGNAL(currentIndexChanged(int)), live_button, SLOT(setFocus()));

    // Video widget holder
    QVBoxLayout *layout = new QVBoxLayout;
    layout->setContentsMargins(0, 0, 0, 0);
    layout->setSpacing(0);
    m_monitor->videoBox->setLineWidth(4);
    layout->addWidget(m_monitor->videoBox);

    if (KdenliveSettings::decklink_device_found()) {
        // Found a BlackMagic device
    }

    if (QFile::exists(KdenliveSettings::video4vdevice())) {
#ifdef USE_V4L
        // Video 4 Linux device detection: probe /dev/video0../dev/video9,
        // add each responding device and preselect the configured one.
        for (int i = 0; i < 10; i++) {
            QString path = "/dev/video" + QString::number(i);
            if (QFile::exists(path)) {
                QStringList deviceInfo = V4lCaptureHandler::getDeviceName(path);
                if (!deviceInfo.isEmpty()) {
                    capture_device->addItem(deviceInfo.at(0), "v4l");
                    capture_device->setItemData(capture_device->count() - 1, path, Qt::UserRole + 1);
                    capture_device->setItemData(capture_device->count() - 1, deviceInfo.at(1), Qt::UserRole + 2);
                    if (path == KdenliveSettings::video4vdevice()) capture_device->setCurrentIndex(capture_device->count() - 1);
                }
            }
        }
#endif /* USE_V4L */
    }

    connect(capture_device, SIGNAL(currentIndexChanged(int)), this, SLOT(slotUpdateDeviceHandler()));
    /*if (m_bmCapture) {
        connect(m_bmCapture, SIGNAL(frameSaved(const QString &)), this, SLOT(slotNewThumb(const QString &)));
        connect(m_bmCapture, SIGNAL(gotFrame(QImage)), this, SIGNAL(gotFrame(QImage)));
    } else live_button->setEnabled(false);*/

    m_frame_preview = new MyLabel(this);
    connect(m_frame_preview, SIGNAL(seek(bool)), this, SLOT(slotSeekFrame(bool)));
    connect(m_frame_preview, SIGNAL(switchToLive()), this, SLOT(slotSwitchLive()));
    layout->addWidget(m_frame_preview);
    m_frame_preview->setSizePolicy(QSizePolicy::Expanding, QSizePolicy::Expanding);
    video_preview->setLayout(layout);

    //kDebug()<<video_preview->winId();

    QString profilePath;
    // Create MLT producer data
    if (capture_device->itemData(capture_device->currentIndex()) == "v4l") {
        // Capture using a video4linux device
        profilePath = KStandardDirs::locateLocal("appdata", "profiles/video4linux");
    }
    else {
        // Decklink capture
        profilePath = KdenliveSettings::current_profile();
    }

    m_captureDevice = new MltDeviceCapture(profilePath, m_monitor->videoSurface, this);
    m_captureDevice->sendFrameForAnalysis = KdenliveSettings::analyse_stopmotion();
    m_monitor->setRender(m_captureDevice);
    connect(m_captureDevice, SIGNAL(frameSaved(const QString &)), this, SLOT(slotNewThumb(const QString &)));

    live_button->setChecked(false);
    button_addsequence->setEnabled(false);
    connect(live_button, SIGNAL(toggled(bool)), this, SLOT(slotLive(bool)));
    connect(button_addsequence, SIGNAL(clicked(bool)), this, SLOT(slotAddSequence()));
    connect(preview_button, SIGNAL(clicked(bool)), this, SLOT(slotPlayPreview(bool)));
    connect(frame_list, SIGNAL(currentRowChanged(int)), this, SLOT(slotShowSelectedFrame()));
    connect(frame_list, SIGNAL(itemClicked(QListWidgetItem*)), this, SLOT(slotShowSelectedFrame()));
    connect(this, SIGNAL(doCreateThumbs(QImage, int)), this, SLOT(slotCreateThumbs(QImage, int)));

    frame_list->addAction(removeCurrent);
    frame_list->setContextMenuPolicy(Qt::ActionsContextMenu);
    frame_list->setHidden(!KdenliveSettings::showstopmotionthumbs());
    parseExistingSequences();
    // Defer going live until after the dialog has been shown.
    QTimer::singleShot(500, this, SLOT(slotLive()));
    connect(&m_intervalTimer, SIGNAL(timeout()), this, SLOT(slotCaptureFrame()));
    m_intervalTimer.setSingleShot(true);
    m_intervalTimer.setInterval(KdenliveSettings::captureinterval() * 1000);
}
Exemple #29
0
// Stop the underlying capture device (when one exists) and broadcast the
// stopCapture signal to listeners.
void StopmotionMonitor::stop()
{
    if (m_captureDevice != NULL) {
        m_captureDevice->stop();
    }
    emit stopCapture();
}
Exemple #30
0
// Release the JAI camera: acquisition must be stopped before the camera
// connection is closed.
camera_jai::~camera_jai(void)
{
	stopCapture();
	closeCamera();
}