コード例 #1
0
ファイル: mainwindow.cpp プロジェクト: SSE4/vmf-1
void MainWindow::openFile()
{
    stop();
    if (vmstream.isOpened())
    {
        if (QMessageBox::question(this, "Save file", "Do you want to save this file before opening other?",
            QMessageBox::Yes, QMessageBox::No) == QMessageBox::Yes)
        {
            vmstream.saveSpeed();
        }
    }
    QString filename = QFileDialog::getOpenFileName( this, "Open video file", QString(),
        "Video files (*.avi *.mpg *.mp4 *.mov *.3gp *.wmv);; All files (*.*)" );
    if (filename.length() == 0)
        return;
    try
    {
        if (vmstream.isOpened())
        {
            vmstream.closeVideo();
            vmstream.closeMetadata();
        }
        vmstream.openFile(filename);
        frameDelay = vmstream.getFrameDelay();
        ui->videoSlider->setEnabled(true);
        startVideo();
        resume();
    }
    catch (const std::runtime_error& e)
    {
        QMessageBox::critical(this, "Error", QString("Cannot open specified file: ") + e.what());
        vmstream.closeVideo();
        vmstream.closeMetadata();
    }
}
コード例 #2
0
// Builds the drone desktop window: one shared DeviceController, three
// control panels stacked into wCtrlContainer, a zero-margin layout for the
// video area, and the menu-action wiring.
QDroneDesktop::QDroneDesktop() {
  setupUi(this);
  
  // Single controller shared by all three panels below.
  // NOTE(review): no Qt parent is passed here — confirm _dctrl is deleted
  // in the destructor, otherwise it leaks.
  _dctrl = new DeviceController();

  // Panels are parented to wCtrlContainer, so Qt owns and deletes them.
  _pDashboard = new QDroneDashboard(_dctrl, this->wCtrlContainer);
  _pDriveCtrl = new QDriveControl(_dctrl, this->wCtrlContainer);
  _pAnimCtrl = new QAnimationControl(_dctrl, this->wCtrlContainer);

  this->wCtrlContainer->layout()->addWidget(_pDashboard);
  this->wCtrlContainer->layout()->addWidget(_pDriveCtrl);
  this->wCtrlContainer->layout()->addWidget(_pAnimCtrl);
  
  // Video area gets a borderless grid layout of its own.
  QGridLayout* videoLayout = new QGridLayout();
  videoLayout->setContentsMargins(0, 0, 0, 0);
  videoLayout->setObjectName(QString::fromUtf8("videoLayout"));
  wVideoContainer->setLayout(videoLayout);

  connect(actionConnect, SIGNAL(triggered()), this, SLOT(connectDrone()));
  // NOTE(review): "actionDisonnect" is presumably the (misspelled) object
  // name generated from the .ui file — verify before renaming.
  connect(actionDisonnect, SIGNAL(triggered()), this, SLOT(disconnectDrone()));
  connect(actionStart_Video, SIGNAL(triggered()), this, SLOT(startVideo()));
  connect(actionStop_Video, SIGNAL(triggered()), this, SLOT(stopVideo()));
  connect(actionStart_Updating, SIGNAL(triggered()), _pDashboard, SLOT(startUpdating()));
  connect(actionStop_Updating, SIGNAL(triggered()), _pDashboard, SLOT(stopUpdating()));
}
コード例 #3
0
ファイル: world.cpp プロジェクト: antonikon/BattleCity
// Loads every sprite/background image used by the game into _images and
// starts the video subsystem. The append order is significant: the rest of
// the code addresses _images by index, so the table below must keep the
// original order (player, enemy, walls, bullet, base, booms, gameOver, title).
World::World()
{
	// Table replaces ten copy-pasted new/append pairs; same images, same order.
	static const char* const paths[] = {
		"data/player1.png",
		"data/enemy1.png",
		"data/wall2.png",
		"data/wall1.png",
		"data/bullet.png",
		"data/base.png",
		"data/boom1.png",
		"data/boom2.png",
		"data/gameOver.png",
		"data/title.png"
	};
	for (size_t i = 0; i < sizeof(paths) / sizeof(paths[0]); ++i)
	{
		// NOTE(review): ownership of the heap-allocated QImage presumably
		// transfers to _images — confirm they are deleted in ~World().
		_images.append(new QImage(paths[i]));
	}
	startVideo();
	//loadMission(0);
}
コード例 #4
0
ファイル: video.cpp プロジェクト: hackorama/tag-video
// Restarts playback once the video has finished (VIDEO_STATUS == 1).
// A successful restart resets the status to 0; a failure records -6.
void 
Video::loopVideo()
{
	if(VIDEO_STATUS != 1)
		return;
	VIDEO_STATUS = startVideo() ? 0 : -6;
}
コード例 #5
0
// Daemon mode: start the video and audio capture subsystems, then idle
// until the SIGINT handler sets the shared flag.
void SyntroLCamConsole::runDaemon()
{
	startVideo();
    startAudio();

    // Poll the interrupt flag set by the signal handler; 100 ms keeps the
    // loop cheap while remaining responsive to shutdown.
    while (!SyntroLCamConsole::sigIntReceived)
		msleep(100); 
}
コード例 #6
0
ファイル: neu_desktop.c プロジェクト: cherry-wb/quietheart
/* Desktop launcher press handler: dispatches a button press to the feature
 * whose icon image contains the press coordinates (browser, audio, video
 * or wifi). Falls through silently when the press hits no icon. */
static void my_button_press(GtkWidget *widget, GdkEventButton *event,
		gpointer data)
{
	int x, y;
//	bstate = PRESSED;

	x = event->x;
	y = event->y;

#if MY_DEBUG_OUTPUT == 1
	g_print("hello, x is:%d, y is:%d\n", x, y);
#endif
	if(in_image1(x,y))
	{/*browser*/
#if MY_DEBUG_OUTPUT == 1
		g_print("pressed in the image1\n");
#endif
		//launchApp_simple("/usr/bin/midbrowser");
		startBrowser(widget);
	}
	else if(in_image2(x,y))
	{/*Audio*/
#if MY_DEBUG_OUTPUT == 1
		g_print("pressed in the image2\n");
#endif
		//launchApp_simple("/usr/bin/ls -a"); /* launch the program here */
		//launchApp_simple("/usr/bin/StartAudio.sh");
		startAudio(widget);
	}
	else if(in_image3(x,y))
	{/*Video*/
#if MY_DEBUG_OUTPUT == 1
		g_print("pressed in the image3\n");
#endif
		//launchApp_simple("/usr/bin/echo good"); /* launch the program here */
		//launchApp_simple("/usr/bin/StartVideo.sh");
		startVideo(widget);
	}
	else if(in_image4(x,y))
	{/*Wifi*/
#if MY_DEBUG_OUTPUT == 1
		g_print("pressed in the image4\n");
#endif
		//launchApp_simple("/usr/bin/echo 'hello world'"); /* launch the program here */
		//launchApp_simple("/usr/bin/StartWifi.sh"); /* launch the program here */
		startWifi(widget);
	}
	else
	{
#if MY_DEBUG_OUTPUT == 1
		g_print("pressed out of all the image!\n");
#endif
	}
}
コード例 #7
0
//----------------------------------------------------------------------------------------------------------------------
// Turns the Kinect video stream on (_mode == true) or off (_mode == false).
void KinectInterface::toggleVideoState(bool _mode)
{
    if (_mode)
        startVideo();
    else
        stopVideo();
}
コード例 #8
0
ファイル: mainwindow.cpp プロジェクト: muromec/qtopia-ezx
void CameraMainWindow::toggleVideo()
{
    if ( recording )
        stopVideo();
    else
        startVideo();
    recording = !recording;
    camera->video->setText(recording ? tr("Stop") : tr("Video"));
    camera->photo->setEnabled(!recording);
    for (int i=0; i<nthumb; i++)
        thumb[i]->setEnabled(!recording && !thumb[i]->icon().isNull());
}
コード例 #9
0
ファイル: mainwindow.cpp プロジェクト: k0ink0in/TQ-WebCam
// Sets up the webcam main window: initializes the video source and the
// image factory, then wires the UI buttons and the image list to their slots.
MainWindow::MainWindow(QWidget *parent) :
    QMainWindow(parent),
    ui(new Ui::MainWindow)
{
    ui->setupUi(this);
    // NOTE(review): vid and img are raw owning pointers with no visible Qt
    // parent — confirm they are deleted in the destructor.
    this->vid = new Video();
    this->vid->init();
    this->img = new ImageFactory(this);
    QObject::connect(ui->buttonStart, SIGNAL(clicked()), this, SLOT(startVideo()));
    QObject::connect(ui->buttonStop, SIGNAL(clicked()), this, SLOT(stopVideo()));    
    QObject::connect(ui->buttonExport, SIGNAL(clicked()), this, SLOT(exportImg()));
    QObject::connect(ui->listImg, SIGNAL(currentTextChanged(QString)), this, SLOT(setFileSelected(QString)));
    QObject::connect(ui->buttonSave, SIGNAL(clicked()), this, SLOT(saveImg()));
}
コード例 #10
0
ファイル: video.cpp プロジェクト: hackorama/tag-video
// Initializes the video pipeline (DirectShow, frame filter, source) and then
// starts playback. Returns true only when every stage and startVideo()
// succeed.
// Fix: the original fell through to startVideo() even when one of the init
// stages had failed; we now bail out early so playback is never attempted on
// a half-initialized graph.
bool 
Video::initVideo()
{
	d_videofile = NULL;
	initVideoParams();

	bool result = initDShow();
	if (result) result = initFrameFilter();
	if (result) result = initSource();
	if (!result)
		return false;

	VIDEO_STATUS = 0;

	// Optionally register the filter graph in the Running Object Table so
	// GraphEdit can attach to it for debugging.
	if (d_graphedit) {
		hr = d_addGraphToRot(graph, &d_graphRegister);
		if (FAILED(hr)) d_graphRegister = NULL;
	}

	return startVideo();
}
コード例 #11
0
ファイル: qtcamera.cpp プロジェクト: jhonconal/QtCamera
// Builds the camera main window: a central, expanding QLabel that displays
// the current frame, menu-action wiring, and a status-bar label for the
// frame rate. Capture-related state starts out cleared; actual capture
// begins via the Start action.
QtCamera::QtCamera(QWidget *parent, Qt::WFlags flags)
	: QMainWindow(parent, flags)
{
	ui.setupUi(this);

	// No capture is running yet.
	m_frameCount = 0;
	m_captureThread = NULL;
	m_frameRateTimer = 0;
	m_frameRefreshTimer = 0;
	m_camera = NULL;

	// Central widget: a vertical layout holding the video display label.
	QWidget *centralWidget = new QWidget(this);
	QVBoxLayout *verticalLayout = new QVBoxLayout(centralWidget);
	verticalLayout->setSpacing(6);
	verticalLayout->setContentsMargins(0, 0, 0, 0);
	m_cameraView = new QLabel(centralWidget);
	
	// Let the view grow with the window but never shrink below 320x240.
	QSizePolicy sizePolicy(QSizePolicy::Expanding, QSizePolicy::Expanding);
	sizePolicy.setHorizontalStretch(0);
	sizePolicy.setVerticalStretch(0);
	sizePolicy.setHeightForWidth(m_cameraView->sizePolicy().hasHeightForWidth());
	m_cameraView->setSizePolicy(sizePolicy);
	m_cameraView->setMinimumSize(QSize(320, 240));
	m_cameraView->setAlignment(Qt::AlignCenter);

	verticalLayout->addWidget(m_cameraView);

	setCentralWidget(centralWidget);


	connect(ui.actionExit, SIGNAL(triggered()), this, SLOT(close()));
	connect(ui.actionStart, SIGNAL(triggered()), this, SLOT(startVideo()));
	connect(ui.actionStop, SIGNAL(triggered()), this, SLOT(stopVideo()));
	connect(ui.actionScale, SIGNAL(triggered()), this, SLOT(toggleScaling()));

	// Permanent status-bar widget showing the measured frame rate.
	m_pStatus = new QLabel(this);
	m_pStatus->setAlignment(Qt::AlignCenter | Qt::AlignLeft);
	m_pStatus->setText("0.0 fps  ");
	ui.statusBar->addPermanentWidget(m_pStatus);

	// Only Start makes sense until capture is running.
	ui.actionStop->setEnabled(false);
	ui.actionStart->setEnabled(true);
	m_scaling = ui.actionScale->isChecked();
}
コード例 #12
0
// Main-window constructor: initializes default processing state (CPU video
// reader, paused playback, no filtering), creates the processing pipeline
// and dialogs, installs event filters for mouse-driven ROI selection on the
// video labels, and wires the UI actions.
BachelorThesis::BachelorThesis(QWidget *parent)
	: QMainWindow(parent),
	videoReader( VideoReader::Type::CPU ),
	playbackSpeed( 1 ),
	doBackgroundSubtraction( false ),
	doMeanShiftFiltering( false ),
	blurAmount( 1 ),
	isMouseButtonDown( false ),
	isVideoPaused( true ),
	originalImage( 640, 480, CV_8UC4 )
{

	ui.setupUi(this);

	timer = Timer();	

	// NOTE(review): raw owning pointers with no visible Qt parent for
	// pipeline — confirm deletion in the destructor.
	pipeline = new ProcessingPipeline( );

	hardwareInfoDialog = new HardwareInfoDialog( this );

	// passes all events for the videoLabel to this class and handles them via a eventfilter
	ui.videoLabel->installEventFilter( this );
	ui.originalVideoLabel->installEventFilter( this );
	origin = QPoint( 0, 0 );
	// Rubber band used to select a region of interest on the video label.
	roiSelector = new QRubberBand(QRubberBand::Rectangle, ui.videoLabel);
	roiSelector->setGeometry(QRect(QPoint( 0, 0 ), QPoint( 720, 576 ) ).normalized());

	connect( ui.actionOpen_File,			SIGNAL( triggered() ),				this,		SLOT( openFile() ) );
	connect( ui.pushButton,					SIGNAL( clicked() ),				this,		SLOT( startVideo() ) );
	// TODO: fix this, it should only call this slot if the slider is moved by hand
	//connect( ui.progressBarSlider,			SIGNAL( valueChanged( int) ),		this,	SLOT( jumpToFrame( int ) ) );
	connect( ui.actionOpen_Sample,			SIGNAL( triggered() ),				this,		SLOT( openSampleFile() ) );

	connect( ui.actionHardware_Info,		SIGNAL( triggered() ),				this,		SLOT( openHardwareInfoDialog() ) );

	connect( ui.actionProcesingPipeline,	SIGNAL( triggered() ),				( QObject* ) pipeline,	SLOT( toggleProcessingPipelineConfigWidgetDisplay() ) );
	connect( ui.actionOpticalFlowSelector,	SIGNAL( triggered() ),				( QObject* ) pipeline,	SLOT( toggleOpticalFlowSelectorDialog() ) );
	connect( ui.actionOpen_Video_Stream,	SIGNAL( triggered() ),				this,		SLOT( openVideoStream() ) );


}
コード例 #13
0
// Interactive console mode: starts capture, then reads single-key commands
// ('h' help, 's' status, 'x' exit) until the user quits or video capture
// fails to start. On POSIX the terminal is switched to non-canonical mode so
// keystrokes are delivered without waiting for Enter.
// NOTE(review): the original terminal attributes are never restored on exit
// — consider saving and re-applying them.
void SyntroLCamConsole::runConsole()
{
#ifndef WIN32
	struct termios	ctty;

	// Disable canonical (line-buffered) input for immediate keystrokes.
	tcgetattr(fileno(stdout), &ctty);
	ctty.c_lflag &= ~(ICANON);
	tcsetattr(fileno(stdout), TCSANOW, &ctty);
#endif

	// If video fails to start we never enter the command loop.
	bool grabbing = startVideo();
    startAudio();

	while (grabbing) {
		printf("\nEnter option: ");

#ifdef WIN32
		switch (tolower(_getch()))
#else
        switch (tolower(getchar()))
#endif		
		{
		case 'h':
			showHelp();
			break;

		case 's':
			showStatus();
			break;

		case 'x':
			printf("\nExiting\n");
			grabbing = false;		
			break;

		case '\n':
			continue;
		}
	}
}
コード例 #14
0
ファイル: decoder.cpp プロジェクト: ehalls/xoreos
// Starts playback: kicks off decoding, then makes the video visible.
void VideoDecoder::start() {
	startVideo();

	show();
}
コード例 #15
0
// Qt timer callback: (re)starts video capture on every timer tick.
void MyCameraWindow::timerEvent(QTimerEvent*)
{
	startVideo();
}
//----------------------------------------------------------------------------------------------------------------------
/// Initializes the Kinect through libfreenect: initializes the context,
/// checks that at least one device is attached, allocates the OpenCV frame
/// buffers, opens device 0, builds the raw-depth-to-distance lookup table,
/// selects RGB/11-bit-depth modes, registers the stream callbacks, starts
/// video and depth capture, and launches the event-processing thread.
/// Returns false when no device is found.
/// NOTE(review): calls exit(EXIT_FAILURE) on hard failures — harsh for a
/// library-style class; consider returning false instead.
bool KinectInterface::init()
{
    // first see if we can init the kinect
    if (freenect_init(&m_ctx, NULL) < 0)
    {
        qDebug()<<"freenect_init() failed\n";
        exit(EXIT_FAILURE);
    }
    /// set loggin level make this programmable at some stage
    freenect_set_log_level(m_ctx, FREENECT_LOG_DEBUG);
    /// see how many devices we have
    int nr_devices = freenect_num_devices (m_ctx);
    qDebug()<<"Number of devices found: "<<nr_devices<<"\n";

    if(nr_devices < 1)
    {
        // No Kinect attached; bail out without opening anything.
        //delete s_instance;
        //s_instance = 0;
        return false;
    }
    /// now allocate the buffers so we can fill them
    m_userDeviceNumber = 0;
    // grab the buffer size and store for later use
    m_resolutionRGBBytes=freenect_find_video_mode(FREENECT_RESOLUTION_MEDIUM,FREENECT_VIDEO_RGB).bytes;
    m_bufferDepth=cvCreateMat(480,640,CV_8UC3);

    //m_bufferVideo.resize(m_resolutionRGBBytes);

    m_bufferVideo = cvCreateMat(480,640,CV_8UC3);

//    m_nextBuffer = cvCreateMat(480,640,CV_8UC1);
//    m_prevBuffer = cvCreateMat(480,640,CV_8UC1);
//    m_diffBuffer = cvCreateMat(480,640,CV_8UC1);

    m_resolutionDepthBytes=freenect_find_depth_mode(FREENECT_RESOLUTION_MEDIUM,FREENECT_DEPTH_11BIT).bytes;
    //m_bufferDepthRaw.resize(m_resolutionDepthBytes);
    m_bufferDepthRaw16=cvCreateMat(480,640,CV_8UC1);

    m_bufferDepthRaw=cvCreateMat(480,640,CV_8UC1);



   // m_originalFrameDepth=NULL;



    // 2048 entries: one per possible raw 11-bit depth reading.
    m_gamma.resize(2048);
    /// open the device at present hard coded to device 0 as I only
    /// have 1 kinect
    /// \todo make this support multiple devices at some stage
    if (freenect_open_device(m_ctx, &m_dev, m_userDeviceNumber) < 0)
    {
        qDebug()<<"Could not open device\n";
        exit(EXIT_FAILURE);
    }


    /// build the gamma table used for the depth to rgb conversion
    /// taken from the demo programs
//    for (int i=0; i<2048; ++i)
//    {
//        float v = i/2048.0;
//        v = std::pow(v, 3)* 6;
//        m_gamma[i] = v*6*256;
//    }


    // from opencv imaging imformation wiki page http://openkinect.org/wiki/Imaging_Information
    // Tangent model converting a raw 11-bit reading to a distance (metres,
    // per the comments further below).
    const float k1 = 1.1863;
    const float k2 = 2842.5;
    const float k3 = 0.1236;
    const float offset = 0.037;
    float depth = 0;
    for (size_t i=0; i<2048; i++)
    {
        depth = k3 * tanf(i/k2 + k1) - offset;
        m_gamma[i] = depth;
    }


    /// init our flags
    m_newRgbFrame=false;
    m_newDepthFrame=false;
    m_deviceActive=true;

    // presumably a default for the depth-difference threshold used by the
    // tracking code — verify against the callers
    m_threshValue = 100;


    // set our video formats to RGB by default
    /// @todo make this more flexible at some stage
    freenect_set_video_mode(m_dev, freenect_find_video_mode(FREENECT_RESOLUTION_MEDIUM, FREENECT_VIDEO_RGB));
    freenect_set_depth_mode(m_dev, freenect_find_depth_mode(FREENECT_RESOLUTION_MEDIUM, FREENECT_DEPTH_11BIT));
    // deprecated
    //freenect_set_video_format(m_dev, FREENECT_VIDEO_RGB);
    //freenect_set_depth_format(m_dev, FREENECT_DEPTH_11BIT);
    /// hook in the callbacks
    freenect_set_depth_callback(m_dev, depthCallback);
    freenect_set_video_callback(m_dev, videoCallback);
    // start the video and depth sub systems
    startVideo();
    startDepth();
    // set the thread to be active and start
    m_process = new QKinectProcessEvents(m_ctx);
    m_process->setActive();
    m_process->start();

    m_depthLower = 0.02;
    m_depthHigher = 1.02; // has to be just above the table (in meteres)

    //m_selectedBoxCoords = NULL;

    m_selectedBoxCoords = cv::Rect(0,0,0,0);

    m_toggleTracking = false;
    m_setBounds = false;

    return true;
}