コード例 #1
0
ファイル: VideoSender.cpp プロジェクト: dtbinh/dviz
// Swap this sender's video source. No-op if the source is unchanged;
// otherwise detaches from the old source before attaching to the new one.
// NOTE(review): this snippet is truncated — the function's closing brace
// is not visible here.
void VideoSender::setVideoSource(VideoSource *source)
{
	if(m_source == source)
		return;
		
	if(m_source)
		disconnectVideoSource();
	
	m_source = source;
	if(m_source)
	{	
		connect(m_source, SIGNAL(frameReady()), this, SLOT(frameReady()));
		connect(m_source, SIGNAL(destroyed()), this, SLOT(disconnectVideoSource()));
		
		// Forward camera signal-status changes when the source is a CameraThread.
		if(CameraThread *camera = dynamic_cast<CameraThread*>(m_source))
		{
			connect(camera, SIGNAL(signalStatusChanged(bool)), this, SIGNAL(signalStatusChanged(bool)));
		}
		
		//qDebug() << "GLVideoDrawable::setVideoSource(): "<<objectName()<<" m_source:"<<m_source;
		//setVideoFormat(m_source->videoFormat());
		m_consumerRegistered = false;
		
		// Pull in the first frame immediately.
		frameReady();
	}
コード例 #2
0
ファイル: V4LOutput.cpp プロジェクト: TritonSailor/livepro
// Re-point this V4L output at a new video source, detaching from any
// previous one first. A null source simply disconnects the output.
void V4LOutput::setVideoSource(VideoSource *source)
{
	if(m_source == source)
		return;

	if(m_source)
		disconnectVideoSource();

	m_source = source;

	if(!m_source)
	{
		qDebug() << "V4LOutput::setVideoSource(): "<<this<<" Source is NULL";
		return;
	}

	connect(m_source, SIGNAL(frameReady()), this, SLOT(frameReady()));
	connect(m_source, SIGNAL(destroyed()), this, SLOT(disconnectVideoSource()));

	// Prime the output with the first frame right away.
	frameReady();
}
コード例 #3
0
ファイル: capture_wgt.cpp プロジェクト: z80/chassis
// Replace the widget's video feed, rewiring the frameReady() connection.
// Passing a null pointer detaches the widget from any previous feed.
void CaptureWgt::setVideo( QXmppVideo * video )
{
    if ( pd->video )
        disconnect( pd->video, SIGNAL(frameReady()), this, SLOT(slotFrameReady()) );
    pd->video = video;
    // Fix: only connect when the new source is non-null; the original
    // called connect() with a null sender, which triggers a Qt runtime
    // warning on every null assignment.
    if ( pd->video )
        connect( pd->video, SIGNAL(frameReady()), this, SLOT(slotFrameReady()) );
}
コード例 #4
0
ファイル: VideoFilter.cpp プロジェクト: TritonSailor/livepro
// Attach this filter to a new upstream source. A null source only
// detaches from the previous one.
void VideoFilter::setVideoSource(VideoSource* source)
{
	if(source == m_source)
		return;

	if(m_source)
		disconnectVideoSource();

	m_source = source;

	if(!m_source)
	{
		//qDebug() << "VideoFilter::setVideoSource(): "<<(QObject*)this<<" Source is NULL";
		return;
	}

	connect(m_source, SIGNAL(frameReady()), this, SLOT(frameAvailable()));
	connect(m_source, SIGNAL(destroyed()),  this, SLOT(disconnectVideoSource()));
	m_source->registerConsumer(this);

	// pull in the first frame
	frameAvailable();
}
コード例 #5
0
ファイル: vplayer.cpp プロジェクト: EvorzStudios/plexydesk
// Read and decode the next packet from the stream. Emits frameReady()
// with the converted RGB frame, or videoDone() (and stops the timer)
// when av_read_frame() reports end of stream / error.
void VPlayer::decode() {
  if (av_read_frame(d->pFormatCtx, &d->packet) >= 0) {
    if (d->packet.stream_index == d->videoStream) {

      avcodec_decode_video(d->pCodecCtx, d->pFrame, &d->frameFinished,
                           d->packet.data, d->packet.size);

      if (d->frameFinished) {

        img_convert((AVPicture *)d->pFrameRGB, PIX_FMT_RGBA32,
                    (AVPicture *)d->pFrame, PIX_FMT_YUV420P,
                    d->pCodecCtx->width, d->pCodecCtx->height);
        // Fix: the previous QImage wrapper was leaked once per frame
        // (its delete was commented out). The QImage only wraps the
        // reusable pFrameRGB buffer, so deleting it does not free pixel
        // data. NOTE(review): assumes d->currentFrame starts out null —
        // confirm it is zero-initialized in the VPlayer constructor.
        delete d->currentFrame;
        d->currentFrame =
            new QImage(d->pFrameRGB->data[0], d->pCodecCtx->width,
                       d->pCodecCtx->height, QImage::Format_ARGB32);
        // d->video->setPixmap(QPixmap::fromImage(*d->currentFrame));
        emit frameReady(*d->currentFrame);
      } else {
        qDebug("Video not ready");
      }
    }
  } else {
    emit videoDone();
    d->vidtimer->stop();
  }

  av_free_packet(&d->packet);
}
コード例 #6
0
ファイル: webcams.cpp プロジェクト: Fale/qtmoko
// Poll the V4L2 device (50 ms timeout) for a filled capture buffer, wrap
// it in a QVideoFrame for listeners, then immediately re-queue the buffer
// so the driver can keep capturing.
void Preview::captureFrame()
{
    if (poll(&wc->fpolls, 1, 50) > 0) {
        // dequeue buffer
        if (ioctl(wc->fd, VIDIOC_DQBUF, &wc->buffer) == 0) {

            // NOTE(review): the QVideoFrame only wraps preview_buffer_data;
            // presumably consumers copy it before the buffer is re-queued
            // below — confirm against the slot implementations.
            QVideoFrame vframe( pixelformat, QSize(preview_buffer_width, preview_buffer_height),
                        reinterpret_cast<uchar*>(preview_buffer_data));
            emit frameReady(vframe);

            int ret = ioctl(wc->fd, VIDIOC_QBUF, &wc->buffer);

            // queue up next buffer; on failure, log the specific errno cause
            if(ret == -1) {
                if(errno == EAGAIN) {
                    qDebug()<<"nonblocking io slected O_NONBLOCK and no buffer was  in outgoing queue";
                }  else if(errno == EINVAL) {
                    qDebug()<<"buffer type not supported or index OOB or no buffers have been alloced";
                }else if(errno == ENOMEM) {
                    qDebug()<<"insufficient memory";
                }else if(errno == EIO) {
                    qDebug()<<"internal error";
                }
            }
        }
    }
}
コード例 #7
0
ファイル: CirclesModule.cpp プロジェクト: guyvdb/Multi-Touch
  /* -------------------------------------------------------------------------------------------
   * Detect circles in the incoming frame with the configured Hough method and
   * emit a copy of the frame with the detected circles drawn on top.
   * ------------------------------------------------------------------------------------------- */
  void CirclesModule::OnFrame(mtv::Module *module, const QString name, cv::Mat &matrix) {
    int cvMethod = this->methodToCvIndentifier(this->setting("method")->asString());
    if(cvMethod == 0)
      return;

    cv::Mat annotated;
    matrix.copyTo(annotated);

    // Detector tuning comes entirely from module settings.
    double resolution = this->setting("resolution")->asDouble();
    double minDist    = this->setting("distance")->asDouble();
    double cannyThres = this->setting("threshold")->asDouble();
    double voteCount  = this->setting("votes")->asDouble();
    int minRadius     = this->setting("min-radius")->asInteger();
    int maxRadius     = this->setting("max-radius")->asInteger();

    std::vector<cv::Vec3f> found;
    cv::HoughCircles(matrix, found, cvMethod, resolution, minDist, cannyThres, voteCount, minRadius, maxRadius);

    if(found.empty())
      return;

    for(std::vector<cv::Vec3f>::const_iterator c = found.begin(); c != found.end(); ++c)
      cv::circle(annotated, cv::Point((*c)[0],(*c)[1]), (*c)[2], cv::Scalar(255), 2);

    //TODO output the circules as a List of PointsList
    emit frameReady(this,"OUTPUT",annotated);
  }
コード例 #8
0
ファイル: VideoModule.cpp プロジェクト: guyvdb/Multi-Touch
/* -------------------------------------------------------------------------------------------
 *
 * ------------------------------------------------------------------------------------------- */
// Pull one frame from the capture device and emit it downstream. When the
// capture runs out (end of file), the FPS counter is reset and the video
// file is reopened so playback loops.
void VideoModule::tick() {
    cv::Mat frame;
    *this->capture >> frame;

    if(frame.data != 0x0) {
        this->frameCount++;
        emit frameReady(this, "OUTPUT", frame);
    } else {
        std::cout << ".";

        // Fix: elapsed()/1000 is integer division, so less than a full
        // second elapsed yielded 0 and crashed with a division by zero.
        int seconds = this->time.elapsed() / 1000;
        int actual = seconds > 0 ? this->frameCount / seconds : this->frameCount;

        this->frameCount = 0;
        this->time.start();

        //qDebug() << "FPS (actual) " << actual;
        (void)actual; // only used by the commented-out debug line above

        // Reopen the file to loop the video from the beginning.
        delete this->capture;
        QString filename = this->setting("filename")->asString();
        std::string file(filename.toUtf8().data());
        this->capture = new cv::VideoCapture(file);
    }
}
コード例 #9
0
// Once playback actually starts, begin listening for decoded frames and
// unwind the waiting event loop so thumbnail extraction can proceed.
void VideoThumbnailer::stateChanged(Phonon::State newState, Phonon::State oldState)
{
    Q_UNUSED(oldState);
    if (newState != Phonon::PlayingState)
        return;

    connect(&m_vdata, SIGNAL(frameReadySignal(Phonon::Experimental::VideoFrame2)),
        this, SLOT(frameReady(Phonon::Experimental::VideoFrame2)));
    m_eventLoop.exit(1);
}
コード例 #10
0
ファイル: VideoWidget.cpp プロジェクト: dtbinh/dviz
// Hook this widget up to a video source and start the paint timers.
// A null source just clears the display.
void VideoWidget::connectVideoSource(VideoSource *source)
{
	m_thread = source;
	if(!m_thread)
	{
		repaint();
		return;
	}

	// Frames arrive from the source's thread, hence the queued connection.
	connect(m_thread, SIGNAL(frameReady()), this, SLOT(frameReady()), Qt::QueuedConnection);
	connect(m_thread, SIGNAL(destroyed()), this, SLOT(sourceDestroyed()));
	m_thread->registerConsumer(this);

	m_elapsedTime.start();
	m_paintTimer.start();

	// Prime the pump with whatever frame is already available.
	frameReady();
}
コード例 #11
0
ファイル: Player.cpp プロジェクト: FihlaTV/qt-youtube
void Player::unlock(void *data, void *id, void *const *pixels)
{
    Q_UNUSED(id);
    Q_UNUSED(pixels);

    vlc_callback *callback = static_cast<vlc_callback *>(data);
    callback->mutex->unlock();
    callback->target->emit frameReady(callback);
}
コード例 #12
0
void MjpegPlayer::readImg()
{
    if (m_reply->bytesAvailable()) {
        QByteArray array = m_multiPartReader->read( m_reply->readAll() );
        if ( !array.isNull() && m_currentPixmap->loadFromData(array)) {
            emit( frameReady(m_currentPixmap) );
        }
    }
}
コード例 #13
0
ファイル: GaussianModule.cpp プロジェクト: guyvdb/Multi-Touch
  /* -------------------------------------------------------------------------------------------
   * Apply a Gaussian blur to the incoming frame using the configured kernel
   * size and weight, then emit the blurred copy.
   * ------------------------------------------------------------------------------------------- */
  void GaussianModule::OnFrame(mtv::Module *module, const QString name, cv::Mat &matrix) {
    cv::Mat blurred(matrix.size(), matrix.depth(), cv::Scalar(255));

    int kernel = this->setting("size")->asInteger();
    if(kernel % 2 != 1) kernel++; // GaussianBlur needs an odd kernel size
    cv::Size box(kernel, kernel);
    double sigma = this->setting("weight")->asDouble();

    cv::GaussianBlur(matrix, blurred, box, sigma);
    emit frameReady(this,"OUTPUT",blurred);
  }
コード例 #14
0
// Evaluate one decoded frame as a thumbnail candidate. Uninteresting
// frames keep the event loop spinning so a later frame can be tried;
// the first interesting one is kept and ends the extraction.
void VideoThumbnailer::frameReady(const Phonon::Experimental::VideoFrame2 &frame)
{
    QImage candidate = frame.qImage().scaled(m_thumbnailSize.width(), m_thumbnailSize.height(), Qt::KeepAspectRatio);
    if (!isFrameInteresting(candidate)) {
        m_eventLoop.exit(1);
        return;
    }

    m_thumbnailImage = candidate;
    m_vdata.disconnect(SIGNAL(frameReadySignal(Phonon::Experimental::VideoFrame2)),
        this, SLOT(frameReady(Phonon::Experimental::VideoFrame2)));
    m_eventLoop.quit();
}
コード例 #15
0
  /* -------------------------------------------------------------------------------------------
   * Run FAST corner detection on the incoming frame and emit a copy with the
   * detected keypoints drawn over it.
   * ------------------------------------------------------------------------------------------- */
  void FastFeaturesModule::OnFrame(mtv::Module *module, const QString name, cv::Mat &matrix) {
    std::vector<cv::KeyPoint> corners;
    cv::FastFeatureDetector detector(10);
    detector.detect(matrix, corners);

    cv::Mat annotated;
    matrix.copyTo(annotated);
    cv::drawKeypoints(matrix, corners, annotated, cv::Scalar(255,255,255), cv::DrawMatchesFlags::DRAW_OVER_OUTIMG);

    //TODO emit List of PointList
    emit frameReady(this,"OUTPUT",annotated);
  }
コード例 #16
0
 // Wire the video surface into the widget and spin up the background
 // processing thread that drives the SfM pipeline.
 void VideoWidget::initialize(QStatusBar *bar, SFMViewer *sfmViewer, SceneModel * sceneModel){

     // Repaint whenever the surface has a new frame for us.
     connect(surface, SIGNAL(frameAvailable()), this, SLOT(frameReady()));

     // Processing thread reports per-frame progress and congestion back here.
     processor = new ProcessingThread(this);
     connect(processor, SIGNAL(frameProcessed()), this, SLOT(onFrameProcessed()));
     connect(processor, SIGNAL(queueFull()), this, SLOT(onThreadCongested()));

     processor->initialize(bar, sceneModel);
     processor->setUpdateListener(sfmViewer);

     processor->start();
}
コード例 #17
0
ファイル: JpegServer.cpp プロジェクト: dtbinh/dviz
// Spawn one worker thread per connected client; the thread deletes itself
// when it finishes. Rendered frames are forwarded to every client thread
// via the queued frameReady(QImage) connection.
void JpegServer::incomingConnection(int socketDescriptor)
{
	JpegServerThread *thread = new JpegServerThread(socketDescriptor, m_adaptiveWriteEnabled);
	connect(thread, SIGNAL(finished()), thread, SLOT(deleteLater()));
	// Queued: frames are produced on this thread but written to the socket
	// on the worker thread.
	connect(this, SIGNAL(frameReady(QImage)), thread, SLOT(imageReady(QImage)), Qt::QueuedConnection);
	thread->start();
	qDebug() << "JpegServer: Client Connected, Socket Descriptor:"<<socketDescriptor;
	
	
	// Push the QThread object's own affinity into the thread it runs, so
	// its queued slots (imageReady) execute there. NOTE(review): calling
	// moveToThread(thread) on an already-started thread is an unusual
	// pattern — confirm it is intentional before touching this.
	thread->moveToThread(thread);
	
	if(!m_timer.isActive())
		m_timer.start();
}
コード例 #18
0
ファイル: VideoWidget.cpp プロジェクト: dtbinh/dviz
// Replace the overlay source; passing null simply disconnects the
// current overlay.
void VideoWidget::setOverlaySource(VideoSource *source)
{
	if(m_overlaySource)
		disconnectOverlaySource();

	m_overlaySource = source;
	if(!m_overlaySource)
		return;

	// Queued connection: overlay frames arrive from the source's thread.
	connect(m_overlaySource, SIGNAL(frameReady()), this, SLOT(overlayFrameReady()), Qt::QueuedConnection);
	connect(m_overlaySource, SIGNAL(destroyed()), this, SLOT(sourceDestroyed()));
	m_overlaySource->registerConsumer(this);

	// Pull in the first overlay frame immediately.
	overlayFrameReady();
}
コード例 #19
0
/*!
 * \fn UvAbstractFrameProcessor::UvAbstractFrameProcessor(UvAbstractFrameTransceiver *transceiver)
 * \brief Construct a frame processor.
 *
 * Wires the transceiver's frameReady() signal to receiveFrame() and starts
 * the background parsing and sending threads.
 */
UvAbstractFrameProcessor::UvAbstractFrameProcessor(UvAbstractFrameTransceiver *transceiver)
{
    d = new UvAbstractFrameProcessorPrivate;
    UV_CHK_PTR(d);

    // The transceiver is mandatory; UV_CHK_PTR guards the pointer.
    d->transceiver = transceiver;
    UV_CHK_PTR(d->transceiver);
    connect(d->transceiver, SIGNAL(frameReady()), this, SLOT(receiveFrame()));

    //create and start frame parsing and sending thread
    d->parsingThread = new UvFrameParsingThread(this);
    UV_CHK_PTR(d->parsingThread);
    d->parsingThread->start();
    d->sendingThread = new UvFrameSendingThread(this);
    UV_CHK_PTR(d->sendingThread);
    d->sendingThread->start();

    // No artificial delay between sent frames by default.
    d->sendingDelayTime = 0;
}
コード例 #20
0
void GLImageHttpDrawable::setUrl(const QString& url)
{
	m_url = url;
	if(url.isEmpty())
		return;
		
	QUrl parsedUrl(url);
	//qDebug() << "GLImageHttpDrawable::setUrl: URL Sceme:"<<parsedUrl.scheme();
	if(parsedUrl.scheme() == "raw")
	{
		qDebug() << "GLImageHttpDrawable::setUrl: Connecting to RAW host: "<<parsedUrl;
		
		releaseVideoReceiver();
		
		m_rx = VideoReceiver::getReceiver(parsedUrl.host(), parsedUrl.port());
		if(m_rx)
		{
			m_rx->registerConsumer(this);
			connect(m_rx, SIGNAL(frameReady()), this, SLOT(videoRxFrameReady()));
		}
		
		qDebug() << "GLImageHttpDrawable::setUrl: Got receiver:"<<m_rx;
		emit videoReceiverChanged(m_rx);
		m_pollDvizTimer.stop();
		m_pollImageTimer.stop();
		
		// Pull in first frame if one is available
		videoRxFrameReady();
	}
	else
	//if(liveStatus())
	{
		releaseVideoReceiver();
		
		qDebug() << "GLImageHttpDrawable::setUrl: Connecting to HTTP host: "<<parsedUrl;
		if(m_pollDviz)
			initDvizPoll();
		else
			initImagePoll();
	}
}
コード例 #21
0
ファイル: VideoWidget.cpp プロジェクト: dtbinh/dviz
// Begin a cross-fade. When switchThreads is true, the current source is
// parked as m_oldThread (still rendered during the fade) while the widget
// waits for a new source; the fade timers then animate m_opacity.
void VideoWidget::fadeStart(bool switchThreads)
{
	// If we're fading in with no thread to start with,
	// then we can't very well use the m_oldThread ptr now can we? :-)
	if(m_thread && switchThreads)
	{
		// Detach every existing connection from the current thread before
		// repurposing it as the outgoing ("old") source.
		disconnect(m_thread, 0, this, 0);
		
		m_oldThread = m_thread;
		m_oldSourceRect = m_sourceRect;
		m_oldTargetRect = m_targetRect;
		
		m_thread = 0;
		
		// Old source keeps feeding frames via the dedicated old-frame slot.
		connect(m_oldThread, SIGNAL(frameReady()), this, SLOT(oldFrameReady()));
	}
	
	// Fade-to-black starts fully opaque and fades out; a normal cross-fade
	// starts transparent and fades the new content in.
	if(!m_fadeToBlack)
		m_opacity = 0.0;
	else
		m_opacity = 1.0;

		
	// draw the old thread in the current threads place - see drawing code
	
// 	qDebug() << "VideoWidget::fadeStart("<<switchThreads<<"): m_opacity:"<<m_opacity;
	m_fadeTimer.start();
	
	m_fadeElapsed.start();
	m_predictedFadeClock = 0;
	
	// Predicted clock advances one frame interval per tick, using the
	// forced FPS when set, otherwise a 30 fps default.
	double fps = m_forceFps > 0.0 ? m_forceFps : 30.0;
	//double sec = (m_fadeLength > 0 ? m_fadeLength : 1000.0) / 1000.0;
	m_predictedClockInc = 1000.0 / fps;
	
	
	// Prime the old-source path so the first fade frame isn't blank.
	if(m_oldThread)
		oldFrameReady();
}
コード例 #22
0
ファイル: CameraModule.cpp プロジェクト: guyvdb/Multi-Touch
    /* -------------------------------------------------------------------------------------------
     * Grab one frame from the camera and forward it downstream. Every 100
     * frames a snapshot is saved and the capture frame rate is recomputed.
     * ------------------------------------------------------------------------------------------- */
    void CameraModule::tick() {
      cv::Mat frame;

      qDebug() << "tick";

      if(this->capture->read(frame)) {
        if(frame.data != 0x0) {
          this->frameCount++;
          qDebug() << "data ok";
          emit frameReady(this, "OUTPUT", frame);
        }

        if(frameCount > 100) {
          this->save("camera",frame);
          // Fix: elapsed()/1000 truncates to 0 when less than a second has
          // elapsed, which made the division below crash. Only recompute the
          // rate once at least one full second has passed.
          int seconds = this->time.elapsed() / 1000;
          if(seconds > 0)
            this->frameRate = this->frameCount / seconds;
          this->frameCount = 0;
          this->time.start();
          qDebug() << "device: " << QString::number(this->setting("device")->asInteger()) <<  " fps: " << this->frameRate;
        }
      }

    }
コード例 #23
0
ファイル: webcams.cpp プロジェクト: Fale/qtmoko
// Capture one frame via the V4L1 mmap interface. With multi-buffer devices
// the next capture is kicked off before syncing the current frame (double
// buffering); single-buffer devices re-queue after the frame is emitted.
void V4L1Preview::captureFrame()
{
    // Start capturing the next frame (we alternate between 0 and 1).
    int frame = wc->currentFrame;
    struct video_mmap capture;
    if ( wc->mbuf.frames > 1 ) {
        wc->currentFrame = !wc->currentFrame;
        capture.frame = wc->currentFrame;
        capture.width = wc->width;
        capture.height = wc->height;
        capture.format = VIDEO_PALETTE_RGB32;
        ioctl( wc->fd, VIDIOCMCAPTURE, &capture );
    }

    // Wait for the current frame to complete.
    ioctl( wc->fd, VIDIOCSYNC, &frame );


    // Publish the mmap'ed frame data; the QVideoFrame wraps the buffer
    // in place rather than copying it.
    preview_buffer_width = wc->width;
    preview_buffer_height = wc->height;
    preview_buffer_data = wc->frames + wc->mbuf.offsets[frame];

    QVideoFrame vframe(QVideoFrame::Format_RGB32, QSize(wc->width, wc->height),
                        reinterpret_cast<uchar*>(preview_buffer_data));
    emit frameReady(vframe);

    // Queue up another frame if the device only supports one at a time.
    if ( wc->mbuf.frames <= 1 ) {
        capture.frame = wc->currentFrame;
        capture.width = wc->width;
        capture.height = wc->height;
        capture.format = VIDEO_PALETTE_RGB32;
        ioctl( wc->fd, VIDIOCMCAPTURE, &capture );
    }

}
コード例 #24
0
ファイル: CannyModule.cpp プロジェクト: guyvdb/Multi-Touch
 /* -------------------------------------------------------------------------------------------
  * Run Canny edge detection on the incoming frame using the configured
  * thresholds and emit the resulting edge map.
  * ------------------------------------------------------------------------------------------- */
 void CannyModule::OnFrame(mtv::Module *module, const QString name, cv::Mat &matrix) {
   cv::Mat edges(matrix.size(), matrix.depth(), cv::Scalar(255));
   int lower = this->setting("lower-threshold")->asInteger();
   int upper = this->setting("upper-threshold")->asInteger();
   cv::Canny(matrix, edges, lower, upper);
   emit frameReady(this,"OUTPUT",edges);
 }
コード例 #25
0
ファイル: JpegServer.cpp プロジェクト: dtbinh/dviz
// Render the current scene to a fixed-size QImage and broadcast it to all
// connected clients (via frameReady) and the optional VideoSender. When
// only-render-on-slide-change is enabled, an unchanged slide is served
// from m_cachedImage without re-rendering.
void JpegServer::generateNextFrame()
{
	if(!m_scene || !MainWindow::mw())
		return;
		
	// Fast path: slide unchanged and a cached render exists.
	if(m_onlyRenderOnSlideChange &&
	   !m_slideChanged &&
	   !m_cachedImage.isNull())
	{
		//qDebug() << "JpegServer::generateNextFrame(): Hit Cache";
		emit frameReady(m_cachedImage);
		if(m_sender)
			m_sender->transmitImage(m_cachedImage);
		return;
	}
	
	// Cache miss: clear the dirty flag before re-rendering.
	if(m_onlyRenderOnSlideChange)
	{
		m_slideChanged = false;
		//qDebug() << "JpegServer::generateNextFrame(): Cache fallthru ...";
	}
	
	//qDebug() << "JpegServer::generateNextFrame(): Rendering scene "<<m_scene<<", slide:"<<m_scene->slide();
	
	m_time.start();
	
	QImage image(FRAME_WIDTH,
	             FRAME_HEIGHT,
		     FRAME_FORMAT);
	memset(image.scanLine(0), 0, image.byteCount());
	
	// Antialiasing is disabled deliberately for render speed.
	QPainter painter(&image);
	painter.fillRect(image.rect(),Qt::transparent);
	painter.setRenderHint(QPainter::SmoothPixmapTransform, false);
	painter.setRenderHint(QPainter::Antialiasing, false);
	painter.setRenderHint(QPainter::TextAntialiasing, false);
	
	if(!m_sourceRect.isValid())
		m_sourceRect = MainWindow::mw()->standardSceneRect();
		
	if(m_sourceRect != m_targetRect)
		updateRects();
	
	m_scene->render(&painter,
		m_targetRect,
		m_sourceRect);
	
	painter.end();
	
 	emit frameReady(image);
	
	if(m_sender)
	{
		//qDebug() << "JpegServer::generateNextFrame(): Sending image via VideoSender";
		m_sender->transmitImage(image);
	}
	else
	{
		//qDebug() << "JpegServer::generateNextFrame(): No VideoSender created";
	}
	
	// Remember the render so the fast path above can reuse it.
	if(m_onlyRenderOnSlideChange)
		m_cachedImage = image;
	
// 	QImageWriter writer("frame.png", "png");
// 	writer.write(image);

	// Frame/time accumulators feed the (currently disabled) perf logging below.
	m_frameCount ++;
	m_timeAccum  += m_time.elapsed();
	
// 	if(m_frameCount % (m_fps?m_fps:10) == 0)
// 	{
// 		QString msPerFrame;
// 		msPerFrame.setNum(((double)m_timeAccum) / ((double)m_frameCount), 'f', 2);
// 	
// 		qDebug() << "JpegServer::generateNextFrame(): Avg MS per Frame:"<<msPerFrame<<", threadId:"<<QThread::currentThreadId();
// 	}
// 			
// 	if(m_frameCount % ((m_fps?m_fps:10) * 10) == 0)
// 	{
// 		m_timeAccum  = 0;
// 		m_frameCount = 0;
// 	}
	
	//qDebug() << "JpegServer::generateNextFrame(): Done rendering "<<m_scene;
}
コード例 #26
0
ファイル: logger.cpp プロジェクト: lmhtz/bldc-logger
// Construct the BLDC logger: opens the serial port and log files, starts
// the frame-grabber/plotter/encoder threads, sets up audio capture (or a
// fallback timer when no audio device exists), and wires every packet-
// interface signal to its logging slot.
Logger::Logger(QObject *parent) :
    QObject(parent)
{
    mPort = new SerialPort(this);
    mPacketInterface = new PacketInterface(this);

    // Plain-text logs for sampled values and printf-style device output.
    mValueFile = new QFile("Data/BLDC_Values");
    mPrintFile = new QFile("Data/BLDC_Print");

    mValueFile->open(QIODevice::WriteOnly | QIODevice::Text);
    mPrintFile->open(QIODevice::WriteOnly | QIODevice::Text);

    mValueStream = new QTextStream(mValueFile);
    mPrintStream = new QTextStream(mPrintFile);

    // NOTE(review): hard-coded device path; fails silently if absent.
    mPort->openPort("/dev/ttyACM0");

    // Video
    mVidW = 1280;
    mVidH = 720;
    mVidFps = 25.0;
    mFAudioSamp = 44100;

    // Capture, plotting and encoding each run in their own thread.
    mFrameGrabber = new FrameGrabber(mVidW, mVidH, mVidFps, 0, this);
    mFrameGrabber->start(QThread::InheritPriority);
    mPlotter = new FramePlotter(this);
    mPlotter->start(QThread::InheritPriority);

    mCoder = new VideoCoder(mVidW, mVidH, mVidFps, "Data/v_video.avi", this);
    mCoder->start(QThread::InheritPriority);

    // Audio recording
    mTimer = 0;
    mAudio = 0;

    if (QAudioDeviceInfo::availableDevices(QAudio::AudioInput).size() > 0) {
        mAudioFile.setFileName("Data/v_audio.raw");
        mAudioFile.open(QIODevice::WriteOnly | QIODevice::Truncate);

        QAudioFormat format;
        // Set up the desired format, for example:
        format.setSampleRate(mFAudioSamp);
        format.setChannelCount(1);
        format.setSampleSize(8);
        format.setCodec("audio/pcm");
        format.setByteOrder(QAudioFormat::LittleEndian);
        format.setSampleType(QAudioFormat::UnSignedInt);

        QAudioDeviceInfo info = QAudioDeviceInfo::defaultInputDevice();
        if (!info.isFormatSupported(format)) {
            qWarning() << "Default format not supported, trying to use the nearest.";
            format = info.nearestFormat(format);
        }

        // QAudioInput's notify() doubles as the video-frame pacing clock.
        mAudio = new QAudioInput(format, this);
        mAudio->setNotifyInterval(1000 / mVidFps);
        mAudio->start(&mAudioFile);
    } else {
        // No audio device: pace the video with a plain timer instead.
        mTimer = new QTimer(this);
        mTimer->setInterval(1000 / mVidFps);
        mTimer->start();
    }

    mConsoleReader = new ConsoleReader(this);

    connect(mConsoleReader, SIGNAL(textReceived(QString)),
            this, SLOT(consoleLineReceived(QString)));

    connect(mPort, SIGNAL(serial_data_available()),
            this, SLOT(serialDataAvailable()));

    // Exactly one of mTimer/mAudio is non-null (set above).
    if (mTimer != 0) {
        connect(mTimer, SIGNAL(timeout()), this, SLOT(timerSlot()));
    }

    if (mAudio != 0) {
        connect(mAudio, SIGNAL(notify()),
                this, SLOT(audioNotify()));

        // Lower the volume to avoid clipping. This seems to be passed to
        // pulseaudio.
        mAudio->setVolume(0.1);
    }

    // Route every decoded packet type to its logging slot.
    connect(mPacketInterface, SIGNAL(dataToSend(QByteArray&)),
            this, SLOT(packetDataToSend(QByteArray&)));
    connect(mPacketInterface, SIGNAL(valuesReceived(PacketInterface::MC_VALUES)),
            this, SLOT(mcValuesReceived(PacketInterface::MC_VALUES)));
    connect(mPacketInterface, SIGNAL(printReceived(QString)),
            this, SLOT(printReceived(QString)));
    connect(mPacketInterface, SIGNAL(samplesReceived(QByteArray)),
            this, SLOT(samplesReceived(QByteArray)));
    connect(mPacketInterface, SIGNAL(rotorPosReceived(double)),
            this, SLOT(rotorPosReceived(double)));
    connect(mPacketInterface, SIGNAL(experimentSamplesReceived(QVector<double>)),
            this, SLOT(experimentSamplesReceived(QVector<double>)));

    // Plotter frames feed straight into the video encoder.
    connect(mPlotter, SIGNAL(frameReady(QImage)),
            mCoder, SLOT(setNextFrame(QImage)));
}
コード例 #27
0
ファイル: BinaryModule.cpp プロジェクト: guyvdb/Multi-Touch
 /* -------------------------------------------------------------------------------------------
  * Threshold the incoming frame to a binary mask using the configured
  * threshold value and emit the result.
  * ------------------------------------------------------------------------------------------- */
 void BinaryModule::OnFrame(mtv::Module *module, const QString name, cv::Mat &matrix) {
   cv::Mat mask(matrix.size(), matrix.depth(), cv::Scalar(255));
   int cutoff = this->setting("threshold")->asInteger();
   cv::threshold(matrix, mask, cutoff, 255, cv::THRESH_BINARY);
   emit frameReady(this,"OUTPUT",mask);
 }