Example #1
GLDualCamView::GLDualCamView(QWidget *parent)
	: QGLWidget(parent)
{
	isCaptureEnabled = false;
	isProcessingEnabled = false;
	_isFilterInited = false;
	
	_capture = 0;
	_filter = 0;

	// Get a handle to the first available camera.
	_capture = cvCaptureFromCAM( 0 );
	if( !_capture )
	{
		// If the handle is invalid, bail out of the constructor.
		qDebug() << "Failed to acquire a camera handle...\n";
		return;
	}

	_rawFrame = 0;
	_processedFrame = 0;

	/* Setup the timer. */
	_timer = new QTimer();

	connect(_timer, SIGNAL(timeout()), this, SLOT(captureFrame())); 

	_timer->start(33); /* ~30fps */
}
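
The captureFrame() slot that the timer drives is not part of this listing. A minimal sketch of what such a slot could look like with the same legacy OpenCV C API (the body below is an assumption, not the original source):

void GLDualCamView::captureFrame()
{
	if (!_capture)
		return;
	// cvQueryFrame() grabs and decodes one frame; the returned IplImage
	// is owned by the capture handle and must not be released here.
	_rawFrame = cvQueryFrame(_capture);
	if (_rawFrame)
		updateGL(); // schedule a repaint with the fresh frame
}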
Example #2
void IdleCallback()
{
	XnStatus nRetVal = XN_STATUS_OK;
	if (g_bPause != TRUE)
	{
		// read a frame
		readFrame();
        
		// capture if needed
		nRetVal = captureFrame();
		if (nRetVal != XN_STATUS_OK)
		{
			displayMessage("Error capturing frame: '%s'", xnGetStatusString(nRetVal));
		}
        
		// add to statistics
		//statisticsAddFrame();
	}
    
	if (g_bStep == TRUE)
	{
		g_bStep = FALSE;
		g_bPause = TRUE;
	}
    
	glutPostRedisplay();
}
Example #3
void Camera::start() {

    /* start the timer-driven event loop that captures fresh images */
    eventLoopTimer = new QTimer();
    connect(eventLoopTimer, SIGNAL(timeout()), this, SLOT(captureFrame()));
    eventLoopTimer->start();
}
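
With no argument, QTimer::start() uses the timer's current interval, which defaults to 0 ms, so captureFrame() fires on every pass of the event loop. To cap the capture rate instead, pass the interval explicitly:

eventLoopTimer->start(33); // ~30 fps, as in Example #1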
Example #4
void VideoInput::process()
{
    bool newDecoderCreated = false;

    if (switchPending_.exchange(false)) {
        deleteDecoder();
        createDecoder();
        newDecoderCreated = true;
    }

    if (not decoder_) {
        loop_.stop();
        return;
    }

    captureFrame();

    if (newDecoderCreated) {
        /* Signal the client about the new sink */
        Manager::instance().getVideoManager()->startedDecoding(sinkID_, sink_.openedName(),
                decoder_->getWidth(), decoder_->getHeight(), false);
        DEBUG("LOCAL: shm sink <%s> started: size = %dx%d",
              sink_.openedName().c_str(), decoder_->getWidth(),
              decoder_->getHeight());
    }
}
Example #5
Spectrometer::Spectrometer(int videoSrc)
{
    iFrame = 0;
    nFramesInteg = 1;

    measuring = false;
    //useReference = false;
    isXCalibrated = false;
    //isYCalibrated = false;

    spectrumRect = QRect(QPoint(10,100),QPoint(300,130)).normalized();
    pixData.resize(spectrumRect.width()-1);
    pixDataRed.resize(spectrumRect.width()-1);
    pixDataGreen.resize(spectrumRect.width()-1);
    pixDataBlue.resize(spectrumRect.width()-1);
    pixNum.resize(spectrumRect.width()-1);

    cap=cv::VideoCapture(videoSrc);
    if(cap.isOpened()){
        ok=true;
        connect(&timerCapture,SIGNAL(timeout()),this,SLOT(captureFrame()));
        cap.set(CV_CAP_PROP_GAIN,0.5);
        //cap.set(CV_CAP_PROP_AUTO_EXPOSURE,0);
        //cap.set(CV_CAP_PROP_FPS,10);
        cap.set(CV_CAP_PROP_SATURATION,0);
    }
    else {
        ok=false;
    }
}
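
The CV_CAP_PROP_* constants above come from the legacy C API headers; under OpenCV 3 and later the same setup would read as follows (a sketch mirroring the values used above):

cv::VideoCapture cap(videoSrc);
if (cap.isOpened()) {
    cap.set(cv::CAP_PROP_GAIN, 0.5);     // same gain as above
    cap.set(cv::CAP_PROP_SATURATION, 0); // same saturation as above
}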
Example #6
void
VideoInput::process()
{
    if (switchPending_)
        createDecoder();

    if (not captureFrame()) {
        loop_.stop();
        return;
    }
}
Example #7
odcore::data::image::SharedImage Camera::capture() {
    if (isValid()) {
        if (captureFrame()) {
            if (m_sharedMemory.get() && m_sharedMemory->isValid()) {
                m_sharedMemory->lock();
                copyImageTo((char*) m_sharedMemory->getSharedMemory(), m_size);
                m_sharedMemory->unlock();
            }
        }
    }
    return m_sharedImage;
}
Example #8
void ofApp::draw(){
    fbo.begin();
    drawAnim();
    fbo.end();
    captureFrame();
    fbo.draw(0, 0);
    ofDrawBitmapString(
        "Recording to frame #" +
        ofToString(saveOnFrame) +
        " at " +
        ofToString(framerate) +
        "fps...\nCurrent frame: " +
        ofToString(ofGetFrameNum()) +
        "\n" + nowSaved,
        20, height - 50);
}
Example #9
///////////////////////////////////////////////////////////////////////
//the actual render function, which is called for each frame
void renderScene(void)
{
  glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

  glMatrixMode(GL_PROJECTION);
  glPushMatrix();
  gluLookAt(0.0,0.0,1.0,0.0,0.0,-1.0,0.0f,1.0f,0.0f);
  glMatrixMode(GL_MODELVIEW);
  glPushMatrix();

  glLightfv(GL_LIGHT0, GL_POSITION, lpos);
  glTranslatef(move_x, move_y, 0.0);
  glTranslatef(0.0, 0.0, translate_z);
  glRotatef(rotate_x, 1.0, 0.0, 0.0);
  glRotatef(rotate_y, 0.0, 1.0, 0.0);
  GL_CHECK(glUseProgram(p));
  GLint loc = glGetUniformLocation(p, "level");
  if (loc != -1)
  {
    GL_CHECK(glUniform1i(loc, subdivLevel));
  }
  gettimeofday(&time_, NULL);
  loc = glGetUniformLocation(p, "time");
  if (loc != -1)
  {
    GL_CHECK(glUniform1f(loc, time_.tv_usec/100000.0));
  }
  glPolygonMode( GL_FRONT_AND_BACK, GL_LINE );
  glutSolidTeapot(0.5);
  GL_CHECK(glUseProgram(0));

  glMatrixMode(GL_MODELVIEW);
  glPopMatrix();
  glMatrixMode(GL_PROJECTION);
  glPopMatrix();


  //helper function for submission. Will capture 2nd frame of task
  frameCaptured++;
  if (frameCaptured == 2)
  {
    renderID(win_width, win_height);
    captureFrame();
  }
  GL_CHECK(glutSwapBuffers());
}
Example #10
///////////////////////////////////////////////////////////////////////
//the actual render function, which is called for each frame
void renderScene(void)
{
  glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

  glMatrixMode(GL_PROJECTION);
  glPushMatrix();
  gluLookAt(0.0, 0.0, 1.0, 0.0, 0.0, -1.0, 0.0f, 1.0f, 0.0f);
  glMatrixMode(GL_MODELVIEW);
  glPushMatrix();

  glLightfv(GL_LIGHT0, GL_POSITION, lpos);
  /////////////////////////////////////////////////
  //TODO add scene interaction code here

  //Middle button translation
  glTranslatef(move_x, move_y, 0.0);

  //Left button rotation
  glRotatef(rotate_y, 1.0, 0.0, 0.0);
  glRotatef(rotate_x, 0.0, 1.0, 0.0);

  //Right button zoom
  glTranslatef(0.0, 0.0, translate_z);


  /////////////////////////////////////////////////
  GL_CHECK(glUseProgram(p));
  glutSolidTeapot(0.5);
  GL_CHECK(glUseProgram(0));

  glMatrixMode(GL_MODELVIEW);
  glPopMatrix();
  glMatrixMode(GL_PROJECTION);
  glPopMatrix();

  //helper function for submission. Will capture 2nd frame of task
  frameCaptured++;
  if (frameCaptured == 2)
  {
    renderID(win_width, win_height);
    captureFrame();
  }

  GL_CHECK(glutSwapBuffers());
}
Example #11
void AbstractImageGrabber::grab()
{
    QImage frame; // stores the grabbed image
    QEventLoop latencyLoop;
    QElapsedTimer timer;

    if (!m_timer) {
        timer.start();
    }

    m_prevPts = -1;
    int pts = -1;

    Q_FOREVER {
        frame = captureFrame();
        
        setGrabbedFrameCount(grabbedFrameCount() + 1);
        
        pts = m_timer ? m_timer->elapsed() : timer.elapsed();
        if (m_prevPts != pts) {
            m_prevPts = pts;
            Q_EMIT frameAvailable(frame, pts);
        }

        //check if we must finish grabbing
        if (isStopRequest() || isPauseRequest())
            break;

        //wait for the user-specified latency, in milliseconds
        QTimer::singleShot(latency(), &latencyLoop, SLOT(quit()));
        latencyLoop.exec();
    }

    setState(isStopRequest() ? AbstractGrabber::StoppedState : AbstractGrabber::SuspendedState);

    if (isStopRequest())
        m_prevPts = -1;

    //reset stop and pause flags
    setStopRequest(false);
    setPauseRequest(false);
}
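
The QTimer::singleShot() plus nested QEventLoop pair at the bottom of the loop is a "responsive sleep": unlike QThread::msleep(), it keeps delivering queued signals (such as the stop and pause requests checked above) while waiting. The idiom in isolation, as a hypothetical helper:

void responsiveSleep(int ms)
{
    QEventLoop loop;
    QTimer::singleShot(ms, &loop, SLOT(quit()));
    loop.exec(); // returns once the single-shot timer fires
}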
Example #12
void ofApp::draw(){
    fbo.begin();
    drawAnim();
    fbo.end();
    
    if(!renderingNow) {
        captureFrame();
    }
    
    ofSetColor(255, 255, 255, 255);
    fbo.draw(0, 0);
    ofDrawBitmapString(
                       "Recording to frame #" +
                       ofToString(saveOnFrame) +
                       " at " +
                       ofToString(ofGetFrameRate()) +
                       "fps...\nCurrent frame: " +
                       ofToString(ofGetFrameNum()) +
                       "\n" + renderMessage,
                       20, height - 50);
}
Example #13
bool CameraManager::update() { 
	if(!camera) {
		ofLog(OF_LOG_ERROR, "CameraManager not initialised"); 
		return false; 
	}
	
//	cout << toggleShowUSBControls->value.getValueB() << endl; 
//
    /*
	if(toggleShowUSBControls->value.getValueB()){
		
		cameraVidGrabber->videoSettings(); 
		toggleShowUSBControls->value.setValue(0); 
		toggleShowUSBControls->update();
	}*/

	if(camera->getGain()!=gainParam) camera->setGain(gainParam);
	if(camera->getGamma()!=gammaParam) camera->setGamma(gammaParam);
	if(camera->getShutter()!=shutterParam) camera->setShutter(shutterParam);
	if(camera->getBrightness()!=brightnessParam) camera->setBrightness(brightnessParam);
	
	bool updateCamera = camera->update();
    if( updateCamera && capturing ) {
        captureFrame();
    }
	/*
	for(int i = 0; i<cameras.size(); i++) {
		
		CameraIP * cam = dynamic_cast<CameraIP*>(cameras[i]);
		if((cam) && (cam!=camera)) {
			cam->update();
			
		}
	}*/
	

    return updateCamera;
}
Example #14
//--------------------------------------------------------------
void testApp::keyReleased(int key){
    switch (key) {
        case ' ':
            captureFrame();
            break;
        case 's':
            cout <<"start saving\n" << endl;
            gifEncoder.save("gifs/"+ofGetTimestampString()+".gif");
            saveImages();
            gifEncoder.reset();
            
            break;
        default:
            break;

        //case 'p':
        //    break;
    }
}
Example #15
void Lepton::startCapture() {

    std::chrono::time_point<std::chrono::system_clock> start, end;
    std::chrono::duration<float> elapsed_seconds;
    float leptonPeriodSeconds = 1.0f / LEPTON_FPS; // 1.0f guards against integer division if LEPTON_FPS is an int

    // regularly poll the lepton
    while (1) {
        start = std::chrono::system_clock::now();

        captureFrame();

        end = std::chrono::system_clock::now();
        elapsed_seconds = end-start;

        if (elapsed_seconds.count() < leptonPeriodSeconds) {
            // Cast after multiplying: casting the sub-second remainder to
            // unsigned int first would truncate it to zero.
            unsigned int sleepTimeMicros = (unsigned int) ((leptonPeriodSeconds - elapsed_seconds.count()) * 1000 * 1000);
            usleep(sleepTimeMicros);
        }

    }

}
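
The same pacing can be written with <chrono> and <thread> alone, avoiding the manual microsecond conversion and the cast (a sketch, assuming LEPTON_FPS is the numeric frame rate used above):

const auto period = std::chrono::duration<float>(1.0f / LEPTON_FPS);
while (true) {
    auto start = std::chrono::steady_clock::now();
    captureFrame();
    // sleep_until() returns immediately if the frame already overran the period
    std::this_thread::sleep_until(start + period);
}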
Example #16
int main(int argc, char **argv)
{
    if(argc != 6) {
        std::cout << "usage: " << argv[0]
                  << " <sensor device> <actuator device> <camera device #> <port> <mode: t|r>"
                  << std::endl;
        return 1;
    }
    const char *sensorDeviceName = argv[1];
    const char *actuatorDeviceName = argv[2];
    const int cameraDeviceNum = atoi(argv[3]);
    const int port = atoi(argv[4]);
    const char *mode = argv[5];

    bool showThermal = false;
    bool showRGB = false;
    if(mode[0] == 't') {
        showThermal = true;
    } else if(mode[0] == 'r') {
        showRGB = true;
    } else {
        throw "unknown mode (expected 't' for thermal or 'r' for RGB)";
    }

    std::cout << "blah" << std::endl;

    std::shared_ptr<ApplicationCore> core = ApplicationCore::instantiate();

    auto sc = std::make_shared<ThermalSensorController>(core, sensorDeviceName, 115200);
    auto rc = std::make_shared<RgbController>(core, cameraDeviceNum);
    auto ac = std::make_shared<ActuatorController>("/dev/tty.usbserial-A9S3VTXD");
    auto ns = std::make_shared<NetService>(core);

    sc->init();
    rc->init();
    ac->init();
    ns->init(port);

    boost::asio::deadline_timer timer(*core->getIOService());
    std::function<void(const boost::system::error_code&)> captureStuff;
    GyroReading gyroReading;
    ThermoReading thermoReading;
    captureStuff = [&](const boost::system::error_code& /*e*/) { 
        //
        cv::Vec2d pos = ac->getCurrentPosition();

        rc->captureFrame();
        auto rgbFrame = rc->popFrame();

        if(showRGB && rgbFrame->rows > 0) {
            rapidjson::Document doc;
            auto &aloc = doc.GetAllocator();
            doc.SetObject();

            cv::Mat imgMat(rgbFrame->rows, rgbFrame->cols, CV_8UC4, cv::Scalar::all(0.0));
            cv::cvtColor(*rgbFrame, imgMat, CV_BGR2RGBA, 4); 

            cv::Size size(rgbFrame->cols*0.2, rgbFrame->rows*0.2);
            cv::resize(imgMat, imgMat, size);

            std::string imgDataB64 = tobase64(imgMat.data, imgMat.total()*4*sizeof(byte));
            rapidjson::Value val;
            val.SetString(imgDataB64.c_str(), doc.GetAllocator());
            doc.AddMember("data", val, aloc);

            doc.AddMember("type", "rgb_data", aloc);
            doc.AddMember("yaw", -pos[0], aloc);
            doc.AddMember("pitch", -pos[1], aloc);
            doc.AddMember("dataWidth", imgMat.cols, aloc);
            doc.AddMember("dataHeight", imgMat.rows, aloc);
            doc.AddMember("yawSize", 63.625, aloc);
            doc.AddMember("pitchSize", 35.789, aloc);

            ns->sendWSDoc(doc);
        }


        /*if(sc->popGyroReading(gyroReading)) {
            printf("Roll: %f, Pitch: %f, Yaw: %f.\n",
                gyroReading.roll, gyroReading.pitch, gyroReading.yaw
            );
        }*/
        sc->requestThermoReading();

        std::cout << "tick: " << timer.expires_at() << std::endl;

        if(showThermal && sc->popThermoReading(thermoReading)){
            rapidjson::Document doc;
            auto &aloc = doc.GetAllocator();
            doc.SetObject();
            doc.AddMember("type", "thermo_data", aloc);
            doc.AddMember("yaw", -pos[0], aloc);
            doc.AddMember("pitch", -pos[1], aloc);

            cv::Mat imgMat(4, 16, CV_8UC4, cv::Scalar::all(0.0));
            cv::Mat mat = thermoReading.img;

            for(int i = 0; i < mat.total(); i++) {
                int y = 3-(i%4);
                int x = i/4;
                double temp = mat.at<float>(0, i);

                if(
                        (x == 11 && y == 2)
                    ||  (x == 11 && y == 3)
                    ||  (x == 12 && y == 2)
                ) {
                    temp += 10.0;
                }

                //std::cout << (int)temp << " ";

                cv::Vec4b col = hsv(
                    300-300.0*(std::max(temp, 14.0)-14.0)/(40.0-14.0),
                    1, 1
                );
                if(temp <= 11.0) {
                    col = cv::Vec4b(30, 30, 50, 255);
                } else if(temp > 40.0) {
                    col = cv::Vec4b(255, 255, 255, 255);
                }
                imgMat.at<cv::Vec4b>(y, x) = col;
                //std::cout << std::endl;
            }

            std::string imgDataB64 = tobase64(imgMat.data, imgMat.total()*4*sizeof(byte));
            rapidjson::Value val;
            val.SetString(imgDataB64.c_str(), doc.GetAllocator());
            doc.AddMember("data", val, aloc);

            ns->sendWSDoc(doc);
        }

        timer.expires_from_now(boost::posix_time::milliseconds(interval));
        timer.async_wait(captureStuff);
    };
    timer.expires_from_now(boost::posix_time::milliseconds(interval));
    timer.async_wait(captureStuff);

    ns->registerCallback("move_actuator", [&](const rapidjson::Document &doc) {
        ac->stop();
        ActuatorMoveOrder order;
        order.posDeg = cv::Vec2d(
            std::max(-150.0, std::min(150.0, -doc["yaw"  ].GetDouble()/M_PI*180)),
            std::max(- 90.0, std::min( 90.0, -doc["pitch"].GetDouble()/M_PI*180))
        );
        order.duration = 3.5;
        ac->queueMove(order);
    });
    
    std::cout << "run" << std::endl;
    core->run();
}
Example #17
///////////////////////////////////////////////////////////////////////
//the actual render function, which is called for each frame
void renderScene(void)
{
	GLfloat modelViewMatrix[16]; 
	GLfloat projectionMatrix[16]; 

	glBindTexture(GL_TEXTURE_2D, 0);
	glBindFramebuffer(GL_FRAMEBUFFER, fb);
	{
		// render to texture
		glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

		glMatrixMode(GL_PROJECTION);
		glPushMatrix();
		gluLookAt(0.0, 0.0, 1.0, 0.0, 0.0, -1.0, 0.0f, 1.0f, 0.0f);
		glMatrixMode(GL_MODELVIEW);
		glPushMatrix();

		glLightfv(GL_LIGHT0, GL_POSITION, lpos);
		glTranslatef(move_x, move_y, 0.0);
		glTranslatef(0.0, 0.0, translate_z);
		glRotatef(rotate_x, 1.0, 0.0, 0.0);
		glRotatef(rotate_y, 0.0, 1.0, 0.0);
		glGetFloatv(GL_MODELVIEW_MATRIX, modelViewMatrix);
		glGetFloatv(GL_PROJECTION_MATRIX, projectionMatrix);
		glutSolidTeapot(0.5);

		glMatrixMode(GL_MODELVIEW);
		glPopMatrix();
		glMatrixMode(GL_PROJECTION);
		glPopMatrix();
	}
	glBindFramebuffer(GL_FRAMEBUFFER, 0);

	// now render to the screen using the texture...
	glClearColor(0.0, 0.0, 0.0, 0.0);
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
	glMatrixMode(GL_MODELVIEW);
	glPushMatrix();
	glLoadIdentity();
	 
	// draw textured quad
	glUseProgram(p);

	GLint loc = glGetUniformLocation(p, "modelViewMatrix");
	if (loc != -1)
	{
		glUniformMatrix4fv( loc, 1, GL_FALSE, modelViewMatrix); 
	}
	loc = glGetUniformLocation(p, "projectionMatrix");
	if (loc != -1)
	{
		glUniformMatrix4fv( loc, 1, GL_FALSE, projectionMatrix); 
	}
	    
	glBindTexture(GL_TEXTURE_2D, tex);
	glEnable(GL_TEXTURE_2D);  

	glColor3f(1.0, 1.0, 1.0);
	glBegin(GL_QUADS);
	{
		glTexCoord2f(0,         0);        glVertex2f(-1, -1);
		glTexCoord2f(texWidth, 0);         glVertex2f( 1, -1);
		glTexCoord2f(texWidth, texHeight); glVertex2f( 1,  1);
		glTexCoord2f(0,        texHeight); glVertex2f(-1,  1);
	}
	glEnd();

	glPopMatrix();
	glDisable(GL_FRAGMENT_PROGRAM_ARB);
	glUseProgram(0);

	//helper function for submission. Will capture 2nd frame of task
	frameCaptured++;
	if (frameCaptured == 2)
	{
		renderID(win_width, win_height);
		captureFrame();
	}

	GL_CHECK(glutSwapBuffers());
}
Example #18
void SimplePipeReader::run()
{
  if(debug) std::cout << "SimplePipeReader: start()" << std::endl;
  bool closing=false;
  QByteArray charList;
//   int counter = 0;
  while ( !closing ) {
    usleep(1000);
    QString line = input_line->readLine ();
    //       std::cout << "SimplePipeReader: read " << line.size() << " chars" << std::endl;
    if ( line.isEmpty() ) continue;
    else if ( line.startsWith ( "#QUIT" ) ) {
      if(debug)  std::cout << "SimplePipeReader: have seen #QUIT !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!" << std::endl;
      closing = true;
      break;
    } else if ( line.startsWith ( "#RESET" ) ) {
      if(debug) std::cout << "SimplePipeReader: have seen #RESET **************************" << std::endl;


    } else if ( line.startsWith ( "#V" ) ) { // video recording -> capture frames
      if(!noVideo){
        QStringList pieces = line.split(" ");
        if(pieces.length()<3) { std::cout << "got " << line.toStdString()
                                          << ", but missing parameters; expected \"#V idx directory\"" << std::endl;
        }else{
          long int cnt=pieces.at(1).toLong();
          emit captureFrame(cnt, pieces.at(2));
          waitUntilGo(); // wait for gui thread
        }
      }
    } else if ( line.startsWith ( "#IN" ) ) {  // Name
      emit sourceName(line.mid(4));
    }

    if (debug) std::cout << "currDatalin: " << (currentDataLine.section(' ', 0, 0)).toDouble();
    if (debug) std::cout << "oldline: " << (line.section(' ', 0, 0)).toDouble() << std::endl;

    if ( line.startsWith ( "#C" ) ) {
      //cut the leading "#C " (first 3 chars)
      line = line.mid ( 3 );
      currentChannelLine = line;
      //         std::cout << "SimplePipeReader currentChannelLine: [" << currentChannelLine.toStdString() << "]" << std::endl;
    } else if ( line.startsWith ( "#" ) ) {
      continue;
    } else if ( (currentChannelLine.size() > 2)
                && (line.section(' ', 0, 0) != currentDataLine.section(' ', 0, 0))) {
      while(waitForGui){ msleep(100); }
      currentDataLine = line;
      emit newData(); // when the timestamp (first element) has changed
    }

    //      if ( line.startsWith ( "#D" ) ) {
    //          //cut the first 2 chars (#C)
    //          line = line.mid ( 3 );
    //          currentDescriptionLine = line;
    ////         printf("SimplePipeReader currentDescriptionLine: [%s]\r\n",currentDescriptionLine.toStdString().c_str());
    //      }
  }

  if(debug) std::cout << "SimplePipeReader SIGNAL(finished())" << std::endl;
  emit finished();
}