Example 1
void PhotoBomberApp::selectAspectRatio(bb::cascades::multimedia::Camera *camera, const float aspect)
{
#define DELTA(x, y) ((x) > (y) ? ((x) - (y)) : ((y) - (x)))  // absolute difference; defined in the app (see Example 4) but needed here
    CameraSettings camsettings;
    camera->getSettings(&camsettings);

    // Get a list of supported resolutions.
    QVariantList reslist = camera->supportedCaptureResolutions(CameraMode::Photo);

    // Find the closest match to the aspect parameter
    for (int i = 0; i < reslist.count(); i++) {
        QSize res = reslist[i].toSize();
        qDebug() << "supported resolution: " << res.width() << "x" << res.height();

        // Check for w:h or h:w within 5px margin of error...
        if ((DELTA(res.width() * aspect, res.height()) < 5)
                || (DELTA(res.width(), res.height() * aspect) < 5)) {
            qDebug() << "picking resolution: " << res.width() << "x" << res.height();
            camsettings.setCaptureResolution(res);
            break;
        }
    }

    // Update the camera setting
    camera->applySettings(&camsettings);
}
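A quick way to sanity-check the tolerance test above is to run it on a known mode. The following standalone sketch (not part of the original app, and plain C++ so it builds without Qt) shows that a 1920x1080 resolution passes the 5 px check when aspect = 9/16:

#include <cmath>
#include <cstdio>

// Minimal sketch of the aspect-match test used by selectAspectRatio().
// With aspect = 9/16, 1920 * 0.5625 == 1080 exactly, so the mode matches.
int main()
{
    const float aspect = 9.0f / 16.0f;
    const float w = 1920.0f, h = 1080.0f;
    bool match = std::fabs(w * aspect - h) < 5.0f
              || std::fabs(w - h * aspect) < 5.0f;
    std::printf("1920x1080 %s aspect %.4f\n",
                match ? "matches" : "does not match", aspect);
    return 0;
}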
Example 2
RPiCamera::RPiCamera() :
currentBuf(0),
timeStamp(0),
storedTimeStamp(Platform->getTime())
{
#if DEBUG_NUCAMERA_VERBOSITY > 4
    debug << "RPiCamera::RPiCamera()" << endl;
#endif

    // Default to the bottom camera.
    m_settings.activeCamera = CameraSettings::BOTTOM_CAMERA;

    // Read camera settings from file.
    CameraSettings fileSettings;
    fileSettings.LoadFromFile(CONFIG_DIR + string("Camera.cfg"));
    debug << "Loading settings from " << CONFIG_DIR + string("Camera.cfg") << endl;

    // Open device
    openCameraDevice("/dev/video0");

    // Initialise the camera, then force-apply the settings read from file.
    initialiseCamera();
    forceApplySettings(fileSettings);

    readCameraSettings();

    loadCameraOffset();

    // enable streaming
    setStreaming(true);
}
Example 3
NAOCamera::NAOCamera() :
currentBuf(0),
timeStamp(0),
storedTimeStamp(Platform->getTime())
{
#if DEBUG_NUCAMERA_VERBOSITY > 4
    debug << "NAOCamera::NAOCamera()" << endl;
#endif

    // Set current camera to unknown.
    m_settings.activeCamera = CameraSettings::UNKNOWN_CAMERA;

    // Read camera settings from file.
    CameraSettings fileSettings;
    fileSettings.LoadFromFile(CONFIG_DIR + string("Camera.cfg"));

    // Open device
    openCameraDevice("/dev/video0");

    // Set Bottom Camera
    setActiveCamera(CameraSettings::BOTTOM_CAMERA);
    initialiseCamera();
    readCameraSettings();
    m_cameraSettings[1] = m_settings;
    forceApplySettings(fileSettings);
    
    // Set Top Camera
    setActiveCamera(CameraSettings::TOP_CAMERA);
    initialiseCamera();
    readCameraSettings();
    m_cameraSettings[0] = m_settings;
    forceApplySettings(fileSettings);

    setActiveCamera(CameraSettings::BOTTOM_CAMERA);

    loadCameraOffset();

    // enable streaming
    setStreaming(true);
}
Example 4
void ApplicationUIBase::set_aspect_ratio(bb::cascades::multimedia::Camera *camera, const float aspect)
{
#define DELTA(x, y) ((x) > (y) ? ((x) - (y)) : ((y) - (x)))
    CameraSettings camsettings;
    camera->getSettings(&camsettings);
    QVariantList reslist = camera->supportedCaptureResolutions(CameraMode::Photo);

    for (int i=0; i<reslist.count(); i++)
    {
        QSize res = reslist[i].toSize();
        qDebug() << "supported resolution: " << res.width() << "x" << res.height();
        // Check for w:h or h:w within a 5 px margin of error. Note: unlike
        // selectAspectRatio() in Example 1, this loop does not break, so the
        // last matching resolution in the list wins.
        if ((DELTA(res.width() * aspect, res.height()) < 5) || (DELTA(res.width(), res.height() * aspect) < 5))
        {
            qDebug() << "picking resolution: " << res.width() << "x" << res.height();
            camsettings.setCaptureResolution(res);
        }
    }

    camera->applySettings(&camsettings);
}
Example 5
void ChangeSize(int w, int h)
{
  if(h == 0) h = 1;  // Guard against a divide-by-zero in the projection's aspect ratio

  // Set Viewport to window dimensions
  glViewport(0, 0, w, h);

  gCamera.viewWidth = w;
  gCamera.viewHeight = h;
  g_projection = gCamera.projection();
  gSystemFont->setViewport(w, h);
}
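ChangeSize() clamps h to 1 because the projection rebuilt via gCamera.projection() divides by the window's aspect ratio w/h. A minimal sketch of that dependency (Mat4 and perspective() are hypothetical stand-ins, not the original LTransform API):

#include <cmath>

struct Mat4 { float m[16]; };  // column-major, OpenGL-style

// Standard perspective matrix: note the division by h via the aspect ratio,
// which is why the caller must never pass h == 0.
Mat4 perspective(float fovYRadians, float w, float h, float zNear, float zFar)
{
    float aspect = w / h;  // would be inf/NaN for h == 0
    float f = 1.0f / std::tan(0.5f * fovYRadians);
    Mat4 p = {};
    p.m[0]  = f / aspect;
    p.m[5]  = f;
    p.m[10] = (zFar + zNear) / (zNear - zFar);
    p.m[11] = -1.0f;
    p.m[14] = (2.0f * zFar * zNear) / (zNear - zFar);
    return p;
}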
Example 6
void cameraSettingsWidget::sendDataToRobot()
{
    // Using the job interface to send the settings to the robot.
    CameraSettings tempSettings;
    tempSettings.p_gain.set(0);
    tempSettings.p_exposure.set(0);
    tempSettings.p_contrast.set(0);
    tempSettings.copyParams();
    qDebug() << tempSettings.gain << tempSettings.exposure << tempSettings.contrast;
    // Note: the job is static, so it is constructed (with the settings above)
    // only on the first call to this slot.
    static ChangeCameraSettingsJob* camerajob = new ChangeCameraSettingsJob(tempSettings);

    m_job_list->addCameraJob(camerajob);
    m_job_list->summaryTo(debug);

    (*nuio) << m_job_list;

    m_job_list->clear();

    readPacketTimer.start();

}
Example 7
void drawGeometry()
{
  float t = g_appTimer.timeElapsed_s();
  glUseProgram(g_shader.program());
  glUniform1f(uniforms[kTime], t);

  // Back the model off along -z just far enough that the scene's bounding
  // box fits both the horizontal and vertical extents of the view frustum.
  float x = gSceneBounds.centre().x();
  float y = gSceneBounds.centre().y();
  float z = gSceneBounds.centre().z();
  float zX = .5f * gSceneBounds.xSize() / (gCamera.aspect() * tanf(LRadians(.5f * gCamera.fov)));
  float zY = .5f * gSceneBounds.ySize() / tanf(LRadians(.5f * gCamera.fov));
  float zMax = std::max(zX, zY) + .5f * gSceneBounds.zSize();

  LTransform centreTransform = LTranslate(0, 0, -1 - zMax) *
    LRotateY(LRadians(gRotateY)) * LRotateX(LRadians(gRotateX)) *
    LTranslate(-x, -y, -z);

  LTransform M = centreTransform;
  LTransform MVP = g_projection * M;
  LTransform normalTransform(M.inverse().matrix().transpose());

  glUniformMatrix4fv(uniforms[kModelView], 1, GL_FALSE, M.array());
  glUniformMatrix4fv(uniforms[kMVP], 1, GL_FALSE, MVP.array());
  glUniformMatrix4fv(uniforms[kNormalMatrix], 1, GL_FALSE, normalTransform.array());

  LPoint lp(9.0f * sinf(.9f * t), 9.0f + 3.0f * sinf(.8f * t), 15.0f);
  glUniform3fv(uniforms[kLightPosition], 1, (float*)&lp);

  std::for_each(gShapes.begin(), gShapes.end(), boost::bind(&Shape::draw, _1));
  checkGLErrors();
}
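The zX/zY lines in drawGeometry() are the standard "back the camera off until the box fits" computation: with a vertical field of view fovY and aspect ratio a, a box of height sy is fully visible from depth (sy/2)/tan(fovY/2), and one of width sx from (sx/2)/(a*tan(fovY/2)). A self-contained version of the same arithmetic (function name hypothetical):

#include <algorithm>
#include <cmath>

// Distance the camera must sit from the centre of an axis-aligned box of
// size (sx, sy, sz) so the whole box fits the view frustum.
float fitDistance(float sx, float sy, float sz, float fovY, float a)
{
    float t  = std::tan(0.5f * fovY);
    float zX = 0.5f * sx / (a * t);       // fits the box horizontally
    float zY = 0.5f * sy / t;             // fits the box vertically
    return std::max(zX, zY) + 0.5f * sz;  // back off past the box's near face
}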
Example 8
int main(int argc, char *argv[]){

    QApplication application(argc, argv);

    /*
     * XML configuration parser
     */
    ParseNetworkConfigurationXML parser;
    Sink* sink = new Sink();
    vector<Camera*>* cameras = Camera::getCameras();
    vector<Link*> links;
    vector<NetworkItem*> items;

    parser.parse(XML_NETWORK_PATH, items, links, cameras, sink);
    std::cout << "main: XML sink: " << *sink << endl;
    Camera::printCameras();

    /*
     * PKLot configuration parser
     */
    for (uint camIdx = 0; camIdx < cameras->size(); camIdx++){
        Camera* cam = cameras->at(camIdx);
        std::vector<PKLot> pklots;
        cv::Size imgSize(FRAME_W,FRAME_H);
        stringstream ss;
        ss << PKLOT_XML_PATH << cam->getId() << ".xml";
        PKLot::parse(ss.str(),pklots);
        cam->setPKLots(pklots,imgSize);
    }

    /*
     * GUI
     */
    QDesktopWidget desktop;
    QRect mainScreenSize = desktop.availableGeometry(desktop.primaryScreen());

    std::vector<CameraView*> cam_views(cameras->size());

    /* Create the server */
    IoThread* ioThread = IoThread::getInstance(GUI_LISTENING_TCP_PORT);

    for (uint camIdx = 0; camIdx < cameras->size(); camIdx++){

        Camera* curCam = cameras->at(camIdx);
        curCam->setGuiIdx(camIdx);

        std::stringstream ss;
        ss << "Camera " << camIdx+1;
        QString wTitle(ss.str().c_str());

        CameraView* curCamView = new CameraView(curCam, 0, camIdx);

        curCamView->setWindowTitle(wTitle);
        int cwTLx = (10 + curCamView->width())*camIdx;
        int cwTLy = 10;

        curCamView->setGeometry(cwTLx, cwTLy, curCamView->width(), curCamView->height());
        curCamView->show();

        cam_views[camIdx] = curCamView;
    }

    ObjectTracking * task_track = ObjectTracking::getObjectTracking(DB_PATH);

    task_track->setCameras(cameras);
    task_track->setCamViews(cam_views);

    for (vector<Camera*>::iterator camIt = cameras->begin(); camIt != cameras->end(); camIt++){
        task_track->connectTasks(*camIt);
    }

    GuiProcessingSystem* guiProcessingSystem = GuiProcessingSystem::getInstance();

    inverse_BRISK * invBRISK = inverse_BRISK::getinverse_BRISK(guiProcessingSystem->getBriskExtractor(), FRAME_W, FRAME_H);
    invBRISK->build_database(DB_IMAGE_PATH);
    invBRISK->setNCameras(cameras->size());
    invBRISK->setCameras(cameras);

    invBRISK->connectTasks(cameras->size());

    PerformanceManager * perf = PerformanceManager::getPerformanceManager(cameras, 40*1000/1000, 1000,5);

    CameraSettings camSet;

    NetworkTopology net(&camSet, &items, &links, 0);
    camSet.setNetworkTopology(&net);

    int netTLx = 10;
    int netTLy = mainScreenSize.height() - net.geometry().height();

    net.setGeometry(netTLx, netTLy, net.geometry().width(), net.geometry().height());
    /*net.show();*/

    int csTLy = 10;
    int csTLx = mainScreenSize.width() - camSet.geometry().width() - 10;

    camSet.setGeometry(csTLx, csTLy, camSet.geometry().width(), camSet.geometry().height());

    camSet.show();

    std::vector<Plot*> cam_plots(cameras->size());

    for (uint cam = 0; cam < cameras->size(); cam++){

        Plot * p = new Plot(*perf, cam, *(cameras->at(cam)));
        p->setWindowTitle("Camera " + QString::number(cam + 1));
        int plotTLx = mainScreenSize.width() - (10 + p->width())*(cameras->size() - cam);
        int plotTLy = mainScreenSize.height() - p->height();

        p->setGeometry(plotTLx, plotTLy, p->width(), p->height());
        p->show();

        cam_plots[cam] = p;  // assign in place; the vector was pre-sized above
    }

    MainWindow mainWindow(&camSet, cam_views, &net, cam_plots);

    int wTLx = 20 + net.geometry().width();
    int wTLy = mainScreenSize.height() - net.geometry().height();

    mainWindow.setGeometry(wTLx, wTLy, mainWindow.geometry().width(), mainWindow.geometry().height());
    mainWindow.show();

    // inverse BRISK thread
    QThread* inverseBriskThread = new QThread;
    invBRISK->moveToThread(inverseBriskThread);
    inverseBriskThread->start();

    // object tracking+recognition thread
    QThread* recognitionTrackingThread = new QThread;
    task_track->moveToThread(recognitionTrackingThread);
    recognitionTrackingThread->start();

    perf->connectTasks();

    /* Gentle shutdown */
    signal(SIGINT, GuiNetworkSystem::shutdown);
    signal(SIGQUIT, GuiNetworkSystem::shutdown);

    /* Start the network system */
    ioThread->start();

    return application.exec();
}
Example 9
void OpenniWrapperNode::initializeOpenni()
{
    Status rc = OpenNI::initialize();
    if (rc != STATUS_OK)
    {
        printf("Initialize failed\n%s\n", OpenNI::getExtendedError());
        return;  // void function: cannot return a value
    } else {
        cout<<"OpenNI initialized"<<endl;
    }

    OpenNI::addDeviceConnectedListener(&m_devicePrinter);
    OpenNI::addDeviceDisconnectedListener(&m_devicePrinter);
    OpenNI::addDeviceStateChangedListener(&m_devicePrinter);

    openni::Array<openni::DeviceInfo> deviceList;
    openni::OpenNI::enumerateDevices(&deviceList);
    for (int i = 0; i < deviceList.getSize(); ++i)
    {
        printf("Device \"%s\" already connected\n", deviceList[i].getUri());
    }

    m_vDevice.resize(deviceList.getSize());
    m_vColor.resize(deviceList.getSize());
    m_vDepth.resize(deviceList.getSize());

    cout<<"Number of devices connected "<<deviceList.getSize()<<endl;

    for (size_t i = 0; i < m_DevicesDefined; i++)
    {
        if (i + 1 > (size_t)deviceList.getSize())
        {
            cout<<"Name "<<m_vCameraNamespace[i]<<" defined but the device is not connected."<<endl;
            break;
        }

        m_vDevice[i] = new Device();
        rc = m_vDevice[i]->open(deviceList[i].getUri());
        if (rc != STATUS_OK)
        {
            printf("Couldn't open device\n%s\n", OpenNI::getExtendedError());
            return;  // void function: cannot return a value
        } else {
            cout<<"Opened device "<<deviceList[i].getUri()<<"   namespace "<<m_vCameraNamespace[i]<<endl;
        }

        cout<<"This device supports the following sensors ";
        bool sensor = m_vDevice[i]->hasSensor(SENSOR_IR); if (sensor) cout <<"IR ";
        sensor = m_vDevice[i]->hasSensor(SENSOR_COLOR); if (sensor) cout <<"COLOR ";
        sensor = m_vDevice[i]->hasSensor(SENSOR_DEPTH); if (sensor) cout <<"DEPTH ";
        cout<<endl;

        ImageRegistrationMode mode = IMAGE_REGISTRATION_DEPTH_TO_COLOR;
        // Query support for the mode; getImageRegistrationMode() only returns the current mode.
        bool registrationSupported = m_vDevice[i]->isImageRegistrationModeSupported(mode);
        if(registrationSupported)
        {
            cout<<"Image registration SUPPORTED"<<endl;
            rc = m_vDevice[i]->setImageRegistrationMode(mode);
            if (rc != STATUS_OK)
            {
                std::cout<<"Could not set the image registration on. Some error occurred: "<<rc<<std::endl;
            }
        } else {
            cout<<"Image registration NOT SUPPORTED"<<endl;
        }

        rc = m_vDevice[i]->setDepthColorSyncEnabled(true);
        if (rc != STATUS_OK)
        {
            std::cout<<"Could not set the depth-color sync. Some error occurred"<<std::endl;
        }

        if (m_vDevice[i]->getSensorInfo(SENSOR_DEPTH) != NULL)
        {
            m_vDepth[i] = new VideoStream;
            rc = m_vDepth[i]->create(*m_vDevice[i], SENSOR_DEPTH);
            if (rc != STATUS_OK)
            {
                printf("Couldn't create depth stream\n%s\n", OpenNI::getExtendedError());
            }

            // Only configure and start the stream when the device has a depth sensor.
            VideoMode depthVideoMode = m_vDepth[i]->getVideoMode();
            depthVideoMode.setResolution(640,480);
            rc = m_vDepth[i]->setVideoMode(depthVideoMode);
            if (rc != STATUS_OK)
            {
                printf("Couldn't set increased resolution for depth stream\n%s\n", OpenNI::getExtendedError());
            }

            rc = m_vDepth[i]->start();
            if (rc != STATUS_OK)
            {
                printf("Couldn't start the depth stream\n%s\n", OpenNI::getExtendedError());
            }
        }


        char serialNumber[100];
        m_vDevice[i]->getProperty(ONI_DEVICE_PROPERTY_SERIAL_NUMBER,&serialNumber);
        std::cout<<"Device serial number "<<serialNumber<<std::endl;
        // check if a calibration file exists
        std::string home=getenv("HOME");
        std::string calibrationFileDepth = home+"/.ros/camera_info/depth_"+serialNumber+".yaml";
        std::string calibrationFileRGB = home+"/.ros/camera_info/rgb_"+serialNumber+".yaml";

        ifstream idepthfile(calibrationFileDepth.c_str());
        if (!idepthfile)
        {
            calibrationFileDepth = "";
        }
        idepthfile.close();

        ifstream irgbfile(calibrationFileRGB.c_str());
        if (!irgbfile)
        {
            calibrationFileRGB = "";
        }
        irgbfile.close();

        cout<<"RGB calib "<<calibrationFileRGB<<endl;
        cout<<"Depth calib "<<calibrationFileDepth<<endl;

        ColorCallback* colorCB = new ColorCallback(m_nodeHandle, m_vCameraNamespace[i], true, false,calibrationFileRGB);
        DepthCallback* depthCB = new DepthCallback(m_nodeHandle, m_vCameraNamespace[i], true, false,calibrationFileDepth);

        m_vDepthCallback.push_back(depthCB);
        m_vColorCallback.push_back(colorCB);

        if (m_vDevice[i]->getSensorInfo(SENSOR_COLOR) != NULL)
        {
            m_vColor[i] = new VideoStream;          

            rc = m_vColor[i]->create(*m_vDevice[i], SENSOR_COLOR);
            if (rc != STATUS_OK)
            {
                printf("Couldn't create color stream\n%s\n", OpenNI::getExtendedError());
            }

            CameraSettings* streamSettings = m_vColor[i]->getCameraSettings();

            if (streamSettings->getAutoExposureEnabled())
            {
                std::cout<<"Stream has auto exposure enabled"<<std::endl;
            } else {
                std::cout<<"Stream doesn't have auto exposure enabled"<<std::endl;
            }

            if (streamSettings->getAutoWhiteBalanceEnabled())
            {
                std::cout<<"Stream has white balancing enabled"<<std::endl;
            } else {
                std::cout<<"Stream doesn't have white balancing enabled"<<std::endl;
            }

//            streamSettings->setAutoExposureEnabled(false);
//            streamSettings->setGain(15000);
//            streamSettings->setAutoWhiteBalanceEnabled(false);

            // Configure and start the stream inside the sensor guard, so a
            // device without a color sensor never dereferences a null stream.
            VideoMode colorVideoMode = m_vColor[i]->getVideoMode();
            colorVideoMode.setResolution(640,480);
            rc = m_vColor[i]->setVideoMode(colorVideoMode);
            if (rc != STATUS_OK)
            {
                printf("Couldn't set increased resolution for color stream\n%s\n", OpenNI::getExtendedError());
            }

            rc = m_vColor[i]->start();
            if (rc != STATUS_OK)
            {
                printf("Couldn't start the color stream\n%s\n", OpenNI::getExtendedError());
            }
        }

        // Register the callbacks for new frames (assumes both streams were created above).
        m_vDepth[i]->addNewFrameListener(m_vDepthCallback[i]);
        m_vColor[i]->addNewFrameListener(m_vColorCallback[i]);
    }
}
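initializeOpenni() repeats the same rc != STATUS_OK check after nearly every OpenNI call. A small refactoring sketch (helper name hypothetical, behaviour unchanged) that centralises the logging:

#include <OpenNI.h>
#include <cstdio>

// Returns true on success; on failure, logs the OpenNI extended error.
static bool checkStatus(openni::Status rc, const char* what)
{
    if (rc != openni::STATUS_OK)
    {
        std::printf("%s failed:\n%s\n", what, openni::OpenNI::getExtendedError());
        return false;
    }
    return true;
}

// Usage, e.g.:
//   if (!checkStatus(m_vDepth[i]->start(), "Starting the depth stream")) return;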