Example #1
void Satin::startProcessing() {
	time_t the_time;
	ofstream fd;
	char outputFile[N][9], dischargePressure[N][3], carbonDioxide[N][3];
	double startTime, timeDifference;
	float smallSignalGain[N];
	int pNum, lNum, inputPowerData[N];

	startTime = (double) clock() / CLOCKS_PER_SEC;

	pNum = getInputPowers(inputPowerData);
	lNum = getLaserData(smallSignalGain, outputFile, dischargePressure, carbonDioxide);

	for (int i = 0; i < lNum; i++) {
		fd.open(outputFile[i], ios::out);

		if (fd.fail()) {
			cout << "Error opening " << outputFile[i] << "\n";
			exit(1);
		}

		time(&the_time);
		fd << "Start date: "
		   << ctime(&the_time)
		   << "\nGaussian Beam\n\n"
		   << "Pressure in Main Discharge = "
		   << dischargePressure[i]
		   << "kPa\n"
		   << "Small-signal Gain = "
		   << smallSignalGain[i]
		   << "\nCO2 via "
		   << carbonDioxide[i]
		   << "\n\nPin\t\tPout\t\tSat. Int.\tln(Pout/Pin)\tPout-Pin\n"
		   << "(watts)\t\t(watts)\t\t(watts/cm2)\t\t\t(watts)\n";

		fd.flush();
		fd.close();

		for (int j = 0; j < pNum; j++) {
			gaussianCalculation(inputPowerData[j], smallSignalGain[i], outputFile[i]);
		}

		// Reopen the output file in append mode to add the end timestamp.
		fd.open(outputFile[i], ios::out | ios::app);
		time(&the_time);
		fd << "\nEnd date: " << ctime(&the_time);

		fd.flush();
		fd.close();

	}

	timeDifference = ((double) clock() / CLOCKS_PER_SEC) - startTime;
	cout << "The CPU time was "
		 << setiosflags(ios::fixed)
		 << setprecision(2)
		 << timeDifference
		 << " seconds.\n";
}
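The gaussianCalculation routine called above is not shown in this example; each call presumably reopens the same output file and appends one row in the tab-separated layout established by the header (Pin, Pout, Sat. Int., ln(Pout/Pin), Pout-Pin). The following is only a minimal sketch of such an append step; appendResultRow, outputPower and saturationIntensity are hypothetical names used for illustration, not part of the original code.

#include <cmath>
#include <fstream>
#include <iomanip>

// Hypothetical helper: append one result row in the column layout written by startProcessing().
static void appendResultRow(const char *fileName, int inputPower,
                            double outputPower, double saturationIntensity)
{
	// Reopen the per-laser output file in append mode so earlier rows are kept.
	std::ofstream fd(fileName, std::ios::out | std::ios::app);
	if (fd.fail()) return;

	fd << inputPower << "\t\t"
	   << std::fixed << std::setprecision(3)
	   << outputPower << "\t\t"
	   << saturationIntensity << "\t"
	   << std::log(outputPower / inputPower) << "\t"
	   << outputPower - inputPower << "\n";
}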
Example #2
    void Api::RunNavigationAlgorithm() {
        double v, w, l, pan, tilt;
        jderobot::LaserDataPtr laser;
        CvPoint2D32f dest;
        int i, j;
        jderobot::EncodersDataPtr encoders;
        int cont = 0;

        /* FROM HERE ON, THE NAVIGATION CODE IMPLEMENTED BY THE STUDENT CAN BE ADDED */

        /* SOME EXAMPLES */

        /* Manipulating the camera images */
        /* The idea of this example is to filter the red color of the left camera image and repaint those pixels black
        (a minimal sketch of that filter is given after this example); the call below simply draws a diagonal line as a demonstration.
        To see the result, open the "WINDOW DEBUGGING" window and press PLAY to run our code. */
        imageCameras2openCV(); // This function must be called BEFORE working with the camera images.

        cv::line(imageCameraLeft, cv::Point2d(0,0), cv::Point2d(imageCameraLeft.cols, imageCameraLeft.rows), cv::Scalar::all(255), 3);

        /* The coordinates of the points obtained by clicking on one of the cameras are shown below */
        //std::cout << x_click_cameraleft << std::endl; // x coordinate of the point clicked in the left camera
        //std::cout << y_click_cameraleft << std::endl; // y coordinate of the point clicked in the left camera
        //std::cout << x_click_cameraright << std::endl; // x coordinate of the point clicked in the right camera
        //std::cout << y_click_cameraright << std::endl; // y coordinate of the point clicked in the right camera






        /* SIMPLE BUMP-AND-GO EXAMPLE */

        /* SENSOR READING */
        // Here we read the sensor values into our local variables
        laser = getLaserData(); // Get the laser info
        //printf("laser[45]: %d\n", laser->distanceData[45]);

        v = this->getMotorV();
        w = this->getMotorW();
        l = this->getMotorL();
        //        printf("v: %f , w: %f , l: %f\n", v, w, l);

        //dest = this->getDestino();
        //printf("destPoint = [%f, %f]\n", dest.x, dest.y);

        //encoders = this->getEncodersData();
        //printf("myPosition = [%f, %f]\n", encoders->robotx, encoders->roboty);
        /* END SENSOR READING */


        /*
        switch (accion) {

        case 0:     // Robot moving forward
            if ((laser->distanceData[45] < 1000.0) or (laser->distanceData[90] < 1000.0) or (laser->distanceData[135] < 1000.0)) {
                v = 0.;
                //if ((x_ant == myPoint.x) and (y_ant == myPoint.y) and (z_ant == myPoint.z)){
                accion = 1;
                printf("### Reverse activated\n");
                //}
            }
            else {
                v = 50;
            }
            break;

        case 1:     // Robot moving backward
            if ((laser->distanceData[45] < 1100) or (laser->distanceData[90] < 1100) or (laser->distanceData[135] < 1100)) {
                v = -50.;
                printf("### Going backward\n");
            }
            else {
                v = 0.;
                accion = 2;
            }
            break;

        case 2:     // Robot turning
            if ((laser->distanceData[45] < 1300) or (laser->distanceData[90] < 1300) or (laser->distanceData[135] < 1300)) {
                if (sentido % 2 == 0) {
                    w = 50.;
                }
                else {
                    w = 50. * (-1);
                }
                printf("### Turning: %d \n", sentido);
            }
            else {
                w = 0.;
                accion = 0;
                sentido = (1 + rand() % 40);
            }
            break;
        }
        */
        /* Command the robot */
        // Here we send the commands to the robot
        //w = 0.;
        this->setMotorW(3);
        this->setMotorV(0);

        this->setMotorL(0);

        //pan=0;
        //tilt=0;
        //this->setPTEncoders(pan,tilt,1); // The last parameter selects which camera to command (1 or 2)



    }
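The comment at the top of this example describes filtering the red color out of the left camera image and repainting those pixels black, while the code shown only draws a line. The following is a minimal sketch of such a filter, assuming imageCameraLeft is a BGR cv::Mat already filled by imageCameras2openCV(); the function name repaintRedPixelsBlack and the BGR threshold values are illustrative assumptions, not part of the original API.

#include <opencv2/opencv.hpp>

// Minimal sketch: repaint "red enough" pixels of a BGR image in black.
void repaintRedPixelsBlack(cv::Mat &imageCameraLeft)
{
    cv::Mat redMask;
    // Select pixels with a high red channel and comparatively low blue/green channels.
    cv::inRange(imageCameraLeft,
                cv::Scalar(0, 0, 100),    // lower BGR bound (illustrative)
                cv::Scalar(80, 80, 255),  // upper BGR bound (illustrative)
                redMask);
    // Repaint the selected pixels black in place.
    imageCameraLeft.setTo(cv::Scalar(0, 0, 0), redMask);
}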
Example #3
/**
 * @brief RobotThread::RobotThread
 */
RobotThread::RobotThread()
{
    // necessary to send these types over queued connections
    qRegisterMetaType< QVector<double> >("QVector<double>");
    qRegisterMetaType< QList<Obstacle> >("QList<Obstacle>");
    qRegisterMetaType< QList< QPair<double,double> > >("QList< QPair<double,double> >"); // for path planning -> path realizer
    qRegisterMetaType< PathPlotData >("PathPlotData");
    qRegisterMetaType< FilterParams >("FilterParams");
    qRegisterMetaType< CameraParams >("CameraParams");
    qRegisterMetaType< PIDParams >("PIDParams");
    qRegisterMetaType< PIDPlotData >("PIDPlotData");
    qRegisterMetaType< cv::Mat >("cv::Mat");
    qRegisterMetaType< QString >("QString"); //Log
    qRegisterMetaType< LaserPlotData > ("LaserPlotData");
    qRegisterMetaType< Position > ("Position");
    qRegisterMetaType< TeamColor >("TeamColor");
    qRegisterMetaType< CamColor >("CamColor");

    qDebug() << "INIT 'RobotThread'";

    // initialize objects for actors and sensors
    actorLowLevel   = new ActorLowLevel();
    actorHighLevel  = new ActorHighLevel();
    sensorHighLevel = new SensorHighLevel();
    sensorLowLevel  = new SensorLowLevel();
    pathPlanner     = new PathPlanning();
    gameEngine      = new GameEngine();
    cam             = new Cam();

    // moving objects to different threads
    sensorLowLevel->moveToThread(&threadRobotLowLevel);
    sensorHighLevel->moveToThread(&threadSensorHighLevel);
    actorLowLevel->moveToThread(&threadRobotLowLevel);
    actorHighLevel->moveToThread(&threadActorHighLevel);
    pathPlanner->moveToThread(&threadPathPlanner);
    gameEngine->moveToThread(&threadGameEngine);
    cam->moveToThread(&threadCam);

    // Name the threads (helpful for debugging)
    threadRobotLowLevel.setObjectName("Robot Low Level Thread");
    threadSensorHighLevel.setObjectName("High Level Sensor Thread");
    threadActorHighLevel.setObjectName("High Level Actor Thread");
    threadPathRealizer.setObjectName("Path Realizer Thread");
    threadPathPlanner.setObjectName("Path Planner Thread");
    threadGameEngine.setObjectName("Game Engine Thread");
    threadCam.setObjectName("Cam Thread");


    // ***********************************************************************
    // DELETE LATER
    connect(&threadRobotLowLevel, SIGNAL(finished()),
            sensorLowLevel, SLOT(deleteLater()));

    connect(&threadSensorHighLevel, SIGNAL(finished()),
            sensorHighLevel, SLOT(deleteLater()));

    connect(&threadRobotLowLevel, SIGNAL(finished()),
            actorLowLevel, SLOT(deleteLater()));

    connect(&threadActorHighLevel, SIGNAL(finished()),
            actorHighLevel, SLOT(deleteLater()));

    connect(&threadPathPlanner, SIGNAL(finished()),
            pathPlanner, SLOT(deleteLater()));

    connect(&threadCam, SIGNAL(finished()),
            cam, SLOT(deleteLater()));

    connect(&threadGameEngine, SIGNAL(finished()),
            gameEngine, SLOT(deleteLater()));

    // ***********************************************************************
    // SIGNALS FROM GUI

    // start Orientation
    connect(mainWindow,SIGNAL(signalStartOrientation(bool)),
            sensorHighLevel, SLOT(slotStartDetection(bool)));

    // remote control via the GUI
    connect(mainWindow,SIGNAL(robotRemoteControllUpdate(double,double)),
            actorLowLevel,SLOT(setRobotRemoteControllParams(double,double)));


    // remote Odometry update
    connect(mainWindow,SIGNAL(updateRemoteOdometry(Position)),
            actorLowLevel, SLOT(setOdometry(Position)));

    // Cam Display
    // Camera Parameters in GUI have changed, send to Cam class
    connect(mainWindow, SIGNAL(signalChangeCamParams(CameraParams)),
            cam, SLOT(slotSetCameraParams(CameraParams)));



    // Set Median Filter
    connect(mainWindow, SIGNAL(signalChangeFilterParams(FilterParams)),
            sensorHighLevel, SLOT(slotSetFilterParams(FilterParams)));

    // PID Tab
    connect(mainWindow, SIGNAL(signalChangePIDParams(PIDParams)),
            actorHighLevel, SLOT(slotChangePIDParams(PIDParams)));


    // ***********************************************************************
    // SIGNALS TO GUI

    // Sensor Display
    // Display the sensor data in GUI
    connect(sensorLowLevel, SIGNAL(signalLaserPlotRaw(LaserPlotData)), // raw data
            mainWindow, SLOT(slotLaserDisplay(LaserPlotData)));
    connect(sensorHighLevel, SIGNAL(signalSendLaserData(LaserPlotData)), // filtered data
            mainWindow, SLOT(slotLaserDisplay(LaserPlotData)));

    // Path Display
    // Display potential map and raw waypoints
    connect(pathPlanner, SIGNAL(pathDisplay(PathPlotData)),
            mainWindow, SLOT(updatePathDisplay(PathPlotData)));

    // Display calculated spline
    connect(actorHighLevel, SIGNAL(signalSplinePlot(PathPlotData)),
            mainWindow, SLOT(updatePathDisplay(PathPlotData)));

    // Display PID Plot
    connect(actorHighLevel, SIGNAL(signalPIDPlot(PIDPlotData)),
            mainWindow, SLOT(slotPIDPlot(PIDPlotData)));

    // Cam grabbed a frame, display in GUI
    connect(cam, SIGNAL(signalDisplayFrame(cv::Mat)),
            mainWindow, SLOT(slotDisplayFrame(cv::Mat)));

    // ***********************************************************************

    // emergency detected/received
    connect(sensorHighLevel,SIGNAL(signalEmergencyStopEnabled(bool)), // emergency stop triggered by collision avoidance
            actorLowLevel,SLOT(slotEmergencyStopEnabled(bool)));

    connect(gameEngine, SIGNAL(signalEmergencyStopEnabled(bool)), // stopMovement() from the referee
            actorLowLevel, SLOT(slotEmergencyStopEnabled(bool)));

    // send turn command from the high-level sensor to the low-level actor
    connect(sensorHighLevel,SIGNAL(signalSendRobotControlParams(double,double)),
            actorLowLevel,SLOT(setRobotControllParams(double,double)));

    // control command from the high-level actor
    connect(actorHighLevel,SIGNAL(signalSendRobotControlParams(double,double)),
            actorLowLevel,SLOT(setRobotControllParams(double,double)));

    // Get waypoints from path planning into the path realizer
    connect(pathPlanner, SIGNAL(sendUpdatedWaypoints(QList< QPair<double,double> >)),
            actorHighLevel, SLOT(slotUpdateWaypoints(QList< QPair<double,double> >)));

    // SENSOR DATA TO HIGH LEVEL SENSOR
    connect(sensorLowLevel,SIGNAL(laserDataReady(QVector<double>)),
            sensorHighLevel, SLOT(getLaserData(QVector<double>)));

    connect(sensorLowLevel,SIGNAL(sonarDataReady(QVector<double>)),
            sensorHighLevel, SLOT(getSonarData(QVector<double>)));

    // start color detection from sensor
    connect(sensorHighLevel, SIGNAL(signalStartColorDetection()),
            cam, SLOT(slotStartColorDetection()));

    // color successfully detected
    connect(cam, SIGNAL(signalColorDetected(CamColor)),
            sensorHighLevel, SLOT(slotColorDetected(CamColor)));

    // ***********************************************************************
    // update Odometry
    connect(sensorLowLevel,SIGNAL(updateOdometry()),
            actorLowLevel, SLOT(getOdometry()));

    connect(sensorHighLevel, SIGNAL(sendOdometryData(Position)),
            actorLowLevel, SLOT(setOdometry(Position)));

    // ***********************************************************************
    // Signals from GameEngine
    connect(gameEngine,SIGNAL(signalStartDetection(bool)),
            sensorHighLevel, SLOT(slotStartDetection(bool)));

    // Signals to GameEngine
    connect(sensorHighLevel, SIGNAL(signalSendTeamColor(CamColor)),
            gameEngine, SLOT(slotDetectionFinished(CamColor)));

    // start the event loop of every thread
    threadRobotLowLevel.start();
    threadSensorHighLevel.start();
    threadActorHighLevel.start();
    threadPathRealizer.start();
    threadPathPlanner.start();
    threadCam.start();
    threadGameEngine.start();


    // Start the GameEngine state machine
    gameEngine->start();

    // Start the PathPlanning "loop"
    /// \todo This is only here for debugging for now.
    // Later, path planning should only be triggered from the GameEngine at the start of the game phase.
    QMetaObject::invokeMethod(pathPlanner, "planPath");
}
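The qRegisterMetaType calls at the top of this constructor are what allow custom value types to be carried through queued signal/slot connections between the worker threads. The sketch below shows that pattern in isolation, using a hypothetical GridPosition struct rather than any class from this project.

#include <QMetaType>
#include <QObject>

// Hypothetical value type sent between threads; it must be default-constructible and copyable.
struct GridPosition { double x = 0.0; double y = 0.0; };
Q_DECLARE_METATYPE(GridPosition)

// Register the type once, before the first queued emission (e.g. in a constructor
// such as RobotThread::RobotThread()):
//     qRegisterMetaType<GridPosition>("GridPosition");
//
// After registration, a connection between objects living in different threads
// (a queued connection) can copy a GridPosition argument into the receiver's
// event queue. Without it, Qt warns at runtime:
//     QObject::connect: Cannot queue arguments of type 'GridPosition'
// and the slot is never invoked.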