Code Example #1
File: NaoImageTranscriber.cpp  Project: chachi/nbites
void NaoImageTranscriber::run()
{
    Thread::running = true;
    Thread::trigger->on();

    long long lastProcessTimeAvg = VISION_FRAME_LENGTH_uS;

    struct timespec interval, remainder;
    while (Thread::running) {
        PROF_ENTER(P_MAIN);
        PROF_ENTER(P_GETIMAGE);
        //start timer
        const long long startTime = monotonic_micro_time();

        topImageTranscriber.waitForImage();
        bottomImageTranscriber.waitForImage();

        subscriber->notifyNextVisionImage();

        PROF_EXIT(P_GETIMAGE);

        //stop timer
        const long long processTime = monotonic_micro_time() - startTime;
        //sleep until next frame

        lastProcessTimeAvg = lastProcessTimeAvg/2 + processTime/2;

        if (processTime > VISION_FRAME_LENGTH_uS) {
            if (processTime > VISION_FRAME_LENGTH_PRINT_THRESH_uS) {
#ifdef DEBUG_ALIMAGE_LOOP
                cout << "Time spent in ALImageTranscriber loop longer than"
                     << " frame length: " << processTime << endl;
#endif
            }
            //Don't sleep at all
        } else {
            const long int microSleepTime =
                static_cast<long int>(VISION_FRAME_LENGTH_uS - processTime);
            const long int nanoSleepTime =
                static_cast<long int>((microSleepTime %(1000 * 1000)) * 1000);

            const long int secSleepTime =
                static_cast<long int>(microSleepTime / (1000*1000));

            // cout << "Sleeping for nano: " << nanoSleepTime
            //      << " and sec:" << secSleepTime << endl;

            interval.tv_sec = static_cast<time_t>(secSleepTime);
            interval.tv_nsec = nanoSleepTime;

            nanosleep(&interval, &remainder);
        }
        PROF_EXIT(P_MAIN);
        PROF_NFRAME();
    }
    Thread::trigger->off();
}
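
The loop above measures itself with monotonic_micro_time(), which is defined elsewhere in the repository and not shown here. For reference, a minimal stand-in on Linux can be built on clock_gettime(CLOCK_MONOTONIC); this is a sketch of the idea, not the nbites implementation:

#include <time.h>

// Hypothetical stand-in for monotonic_micro_time(): microseconds from a
// monotonic clock, unaffected by wall-clock adjustments.
static long long monotonicMicroTimeSketch()
{
    struct timespec ts;
    clock_gettime(CLOCK_MONOTONIC, &ts);
    return static_cast<long long>(ts.tv_sec) * 1000000LL + ts.tv_nsec / 1000;
}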
Code Example #2
File: CommModule.cpp  Project: CheddarB/nbites
void CommModule::send()
{
    _worldModelInput.latch();

    teamConnect->send(_worldModelInput.message(), myPlayerNumber(),
                      gameConnect->myTeamNumber(), 1);
    PROF_ENTER(P_COMM_TIMER);
    timer->teamPacketSent();
    PROF_EXIT(P_COMM_TIMER);
}
Code Example #3
File: ScriptedProvider.cpp  Project: CheddarB/nbites
void ScriptedProvider::setNextBodyCommand(std::vector<float>& sensorAngles) {
    // If there are no more commands, don't try to enqueue one
    if ( !bodyCommandQueue.empty() ) {
        BodyJointCommand::ptr nextCommand = bodyCommandQueue.front();
        bodyCommandQueue.pop();

        PROF_ENTER(P_CHOPPED);
        // Replace the current command
        currCommand = chopper.chopCommand(nextCommand, sensorAngles);
        PROF_EXIT(P_CHOPPED);
    }
}
Code Example #4
File: CommModule.cpp  Project: CheddarB/nbites
void CommModule::run_()
{
    PROF_ENTER(P_COMM_RECEIVE);
    receive();
    PROF_EXIT(P_COMM_RECEIVE);

    teamConnect->checkDeadTeammates(_worldModels,
                                    timer->timestamp(),
                                    myPlayerNumber());

    burstRate = monitor->performHealthCheck(timer->timestamp());

#ifdef LOG_COMM
    monitor->logOutput(timer->timestamp());
#endif

    PROF_ENTER(P_COMM_SEND);
    if (timer->timeToSend() && myPlayerNumber() > 0)
    {
        send();
    }
    else
    {
        // Keep the profiler's enter/exit counts balanced even when no
        // packet is sent this frame.
        PROF_ENTER(P_COMM_BUILD_PACKET);
        PROF_EXIT(P_COMM_BUILD_PACKET);
        PROF_ENTER(P_COMM_SERIALIZE_PACKET);
        PROF_EXIT(P_COMM_SERIALIZE_PACKET);
        PROF_ENTER(P_COMM_TO_SOCKET);
        PROF_EXIT(P_COMM_TO_SOCKET);
        PROF_ENTER(P_COMM_TIMER);
        PROF_EXIT(P_COMM_TIMER);
    }
    PROF_EXIT(P_COMM_SEND);
}
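
The else branch above enters and exits four profiling blocks back to back purely so the profiler sees a balanced enter/exit count on every frame. Assuming the PROF_ENTER/PROF_EXIT macros accept a profiling id at runtime, a small RAII guard (not part of the nbites profiler) is one common way to avoid that manual balancing:

// Hypothetical RAII guard, not in the nbites codebase: enters a profiling
// block on construction and exits it on destruction, so every return path
// balances the pair automatically.
class ScopedProf
{
public:
    explicit ScopedProf(int id) : id_(id) { PROF_ENTER(id_); }
    ~ScopedProf() { PROF_EXIT(id_); }

private:
    int id_;
};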
Code Example #5
File: TranscriberModule.cpp  Project: oneamtu/nbites
// The heart of the transcriber, returns an image full of pixels from video mem
messages::YUVImage ImageTranscriber::getNextImage()
{
    // dequeue a frame buffer (this call blocks when there is
    // no new image available)
    if(cameraType == Camera::TOP)
    {
        PROF_ENTER(P_TOP_DQBUF);
    }
    else
    {
        PROF_ENTER(P_BOT_DQBUF);
    }

    verify(ioctl(fd, VIDIOC_DQBUF, &requestBuff),
           "Dequeueing the frame buffer failed.");

    if(cameraType == Camera::TOP)
    {
        PROF_EXIT(P_TOP_DQBUF);
    }
    else
    {
        PROF_EXIT(P_BOT_DQBUF);
    }

    if(requestBuff.bytesused != (unsigned int)SIZE)
        std::cerr << "CAMERA::ERROR::Wrong buffer size!" << std::endl;

    static bool shout = true;
    if (shout) {
        shout = false;
        std::cerr << "CAMERA::Camera is working." << std::endl;
    }

    // Pixels -> TranscriberBuffer -> YUVImage
    return messages::YUVImage(new TranscriberBuffer(mem[requestBuff.index],
                                                    fd,
                                                    requestBuff),
                              2*WIDTH, HEIGHT, 2*WIDTH);
}
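
getNextImage() dequeues a filled frame buffer with VIDIOC_DQBUF and wraps it in a TranscriberBuffer; every dequeued buffer must eventually be handed back to the driver with VIDIOC_QBUF, presumably when that wrapper is released. A standalone sketch of the dequeue/requeue cycle, independent of the nbites classes and assuming streaming has already been set up:

#include <cstring>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

// Assumes the device is open, buffers were requested with VIDIOC_REQBUFS,
// mmap()ed, queued, and streaming was started with VIDIOC_STREAMON.
bool processOneFrame(int fd)
{
    struct v4l2_buffer buf;
    std::memset(&buf, 0, sizeof(buf));
    buf.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_MMAP;

    if (ioctl(fd, VIDIOC_DQBUF, &buf) < 0)    // blocks until a frame is ready
        return false;

    // ... consume the pixels mapped for buf.index here ...

    return ioctl(fd, VIDIOC_QBUF, &buf) == 0; // give the buffer back to the driver
}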
Code Example #6
void BehaviorsModule::run_ ()
{
    PROF_ENTER(P_BEHAVIORS);
    static unsigned int num_crashed = 0;
    if (error_state && num_crashed < NUM_PYTHON_RESTARTS_MAX) {
        this->reload_hard();
        error_state = false;
        num_crashed++;
    }

    // Latch incoming messages and prepare outgoing messages
    prepareMessages();

    PROF_ENTER(P_PYTHON);

    // Call main run() method of Brain
    if (brain_instance != NULL) {
        PyObject *result = PyObject_CallMethod(brain_instance, "run", NULL);
        if (result == NULL) {
            // set BehaviorsModule in error state
            error_state = true;
            // report error
            std::cout << "Error occurred in Brain.run() method" << std::endl;
            if (PyErr_Occurred()) {
                PyErr_Print();
            } else {
                std::cout << "  No Python exception information available"
                          << std::endl;
            }
        } else {
            Py_DECREF(result);
        }
    }
    PROF_EXIT(P_PYTHON);

    // Send outgoing messages
    sendMessages();

    PROF_EXIT(P_BEHAVIORS);
}
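
run_() assumes brain_instance already points at a live Python object; creating that object is outside this example. A minimal sketch of the usual CPython embedding pattern is shown below, with placeholder module and class names rather than the actual nbites Brain module:

#include <Python.h>

// Hypothetical setup code; "brain" and "Brain" are placeholder names.
// Assumes Py_Initialize() has already been called.
PyObject* createBrainInstance()
{
    PyObject* module = PyImport_ImportModule("brain");   // import brain
    if (module == NULL) {
        PyErr_Print();
        return NULL;
    }

    // Equivalent to brain.Brain() in Python
    PyObject* instance = PyObject_CallMethod(module, "Brain", NULL);
    Py_DECREF(module);
    if (instance == NULL) {
        PyErr_Print();
    }
    return instance;
}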
Code Example #7
File: TeamConnect.cpp  Project: MisterSquishy/nbites
void TeamConnect::send(const messages::WorldModel& model,
                       int player, int team, int burst = 1)
{
    if (!model.IsInitialized())
    {
#ifdef DEBUG_COMM
        std::cerr << "Comm does not have a valid input to send." << std::endl;
#endif
        return;
    }

    PROF_ENTER(P_COMM_BUILD_PACKET);

    portals::Message<messages::TeamPacket> teamMessage(0);

    messages::TeamPacket* packet = teamMessage.get();

    packet->mutable_payload()->CopyFrom(model);
    packet->set_sequence_number(myLastSeqNum++); // ONE LINE INCREMENT!!
    packet->set_player_number(player);
    packet->set_team_number(team);
    packet->set_header(UNIQUE_ID);
    packet->set_timestamp(timer->timestamp());

    PROF_EXIT(P_COMM_BUILD_PACKET);

    PROF_ENTER(P_COMM_SERIALIZE_PACKET);
    char datagram[packet->ByteSize()];
    packet->SerializeToArray(&datagram[0], packet->GetCachedSize());
    PROF_EXIT(P_COMM_SERIALIZE_PACKET);

    PROF_ENTER(P_COMM_TO_SOCKET);
    for (int i = 0; i < burst; ++i)
    {
        socket->sendToTarget(&datagram[0], packet->GetCachedSize());
    }
    PROF_EXIT(P_COMM_TO_SOCKET);
}
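
The serialization step above writes into a variable-length array, which is a compiler extension rather than standard C++. The same ByteSize()/SerializeToArray() calls work against heap storage; a sketch using std::vector, written against the generic protobuf Message interface rather than the nbites TeamPacket:

#include <vector>
#include <google/protobuf/message.h>

// Serialize any initialized protobuf message into a heap-backed buffer.
std::vector<char> serializeToBuffer(const google::protobuf::Message& msg)
{
    std::vector<char> buffer(msg.ByteSize());
    msg.SerializeToArray(buffer.data(), static_cast<int>(buffer.size()));
    return buffer;
}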
Code Example #8
File: VisionModule.cpp  Project: MisterSquishy/nbites
void VisionModule::run_()
{
    topThrImage.latch();
    topYImage.latch();
    topUImage.latch();
    topVImage.latch();
    botThrImage.latch();
    botYImage.latch();
    botUImage.latch();
    botVImage.latch();
    joint_angles.latch();
    inertial_state.latch();

    PROF_ENTER(P_VISION);

    vision->notifyImage(topThrImage.message(), topYImage.message(),
                        topUImage.message(), topVImage.message(),
                        botThrImage.message(), botYImage.message(),
                        botUImage.message(), botVImage.message(),
                        joint_angles.message(), inertial_state.message());

    PROF_EXIT(P_VISION);

    updateVisionBall();
    updateVisionRobot();
    updateVisionField();
    updateVisionObstacle();

#ifdef OFFLINE
    portals::Message<messages::ThresholdImage> top, bot;
    top = new messages::ThresholdImage(vision->thresh->betterDebugImage, 320, 240, 320);
    bot = new messages::ThresholdImage(vision->thresh->thresholdedBottom, 320, 240, 320);


    topOutPic.setMessage(top);
    botOutPic.setMessage(bot);

#endif

    /* To keep logs synced up, joint angles and inertial states are passed
     * through the vision system. Joint angles are sampled at roughly 100 Hz
     * while images arrive at 30 Hz, so routing joint angles through vision
     * yields them at 30 Hz alongside each image. */
#ifdef LOG_VISION
    joint_angles_out.setMessage(portals::Message<messages::JointAngles>(
                                &joint_angles.message()));
    inertial_state_out.setMessage(portals::Message<messages::InertialState>(
                                  &inertial_state.message()));
#endif
}
Code Example #9
File: LocalizationModule.cpp  Project: AdamBCo/nbites
void LocalizationModule::run_()
{
    // Profiler
    PROF_ENTER(P_SELF_LOC);

    motionInput.latch();
    visionInput.latch();
    resetInput.latch();

    update();

    // Profiler
    PROF_EXIT(P_SELF_LOC);
}
Code Example #10
void LocalizationModule::run_()
{
    PROF_ENTER(P_SELF_LOC);
    motionInput.latch();
    visionInput.latch();
#ifndef OFFLINE
    gameStateInput.latch();
    ballInput.latch();
    resetInput[0].latch();
    resetInput[1].latch();
#endif

    update();
    PROF_EXIT(P_SELF_LOC);
}
Code Example #11
File: ScriptedProvider.cpp  Project: CheddarB/nbites
void ScriptedProvider::calculateNextJointsAndStiffnesses(
    std::vector<float>&            sensorAngles,
    std::vector<float>&            sensorCurrents,
    const messages::InertialState& sensorInertials,
    const messages::FSR&           sensorFSRs)
{
    PROF_ENTER(P_SCRIPTED);

    if (currCommandEmpty())
        setNextBodyCommand(sensorAngles);

    // Go through the chains and enqueue the next
    // joints from the ChoppedCommand.
    boost::shared_ptr<std::vector<std::vector<float> > >
        currentChains(getCurrentChains(sensorAngles));

    currCommand->nextFrame(); // so Python can keep track of progress

    for (unsigned int id = 0; id < Kinematics::NUM_CHAINS; ++id) {
        Kinematics::ChainID cid = static_cast<Kinematics::ChainID>(id);
        if (currCommand->isDone()) {
            setNextChainJoints(cid, currentChains->at(cid));
        } else {
            setNextChainJoints(cid, currCommand->getNextJoints(cid));
        }
        // currCommand will always provide the current stiffnesses
        // even if it is finished providing new joint angles.
        setNextChainStiffnesses(cid, currCommand->getStiffness(cid));
    }

    setActive();

    PROF_EXIT(P_SCRIPTED);
}
Code Example #12
File: GuardianModule.cpp  Project: dmcavoy/nbites
void GuardianModule::run_()
{
    PROF_ENTER(P_ROBOGUARDIAN);

    temperaturesInput.latch();
    chestButtonInput.latch();
    footBumperInput.latch();
    inertialInput.latch();
    fsrInput.latch();
    batteryInput.latch();

    countButtonPushes();
    checkFalling();
    checkFallen();
    checkFeetOnGround();
    checkBatteryLevels();
    checkTemperatures();
    processFallingProtection();
    processChestButtonPushes();
    processFootBumperPushes();
    checkAudio();
    frameCount++;
    PROF_EXIT(P_ROBOGUARDIAN);
}
Code Example #13
File: DiagramThread.cpp  Project: MarcusEFC/nbites
// Overrides the RoboGram::run() method to do timing as well
void DiagramThread::RobotDiagram::run()
{
    // Start timer
    const long long startTime = realtime_micro_time();

    if (name == "cognition")
    {
        PROF_ENTER(P_COGNITION_THREAD);
    }
    else if (name == "sensors")
    {
        PROF_ENTER(P_MOTION_THREAD);
    }
    else if (name == "comm")
    {
        PROF_ENTER(P_COMM_THREAD);
    }
    else if (name == "guardian")
    {
        PROF_ENTER(P_GUARDIAN_THREAD);
    }

    RoboGram::run();

    if (name == "cognition")
    {
        PROF_EXIT(P_COGNITION_THREAD);
        // Count cognition frames
        PROF_NFRAME();
    }
    else if (name == "sensors")
    {
        PROF_EXIT(P_MOTION_THREAD);
    }
    else if (name == "comm")
    {
        PROF_EXIT(P_COMM_THREAD);
    }
    else if (name == "guardian")
    {
        PROF_EXIT(P_GUARDIAN_THREAD);
    }

    // Stop timer
    const long long processTime = realtime_micro_time() - startTime;

    // If we're under the frame length, this is good.
    // We can sleep for the rest of the frame and let others process.
    if (processTime < frameLengthMicro)
    {
        // Compute the time we should sleep from the amount of time
        // we processed this frame and the amount of time allotted to a frame
        const long int microSleepTime =
            static_cast<long int>(frameLengthMicro - processTime);
        const long int nanoSleepTime =
            static_cast<long int>((microSleepTime %(1000 * 1000)) * 1000);

        const long int secSleepTime =
            static_cast<long int>(microSleepTime / (1000*1000));

        interval.tv_sec = static_cast<time_t>(secSleepTime);
        interval.tv_nsec = nanoSleepTime;

        // Sleep!
        nanosleep(&interval, &remainder);
    }

#ifdef DEBUG_THREADS
    else if (processTime > frameLengthMicro*1.5) {
        std::cout << "Warning: time spent in " << name << " thread longer"
                  << " than frame length: " << processTime << " uS"
                  << std::endl;
    }
#endif

}
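
Both this example and Code Example #1 split a microsecond budget into the separate seconds and nanoseconds fields that nanosleep() expects. A small helper capturing just that conversion, not part of the nbites code, might look like:

#include <time.h>

// Hypothetical helper: convert a microsecond duration into the timespec
// layout required by nanosleep().
static struct timespec microsToTimespec(long long micros)
{
    struct timespec ts;
    ts.tv_sec  = static_cast<time_t>(micros / (1000 * 1000));
    ts.tv_nsec = static_cast<long>((micros % (1000 * 1000)) * 1000);
    return ts;
}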