Code Example #1
std::string 
ReplayConductor::toString( const IceUtil::Time &t )
{
    stringstream ss;
    ss << t.toSeconds() << ":" << t.toMicroSeconds()-t.toSeconds()*1e6;
    return ss.str();
}
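The remainder above is printed without zero padding, so 5 s + 123 µs comes out as "5:123", which is easy to misread as 5.123 s. A minimal variant, purely a sketch and not part of the original ReplayConductor, pads the microsecond field to six digits and keeps the remainder in 64-bit integers before any narrowing:

#include <IceUtil/Time.h>
#include <iomanip>
#include <sstream>
#include <string>

// Hypothetical helper (not from the original project): zero-pads the
// microsecond remainder so 5 s + 123 us prints as "5:000123".
std::string toStringPadded( const IceUtil::Time &t )
{
    IceUtil::Int64 sec  = t.toSeconds();
    IceUtil::Int64 usec = t.toMicroSeconds() - sec * 1000000;
    std::ostringstream ss;
    ss << sec << ":" << std::setw(6) << std::setfill('0') << usec;
    return ss.str();
}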
Code Example #2
File: PatchClientDlg.cpp Project: bholl/zeroc-ice
bool
CPatchDlg::patchProgress(Ice::Long, Ice::Long, Ice::Long totalProgress, Ice::Long totalSize)
{
    IceUtil::Time elapsed = IceUtil::Time::now(IceUtil::Time::Monotonic) - _startTime;
    if(elapsed.toSeconds() > 0)
    {
        CString speed;
        speed.Format(L" %s/s", convertSize(totalProgress / elapsed.toSeconds()));
        _speed->SetWindowText(speed);
    }

    int pcnt = 100;
    if(totalSize > 0)
    {
        pcnt = static_cast<int>(totalProgress * 100 / totalSize);
    }
    CString percent;
    percent.Format(L"%d%%", pcnt);
    _percent->SetWindowText(percent);

    CString total;
    total.Format(L" %s / %s", convertSize(totalProgress), convertSize(totalSize));
    _total->SetWindowText(total);

    _progress->SetPos(pcnt);

    processMessages();
    return !_isCancel;
}
Code Example #3
void
ReplayConductor::handleRewind( const IceUtil::Time &deltaT )
{
    bool wasPlaying = isPlaying_;
    if ( wasPlaying )
        handlePause();

    int sec, usec;
    masterFileReader_.getCursorTime( sec, usec );

    IceUtil::Time tNew = orcalog::iceUtilTime( sec, usec );
    tNew -= deltaT;
    masterFileReader_.placeCursorAtOrAfterTime( tNew.toSeconds(),
                                                (int)(tNew.toMicroSeconds()-tNew.toSeconds()*1e6) );

    if ( wasPlaying )
        handleStart();
}
Code Example #4
File: driver.cpp Project: mjs513/orca-robotics
void
Driver::read( hydrointerfaces::LaserScanner2d::Data &data )
{
    // tmp data storage
    hokuyo_aist::HokuyoData hokuyoData;

    try {
        laser_->GetRanges( &hokuyoData );
        // set the time stamp right away
        IceUtil::Time t = IceUtil::Time::now();
        data.timeStampSec = (int)t.toSeconds();
        data.timeStampUsec = (int)t.toMicroSeconds() - data.timeStampSec*1000000;

        //TODO: Michael 23/Sept/08
        //this timeStamp is actually at the _end_ of the scan; also
        //scan time is _not_ negligible for the Hokuyos (10-30Hz --> 100-33ms)
        //
        //--> need to keep track of delta time between scans, and use fieldOfView
        //to calculate a back-correction, assuming 1 rotation per scan (true for
        //URG and UHG, unknown for TopURG). Also should put something like
        //durationOfScan into the interface.
        //
        //This would allow us to interpolate the sensor pose for each individual
        //range reading. Yes, I really think this is necessary.
        //(A hypothetical sketch of this back-correction follows this example.)
    }
    catch (hokuyo_aist::HokuyoError &e)
    {
        std::stringstream ss;
        ss << "Read on urg_nz failed. Errorcode: "
            << e.Code()
            << ", Errordescription: "
            << e.what()
            << " continuing";
        //context_.tracer().error( ss.str() ,2 );
        throw gbxutilacfr::HardwareException( ERROR_INFO, ss.str() );
    }

    const uint32_t* ranges = hokuyoData.Ranges();
    //const uint32_t* intensities = hokuyoData.Intensities();
    size_t length = hokuyoData.Length();
    assert(config_.numberOfSamples == (int)length); // make sure the receiving end has enough space
    double minRangeHokuyo = config_.minRange*1000.0; // convert to mm
    for (unsigned int i=0; i<length; ++i )
    {
        if(ranges[i] < minRangeHokuyo){
            //these guys are error-codes (per reading); setting to maxRange to get behavior similar to SICK
            data.ranges[i]  = config_.maxRange;
        }else{
            data.ranges[i]  = ranges[i] / 1000.0;
        }
    }
}
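The TODO in the example above describes a timestamp back-correction but does not implement it. The sketch below is only an illustration of that idea under stated assumptions (one mirror rotation per scan, a constant sweep rate, timestamp taken at the last reading); ScanTiming and readingTime are hypothetical names, not part of the orca-robotics driver:

#include <IceUtil/Time.h>

// Assumed scan geometry/timing; values would come from the laser config.
struct ScanTiming
{
    double fieldOfViewRad;    // angular extent of the scan [rad]
    double rotationPeriodSec; // time for one full mirror rotation [s]
};

// Time at which reading i (0..numReadings-1) was taken, given the
// end-of-scan timestamp.
IceUtil::Time readingTime( const IceUtil::Time &endOfScan,
                           const ScanTiming    &timing,
                           int i, int numReadings )
{
    if ( numReadings < 2 )
        return endOfScan;

    const double pi = 3.14159265358979323846;

    // The visible sweep covers fieldOfView out of one full rotation.
    const double scanDurationSec =
        timing.rotationPeriodSec * timing.fieldOfViewRad / (2.0 * pi);

    // How far this reading lies before the end of the scan.
    const double backSec =
        scanDurationSec * double(numReadings - 1 - i) / double(numReadings - 1);

    return endOfScan - IceUtil::Time::secondsDouble( backSec );
}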
Code Example #5
File: CameraUtils.cpp Project: Diegojnb/JdeRobot
jderobot::ImageDataPtr CameraUtils::convert(const cv::Mat &image) {

    jderobot::ImageDataPtr reply=jderobot::ImageDataPtr(new jderobot::ImageData());
    reply->description = jderobot::ImageDescriptionPtr(new jderobot::ImageDescription());
    IceUtil::Time t = IceUtil::Time::now();
    reply->timeStamp.seconds = (long)t.toSeconds();
    reply->timeStamp.useconds = (long)t.toMicroSeconds() - reply->timeStamp.seconds*1000000;
    reply->description->format = colorspaces::ImageRGB8::FORMAT_RGB8.get()->name;
    reply->description->width=image.size().width;
    reply->description->height=image.size().height;
    reply->pixelData.resize(image.rows*image.cols * image.channels());
    memcpy(&(reply->pixelData[0]),(unsigned char *) image.data, image.rows*image.cols * image.channels());
    return reply;
}
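The single memcpy above assumes the cv::Mat stores its pixels contiguously; a Mat that is a region of interest or has row padding would be copied incorrectly. A hedged sketch of a continuity-aware copy (copyPixels is a hypothetical helper, not part of CameraUtils):

#include <opencv2/core/core.hpp>
#include <cstring>
#include <vector>

// Copies the pixel bytes of 'image' into 'out', row by row if the
// matrix is not stored contiguously.
void copyPixels( const cv::Mat &image, std::vector<unsigned char> &out )
{
    const size_t rowBytes = static_cast<size_t>(image.cols) * image.elemSize();
    out.resize( rowBytes * image.rows );

    if ( image.isContinuous() )
    {
        std::memcpy( out.data(), image.data, out.size() );
    }
    else
    {
        for ( int r = 0; r < image.rows; ++r )
            std::memcpy( out.data() + r * rowBytes, image.ptr(r), rowBytes );
    }
}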
Code Example #6
File: driver.cpp Project: mjs513/orca-robotics
void
Driver::read( hydrointerfaces::Image::Data &data )
{
    context_.tracer().debug( "Copying image file data..." );

    //Qt stores each scanline in a 32-bit-aligned uint32_t buffer rather than a tightly packed byte array, so we must cut off the padding at the end of each scanline.
    uint32_t bytesPerLine = static_cast<uint32_t>(image_.width()) * 3;
    for(uint32_t i = 0; i < static_cast<uint32_t>(image_.height()); i++)
    {
        memcpy(data.pixelData+i*bytesPerLine, image_.scanLine(i), bytesPerLine);
    }

    IceUtil::Time t = IceUtil::Time::now();
    data.timeStampSec = (int)t.toSeconds();
    data.timeStampUsec = (int)t.toMicroSeconds() - data.timeStampSec*1000000;
}
Code Example #7
File: imagei.cpp Project: JdeRobot4Air/v4l2server
 void ReplyTask::run() {
   jderobot::ImageDataPtr reply(new jderobot::ImageData);
   while (1) {
     // Stamp each reply with the current wall-clock time.
     IceUtil::Time t = IceUtil::Time::now();
     reply->timeStamp.seconds = (long) t.toSeconds();
     reply->timeStamp.useconds = (long) t.toMicroSeconds()
         - reply->timeStamp.seconds * 1000000;
     {  //critical region start
       IceUtil::Mutex::Lock sync(requestsMutex);
       while (!requests.empty()) {
         jderobot::AMD_ImageProvider_getImageDataPtr cb = requests.front();
         requests.pop_front();
         cb->ice_response(reply);
       }
     }
   }
 }
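Note that the loop above keeps spinning even when requests is empty. One possible refinement, shown only as a sketch (RequestQueue is a hypothetical class, not part of the v4l2server code), is to block on an IceUtil::Monitor until a request is queued:

#include <IceUtil/Monitor.h>
#include <IceUtil/Mutex.h>
#include <list>

// Assumes the jderobot AMD callback type from the surrounding file.
class RequestQueue
{
public:
    // Called from the AMD dispatch (e.g. the async getImageData entry point).
    void push( const jderobot::AMD_ImageProvider_getImageDataPtr &cb )
    {
        IceUtil::Monitor<IceUtil::Mutex>::Lock lock(monitor_);
        requests_.push_back(cb);
        monitor_.notify();          // wake the reply task
    }

    // Called from ReplyTask::run(); blocks until a request is available.
    jderobot::AMD_ImageProvider_getImageDataPtr pop()
    {
        IceUtil::Monitor<IceUtil::Mutex>::Lock lock(monitor_);
        while ( requests_.empty() )
            monitor_.wait();        // releases the lock while sleeping
        jderobot::AMD_ImageProvider_getImageDataPtr cb = requests_.front();
        requests_.pop_front();
        return cb;
    }

private:
    IceUtil::Monitor<IceUtil::Mutex> monitor_;
    std::list<jderobot::AMD_ImageProvider_getImageDataPtr> requests_;
};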
Code Example #8
File: driver.cpp Project: mjs513/orca-robotics
void
Driver::read( hydrointerfaces::MultiImage::Data& data )
{

    context_.tracer().debug( "Grabbing frame(s) from camera(s)..." );
   
    for( unsigned int i=0; i<cameras_.size(); ++i )
    {
        // This performs the grab and the retrieve with one cv function call
        frames_.at(i) = cvQueryFrame( cameras_.at(i) );
        if( frames_.at(i) == NULL )
        {
            stringstream ss;
            ss << "Failed to retrieve frame from Camera " << i;
            throw gbxutilacfr::Exception( ERROR_INFO, ss.str() );
        }
    
        memcpy( data.at(i).pixelData, frames_.at(i)->imageData, frames_.at(i)->imageSize );
        
        IceUtil::Time t = IceUtil::Time::now();
        data.at(i).timeStampSec = (int)t.toSeconds();
        data.at(i).timeStampUsec = (int)t.toMicroSeconds() - data.at(i).timeStampSec*1000000;
    }
}