示例#1
0
void KinectDevice::getFrameInformation() {
    // Query each enabled stream's frame source for its FrameDescription,
    // cache the geometry in the per-stream FrameInfo members, and allocate
    // the pixel buffers that incoming frames will be copied into.
    //
    // NOTE(review): the HRESULTs of the get_* calls are not checked, and the
    // source/description COM objects are never Release()d here; confirm
    // whether FrameInfo takes ownership before adding Release() calls.
    IDepthFrameSource *depthSrc;
    IColorFrameSource *colorSrc;
    IInfraredFrameSource *irSrc;
    ILongExposureInfraredFrameSource *hdirSrc;
    IBodyIndexFrameSource *indexSrc;

    IFrameDescription *depthDesc, *colorDesc, *irDesc, *hdirDesc, *indexDesc;

    if (_streams & Streams::DEPTH_STREAM) {
        _sensor->get_DepthFrameSource(&depthSrc);
        depthSrc->get_FrameDescription(&depthDesc);
        depthFrameInfo = FrameInfo(depthDesc);

        // Min/max reliable depth values reported by the sensor.
        depthSrc->get_DepthMinReliableDistance(&depthFrameInfo.minVal);
        depthSrc->get_DepthMaxReliableDistance(&depthFrameInfo.maxVal);

        // Allocate. BUGFIX: these buffers come from new[], so the shared_ptr
        // needs an array deleter -- the default deleter calls scalar delete
        // on array storage, which is undefined behavior.
        depthData = std::shared_ptr<uint16_t>(
            new uint16_t[depthFrameInfo.frameSize], std::default_delete<uint16_t[]>());
        prevDepthData = std::shared_ptr<uint16_t>(
            new uint16_t[depthFrameInfo.frameSize], std::default_delete<uint16_t[]>());
    }
    else {
        depthData = nullptr;
        prevDepthData = nullptr;
    }

    if (_streams & Streams::COLOR_STREAM) {
        _sensor->get_ColorFrameSource(&colorSrc);
        colorSrc->get_FrameDescription(&colorDesc);
        colorFrameInfo = FrameInfo(colorDesc);

        colorData = std::shared_ptr<uint16_t>(
            new uint16_t[colorFrameInfo.frameSize], std::default_delete<uint16_t[]>());
    }
    else {
        // Keep the disabled-stream behavior consistent with the depth branch.
        colorData = nullptr;
    }
    if (_streams & Streams::IR_STREAM) {
        _sensor->get_InfraredFrameSource(&irSrc);
        irSrc->get_FrameDescription(&irDesc);
        irFrameInfo = FrameInfo(irDesc);

        irData = std::shared_ptr<uint16_t>(
            new uint16_t[irFrameInfo.frameSize], std::default_delete<uint16_t[]>());
    }
    else {
        irData = nullptr;
    }
    if (_streams & Streams::HDIR_STREAM) {
        _sensor->get_LongExposureInfraredFrameSource(&hdirSrc);
        hdirSrc->get_FrameDescription(&hdirDesc);
        hdirFrameInfo = FrameInfo(hdirDesc);

        hdirData = std::shared_ptr<uint16_t>(
            new uint16_t[hdirFrameInfo.frameSize], std::default_delete<uint16_t[]>());
    }
    else {
        hdirData = nullptr;
    }
    if (_streams & Streams::INDEX_STREAM) {
        _sensor->get_BodyIndexFrameSource(&indexSrc);
        indexSrc->get_FrameDescription(&indexDesc);
        indexFrameInfo = FrameInfo(indexDesc);

        indexData = std::shared_ptr<BYTE>(
            new BYTE[indexFrameInfo.frameSize], std::default_delete<BYTE[]>());
    }
    else {
        indexData = nullptr;
    }
}
示例#2
0
 void FrameDB::add_info(const string& line) {
   // Parse one "|"-separated symtab line carrying file/line info for a
   // (module, offset) pair and record it under that key.
   // Field layout (inferred from the uses below): parts[0]=source file or
   // "?", parts[1]=line number, parts[2]=function, parts[3]=module,
   // parts[4]=offset.
   vector<string> parts;
   split(line, "|", parts);

   // BUGFIX: guard against truncated lines before indexing up to parts[4].
   if (parts.size() < 5) {
     cerr << "Malformed line in viewer-data/symtab: " << line << endl;
     exit(1);
   }

   char *err;
   // BUGFIX: offsets are unsigned and may exceed LONG_MAX on platforms where
   // long is 32 bits; strtol would overflow there, so use strtoull.
   uintptr_t offset = strtoull(parts[4].c_str(), &err, 0);
   if (*err) {
     cerr << "Invalid offset in viewer-data/symtab: " << parts[4] << endl;
     exit(1);
   }
   string module = parts[3];
   FrameId key(module, offset);

   if (parts[0] == "?") {
     // Unknown source location: store an empty placeholder record.
     frames[key] = FrameInfo();

   } else {
     int line_num = strtol(parts[1].c_str(), &err, 10);
     if (*err) {
       cerr << "Invalid line number in viewer-data/symtab: " << parts[1] << endl;
       exit(1);
     }

     FrameInfo info(module, offset, parts[0], line_num, parts[2]);
     frames[key] = info;
   }
 }
示例#3
0
// Wire up the PWii GUI: prime the output frame series, start the GUI thread,
// and register one meter per controller telemetry channel plus the IR image.
// Each addMeter() call takes (value pointer, label, full-scale value, color).
void setupGUI(nub::soft_ref<GeneralGUI> pwiiGUI, nub::soft_ref<OutputFrameSeries> ofs, nub::soft_ref<PWiiController> controller) {

  // Push a 1x1 placeholder frame so the "Output" stream exists before the
  // GUI thread starts rendering into it.
  Image<PixRGB<byte> > img(1,1,ZEROS);
  ofs->writeRGB(img, "Output", FrameInfo("output", SRC_POS));

  pwiiGUI->startThread(ofs);
  pwiiGUI->setupGUI(controller.get(), true);

  //Setup Meters
  // Motor speeds and directions (red / yellow-green).
  pwiiGUI->addMeter(controller->getMotor1SpeedPtr(),
        "Motor 1 Speed", 100, PixRGB<byte>(255, 0, 0));
  pwiiGUI->addMeter(controller->getMotor2SpeedPtr(),
        "Motor 2 Speed", 100, PixRGB<byte>(255, 0, 0));
  pwiiGUI->addMeter(controller->getMotor1DirPtr(),
        "Motor 1 Direction", 4, PixRGB<byte>(192, 255, 0));
  pwiiGUI->addMeter(controller->getMotor2DirPtr(),
        "Motor 2 Direction", 4, PixRGB<byte>(192, 255, 0));
  pwiiGUI->addMeter(controller->getTransVelPtr(),
        "Translational Velocity", 100, PixRGB<byte>(192, 255, 0));
  pwiiGUI->addMeter(controller->getRotVelPtr(),
        "Rotational Velocity", 100, PixRGB<byte>(192, 255, 0));

  // Accelerometer axes (light blue), full scale 255.
  pwiiGUI->addMeter(controller->getXAccelPtr(),
        "X Acceleration", 255, PixRGB<byte>(192, 192, 255));
  pwiiGUI->addMeter(controller->getYAccelPtr(),
        "Y Acceleration", 255, PixRGB<byte>(192, 192, 255));
  pwiiGUI->addMeter(controller->getZAccelPtr(),
        "Z Acceleration", 255, PixRGB<byte>(192, 192, 255));

  // Wiimote battery level (gray).
  pwiiGUI->addMeter(controller->getBatteryPtr(),
          "Wiimote Battery", 255, PixRGB<byte>(100,100,100));

  // Live IR camera image from the wiimote.
  pwiiGUI->addImage(controller->getIRImagePtr());

}
bool OculusBaseDisplayPlugin::beginFrameRender(uint32_t frameIndex) {
    // Begin a new render frame: sample the clock, predict the display time
    // for this frame index, and capture the head pose predicted for then.
    _currentRenderFrameInfo = FrameInfo();
    _currentRenderFrameInfo.sensorSampleTime = ovr_GetTimeInSeconds();
    _currentRenderFrameInfo.predictedDisplayTime = ovr_GetPredictedDisplayTime(_session, frameIndex);
    auto trackingState = ovr_GetTrackingState(_session, _currentRenderFrameInfo.predictedDisplayTime, ovrTrue);
    _currentRenderFrameInfo.renderPose = toGlm(trackingState.HeadPose.ThePose);
    _currentRenderFrameInfo.presentPose = _currentRenderFrameInfo.renderPose;

    std::array<glm::mat4, 2> handPoses;
    // Make controller poses available to the presentation thread
    ovr_for_each_hand([&](ovrHandType hand) {
        // BUGFIX: the required mask must OR the two status bits together.
        // '&' of the two distinct flag values evaluates to 0, which made the
        // check below accept every hand, tracked or not.
        static const auto REQUIRED_HAND_STATUS = ovrStatus_OrientationTracked | ovrStatus_PositionTracked;
        if (REQUIRED_HAND_STATUS != (trackingState.HandStatusFlags[hand] & REQUIRED_HAND_STATUS)) {
            return; // skip hands that are not fully tracked
        }

        auto correctedPose = ovrControllerPoseToHandPose(hand, trackingState.HandPoses[hand]);
        // Rotate the controller pose into the orientation used for laser pointing.
        static const glm::quat HAND_TO_LASER_ROTATION = glm::rotation(Vectors::UNIT_Z, Vectors::UNIT_NEG_Y);
        handPoses[hand] = glm::translate(glm::mat4(), correctedPose.translation) * glm::mat4_cast(correctedPose.rotation * HAND_TO_LASER_ROTATION);
    });

    // Publish everything the present thread consumes under its lock.
    withRenderThreadLock([&] {
        _uiModelTransform = DependencyManager::get<CompositorHelper>()->getModelTransform();
        _handPoses = handPoses;
        _frameInfos[frameIndex] = _currentRenderFrameInfo;
    });
    return Parent::beginFrameRender(frameIndex);
}
示例#5
0
// ######################################################################
void BeoGPS::plotGPS()
{
   // Render the current GPS fix onto the map display, throttled so we only
   // redraw once every itsDisplayUpdateRate seconds.
   if(itsDisplayTimer.getSecs() > itsDisplayUpdateRate )
     {
       itsDisplayTimer.reset();

       // Draw The Map: plot the current position relative to the image center.
       double mapScale = 1.0;
       Point2D<int> drawPos
         (int(itsPosition.x*mapScale + itsDispImage.getWidth()/2),
          int(itsPosition.y*mapScale + itsDispImage.getHeight()/2));
       if(itsDispImage.coordsOk(drawPos))
         itsDispImage.setVal(drawPos, PixRGB<byte>(0,255,0));

       // NOTE(review): the circle is drawn on a local copy of the map which
       // is never displayed or written out -- confirm whether the copy (or
       // the circle) is still wanted.
       Image<PixRGB<byte> > itsDrawitsDispImage(itsDispImage);
       drawCircle(itsDrawitsDispImage, drawPos, 7, PixRGB<byte>(255,255,255));

       // BUGFIX: the format string prints "Lon:" before "Lat:", but the
       // latitude was passed first, so the two values were displayed under
       // the wrong labels. Also use snprintf to bound writes to the buffer.
       char buffer[128];
       snprintf(buffer, sizeof(buffer),
                "X=%2.2f Y=%2.2f Lon: %1.5f Lat: %1.5f PCSN: %-3d SatNum: %-3d",
                itsPosition.x,itsPosition.y,
                itsData.lon,itsData.lat,
                itsData.precision,itsData.satNum);
       writeText(itsDispImage, Point2D<int>(0,0), buffer, PixRGB<byte>(255,255,255),
                 PixRGB<byte>(0,0,0),SimpleFont::FIXED(8));

       //LDEBUG("%s",buffer);
       itsOfs->writeRGB(itsDispImage, "GPSData", FrameInfo("GPSData",SRC_POS));
       itsOfs->updateNext();
     }
}
// -----------------------------------------------------------------------------
// CAMRAudioControllerUtility::SeekSync
// -----------------------------------------------------------------------------
//
// Scan aBuf for the byte offset of the first position that looks like the
// start of a run of KMaxFrames valid AMR frames. Returns that offset, or
// aBufLen if no sync position was found.
TInt CAMRAudioPlayControllerDecoder::SeekSync(TUint8* aBuf, TInt aBufLen)
    {
    const TInt KMaxFrames = 3;          // number of frames to check
    const TInt KNotFound = aBufLen;     // sync not found position
    TAudioFrameInfo frameInfo;			// frame parameters
    TInt i = 0;                         // candidate offset being tested
    TInt syncPos = KNotFound;
    TInt maxSeek = KMaxFrames;
    const TUint8* endPtr = aBuf + aBufLen;

    // Seek a valid frame candidate byte by byte until a valid frame
    // is found or all bytes have been checked.
    while (aBuf < endPtr  &&  syncPos == KNotFound)
    	{
        TInt seekCount = 0;
        const TUint8* framePtr = aBuf;
        TInt frameBufLen = aBufLen;
        // Tentatively accept this offset; the inner loop vetoes it by
        // resetting syncPos to KNotFound when a frame fails to parse.
        syncPos = i;
        // Check the validity of this frame candidate and the nearest next
        // frames. If they are not OK, syncPos will be set to KNotFound.
        while (framePtr < endPtr  &&  syncPos != KNotFound  &&  seekCount < maxSeek)
        	{
            // FrameInfo() returns the frame length in bytes, or 0 when the
            // bytes at framePtr are not a valid frame header.
            TInt length = FrameInfo(framePtr, frameBufLen, frameInfo);
            // Only treat a zero length as a failure when enough bytes were
            // available to hold a full header; a short tail is tolerated.
            if (frameBufLen >= KAmrFrameHeaderSize1  &&  length == 0)
            	{
				syncPos = KNotFound;
				}
            framePtr += length;
            frameBufLen -= length;
            seekCount++;
        	}
        // Advance to the next candidate byte.
        aBuf++; aBufLen--; i++;
    	}
    return syncPos;
    }
bool RobotBrainServiceService::start(int argc, char* argv[])
{
  char adapterStr[255];

  //Create the adapter
  int port = RobotBrainObjects::RobotBrainPort;
  bool connected = false;

  while(!connected)
    {
      try
        {
          LINFO("Trying Port:%d", port);
          sprintf(adapterStr, "default -p %i", port);
          itsAdapter = communicator()->createObjectAdapterWithEndpoints("SeaBee3SimulatorAdapter",
                                                                        adapterStr);
          connected = true;
        }
      catch(Ice::SocketException)
        {
          port++;
        }
    }

  //Create the manager and its objects
  itsMgr = new ModelManager("SeaBee3SimulatorServiceManager");

  nub::ref<OutputFrameSeries> ofs(new OutputFrameSeries(*itsMgr));
  itsMgr->addSubComponent(ofs);


  LINFO("Starting SeaBee3 Simulator");
  nub::ref<SeaBee3Simulator> subSim(new SeaBee3Simulator(*itsMgr, "SeaBee3Simulator", "SeaBee3Simulator"));
  itsMgr->addSubComponent(subSim);
  subSim->init(communicator(), itsAdapter);

  itsMgr->parseCommandLine((const int)argc, (const char**)argv, "", 0, 0);

  itsAdapter->activate();

  itsMgr->start();

  while(1){
    Layout<PixRGB<byte> > outDisp;

    subSim->simLoop();
    Image<PixRGB<byte> > forwardCam = flipVertic(subSim->getFrame(1));
    Image<PixRGB<byte> > downwardCam = flipVertic(subSim->getFrame(2));

    outDisp = vcat(outDisp, hcat(forwardCam, downwardCam));

    ofs->writeRgbLayout(outDisp, "subSim", FrameInfo("subSim", SRC_POS));

    handle_keys(ofs, subSim);
  }


  return true;
}
示例#8
0
 FrameInfo FrameDB::info_for(FrameId key) {
   // Resolve any alias for the key, then look it up; a default-constructed
   // FrameInfo is returned when the frame is unknown.
   frame_map::iterator hit = frames.find(unalias(key));
   return (hit == frames.end()) ? FrameInfo() : hit->second;
 }
示例#9
0
文件: Logger.C 项目: binary42/avedac
// Crop the region around a visual event out of frame `frameNum` of `img`,
// padding/clamping it to the event's maximum object size (scaled by
// itsScaleW/itsScaleH), and write it out as a single evtNNNN_ frame.
void Logger::saveSingleEventFrame(MbariImage< PixRGB<byte> >& img,
        int frameNum,
        MbariVisualEvent::VisualEvent *event) {
    ASSERT(event->frameInRange(frameNum));

    // create the file stem: optional feature prefix + zero-padded event number
    string evnum;
    if (itsSaveEventFeatures.getVal().length() > 0)
        evnum = sformat("%s_evt%04d_", itsSaveEventFeatures.getVal().c_str(), event->getEventNum() );
    else
        evnum = sformat("evt%04d_", event->getEventNum());

    // Target crop size: the event's largest bounding box, scaled.
    // (The float products narrow back to int inside Dims.)
    Dims maxDims = event->getMaxObjectDims();
    Dims d((float)maxDims.w()*itsScaleW, (float)maxDims.h()*itsScaleH);

    // compute the correct bounding box and cut it out
    Rectangle bbox1 = event->getToken(frameNum).bitObject.getBoundingBox();
    Rectangle bbox = Rectangle::tlbrI(bbox1.top()*itsScaleH, bbox1.left()*itsScaleW,
                                    bbox1.bottomI()*itsScaleH, bbox1.rightI()*itsScaleW);
    //Point2D cen = event.getToken(frameNum).bitObject.getCentroid();

    // first the horizontal direction: center the box inside width d.w(),
    // then shift it back inside the image if it spills past either edge.
    int wpad = (d.w() - bbox.width()) / 2;
    int ll = bbox.left() - wpad;
    //int ll = cen.i - d.w() / 2;
    int rr = ll + d.w();
    if (ll < 0) {
        rr -= ll;
        ll = 0;
    }
    if (rr >= img.getWidth()) {
        rr = img.getWidth() - 1;
        ll = rr - d.w();
    }

    // now the same thing with the vertical direction
    int hpad = (d.h() - bbox.height()) / 2;
    int tt = bbox.top() - hpad;
    //int tt = cen.j - d.h() / 2;
    int bb = tt + d.h();
    if (tt < 0) {
        bb -= tt;
        tt = 0;
    }
    if (bb >= img.getHeight()) {
        bb = img.getHeight() - 1;
        tt = bb - d.h();
    }

    // Final clamp: intersect with the image bounds in case the crop is still
    // partially outside (e.g. when d is larger than the image itself).
    Rectangle bboxFinal = Rectangle::tlbrI(tt, ll, bb, rr);
    bboxFinal = bboxFinal.getOverlap(Rectangle(Point2D<int>(0, 0), img.getDims() - 1));

    // scale if needed and cut out the rectangle and save it
    Image< PixRGB<byte> > cut = crop(img, bboxFinal);
    itsOfs->writeFrame(GenericFrame(cut), evnum, FrameInfo(evnum, SRC_POS));
}
// ######################################################################
void AttentionGuidanceMapNF::save1(const ModelComponentSaveInfo& sinfo)
{
  if (itsSaveResults.getVal())
    {
      // get the OFS to save to, assuming sinfo is of type
      // SimModuleSaveInfo (will throw a fatal exception otherwise):
      nub::ref<FrameOstream> ofs =
        dynamic_cast<const SimModuleSaveInfo&>(sinfo).ofs;      
      ofs->writeRgbLayout(itsNF->getDisplay(), "AGM-NF", FrameInfo("NF Model Output", SRC_POS));
    }
}
示例#11
0
SafeFrameIterator::SafeFrameIterator(IFrameIterator *it)
{
	// Snapshot every frame the source iterator reports into our own list,
	// so callers can iterate safely even if the source changes later.
	for (; !it->Done(); it->Next())
	{
		frames.append(FrameInfo(it));
	}

	// Leave the source iterator rewound, and start our cursor at the front.
	it->Reset();
	current = 0;
}
// ######################################################################
// Write this channel's intermediate and final maps to the output frame
// series, gated by the itsSaveRawMaps / itsSaveFeatureMaps /
// itsSaveOutputMap model parameters.
void IntegerSimpleChannel::saveResults(const nub::ref<FrameOstream>& ofs)
{
GVX_TRACE(__PRETTY_FUNCTION__);

  if (itsPyr.isEmpty() == false)
    {
      // save raw pyramid levels? (one float image per pyramid scale)
      if (itsSaveRawMaps.getVal()) {
        for (uint i = 0; i < itsPyr.size(); i ++)
          ofs->writeFloat(Image<float>(itsPyr[i]), FLOAT_NORM_0_255,
                          sformat("ISR%s-%d-", tagName().c_str(),i),
                          FrameInfo(sformat("%s IntegerSimpleChannel raw map (%u of %u)",
                                            this->descriptiveName().c_str(),
                                            i, itsPyr.size()),
                                    SRC_POS));
      }

      // save center-surround feature submaps? (one per (center, surround)
      // scale pair, decoded from the flat submap index)
      if (itsSaveFeatureMaps.getVal())
        for (uint i = 0; i < numSubmaps(); i ++) {
          uint clev = 0, slev = 0;
          itsLevelSpec.getVal().indexToCS(i, clev, slev);
          ofs->writeFloat(Image<float>(getSubmapInt(i)),
                          FLOAT_NORM_0_255,
                          sformat("ISF%s-%d-%d-", tagName().c_str(),clev, slev),
                          FrameInfo(sformat("%s IntegerSimpleChannel center-surround map (c=%u s=%u)",
                                            this->descriptiveName().c_str(),
                                            clev, slev),
                                    SRC_POS));
        }
    }

  // save output map? (written even when the pyramid is empty)
  if (itsSaveOutputMap.getVal())
    ofs->writeFloat(getOutput(), FLOAT_NORM_0_255,
                    sformat("ISO%s-", tagName().c_str()),
                    FrameInfo(sformat("%s IntegerSimpleChannel output",
                                      this->descriptiveName().c_str()),
                              SRC_POS));
}
示例#13
0
// ######################################################################
void TaskRelevanceMapTigs::save1(const ModelComponentSaveInfo& sinfo)
{
  if (itsSaveResults.getVal())
    {
      // get the OFS to save to, assuming sinfo is of type
      // SimModuleSaveInfo (will throw a fatal exception otherwise):
      nub::ref<FrameOstream> ofs =
        dynamic_cast<const SimModuleSaveInfo&>(sinfo).ofs;

      ofs->writeFloat(itsCurrentTDMap, FLOAT_NORM_0_255, "TRM-SB",
                      FrameInfo("task relevance map static buffer", SRC_POS));
    }
}
// ######################################################################
void AttentionGuidanceMap::save1(const ModelComponentSaveInfo& sinfo)
{
  if (itsSaveResults.getVal())
    {
      // get the OFS to save to, assuming sinfo is of type
      // SimModuleSaveInfo (will throw a fatal exception otherwise):
      nub::ref<FrameOstream> ofs =
        dynamic_cast<const SimModuleSaveInfo&>(sinfo).ofs;

      ofs->writeFloat(this->getV(), FLOAT_NORM_PRESERVE, "AGM",
                      FrameInfo("overall attention guidance map", SRC_POS));
    }
}
示例#15
0
// ######################################################################
void TaskRelevanceMapAdapter::save1(const ModelComponentSaveInfo& sinfo)
{
  if (itsSaveResults.getVal())
    {
      // get the OFS to save to, assuming sinfo is of type
      // SimModuleSaveInfo (will throw a fatal exception otherwise):
      nub::ref<FrameOstream> ofs =
        dynamic_cast<const SimModuleSaveInfo&>(sinfo).ofs;

      ofs->writeFloat(itsMap, FLOAT_NORM_PRESERVE, "TRM",
                      FrameInfo("task relevance map (top-down)", SRC_POS));
    }
}
// Report the length of the frame starting at aBuf via aFrameLength.
// Returns KErrNone on success; KErrUnknown when the bytes do not parse as a
// valid frame (aFrameLength is left untouched in that case).
TInt CAMRAudioPlayControllerDecoder::FrameLength(const TUint8* aBuf, TInt aBufLen, TInt& aFrameLength)
	{
	TAudioFrameInfo parsed;
	const TInt len = FrameInfo(aBuf, aBufLen, parsed);
	if (len <= 0)
		{
		return KErrUnknown;
		}
	aFrameLength = len;
	return KErrNone;
	}
示例#17
0
// Debug HMD stand-in: build a frame record from the wall clock (no real
// tracking hardware) and publish fixed hand poses/lasers for the present
// thread.
bool DebugHmdDisplayPlugin::beginFrameRender(uint32_t frameIndex) {
    _currentRenderFrameInfo = FrameInfo();
    _currentRenderFrameInfo.sensorSampleTime = secTimestampNow();
    // No prediction without hardware: display time == sample time.
    _currentRenderFrameInfo.predictedDisplayTime = _currentRenderFrameInfo.sensorSampleTime;
    // FIXME simulate head movement
    //_currentRenderFrameInfo.renderPose = ;
    //_currentRenderFrameInfo.presentPose = _currentRenderFrameInfo.renderPose;

    // Publish shared state under the lock used by the non-present threads.
    withNonPresentThreadLock([&] {
        _uiModelTransform = DependencyManager::get<CompositorHelper>()->getModelTransform();
        _frameInfos[frameIndex] = _currentRenderFrameInfo;
        
        // Fake left hand 0.3m to the left, red laser drawn as overlay.
        _handPoses[0] = glm::translate(mat4(), vec3(-0.3f, 0.0f, 0.0f));
        _handLasers[0].color = vec4(1, 0, 0, 1);
        _handLasers[0].mode = HandLaserMode::Overlay;

        // Fake right hand 0.3m to the right, cyan laser drawn as overlay.
        _handPoses[1] = glm::translate(mat4(), vec3(0.3f, 0.0f, 0.0f));
        _handLasers[1].color = vec4(0, 1, 1, 1);
        _handLasers[1].mode = HandLaserMode::Overlay;
    });
    return Parent::beginFrameRender(frameIndex);
}
// Advance through the stream, consuming structural tags (sync markers, new
// sources, stats, headers) until the next frame header is reached. Returns
// that frame's header, or a default FrameInfo at end of stream.
PacketStreamReader::FrameInfo PacketStreamReader::_nextFrame()
{
    while (1)
    {
        auto t = _stream.peekTag();

        switch (t)
        {
        case TAG_PANGO_SYNC:
            SkipSync();
            break;
        case TAG_ADD_SOURCE:
            // Register the newly announced source, then keep scanning.
            ParseNewSource();
            break;
        case TAG_SRC_JSON: //frames are sometimes preceded by metadata, but metadata must ALWAYS be followed by a frame from the same source.
        case TAG_SRC_PACKET:
            // Found a frame (or its metadata): hand back its header without
            // consuming it.
            return _stream.peekFrameHeader(*this);
        case TAG_PANGO_STATS:
            ParseIndex();
            break;
        case TAG_PANGO_FOOTER: //end of frames
        case TAG_END:
            return FrameInfo(); //none
        case TAG_PANGO_HDR: //shoudln't encounter this
            ParseHeader();
            break;
        case TAG_PANGO_MAGIC: //or this
            SkipSync();
            break;
        default: //or anything else
            // Unknown tag: warn and resynchronize to the next sync marker.
            pango_print_warn("Unexpected packet type: \"%s\". Resyncing()\n", tagName(t).c_str());
            ReSync();
            break;
        }
    }

}
示例#19
0
// Prediction step of the particle filter: when the sub reports it is moving,
// advance every particle along its orientation by the distance implied by
// the elapsed time, injecting translational and drift noise; otherwise just
// refresh the timestamp. Finally replot the particle map and write it out.
void Localization::predict(RobotSimEvents::OrientationMessagePtr oMsg){
// This function will contain the code for predicting the next position of points using gaussian distribution in
// for the error in their movement


        if(oMsg->running){
        // sub is moving forward ... let's predict the sub's position with the model that we have
                // so we will use orientation from the message and forward the points in that direction according to the time elapsed

                time_t newtime,elapsedTime;
                time(&newtime);
                elapsedTime = newtime - timestamp ;

                // Less than a second since the last update: nothing to predict.
                // NOTE(review): timestamp is not refreshed on this path or after
                // a prediction below -- verify whether that is intentional.
                if( elapsedTime == 0 )
                        return;
                else {
                        //clear the image
                        iMap = Image<float > (2000,2000,ZEROS);

                        // First retrieve the distance travelled in current orientation
                        int distance = elapsedTime * BOT_SPEED;
                        int changeInOrientation = curOrientation - oMsg->orientation;

                        float Edrift=0, Etrans=0;
                        // Now let's run the particles through the prediction equations to see their new positions
                        // There are two sources of error for forward movement. One is distance and the other one is orientation
                        // We will have to model errors in both of them.
                        for (std::vector<class pParticle>::iterator iter = pList.begin(); iter != pList.end() ; iter++ )
                        {
                                ///  First erase the current position of the point
                                // NOTE(review): setVal is called as (j, i) here and below --
                                // presumably (x, y) maps to (j, i); confirm against Image::setVal.
                                iMap.setVal((iter->p).j, (iter->p).i, 0.0 );
                                // Do the update on each particles using the equation
                                // SIGMA_TRANS = SIGMA_TRANS * sqrt(EXPERIMENT_DIST) // EXPERIMENT_DIST in centimeters preferable
                                // SIGMA_DRIFT = SIGMA_DRIFT * sqrt(EXPERIMENT_DIST/2.0)
                                // We will consider SIGMA_TRANS and SIGMA_DRIFT on a unit distance
                                // here rand_N is a random number generator, yet to be implemented
                                Etrans = generate_trans() * distance; //rand_N (M_TRANS * distance, SIGMA_TRANS * distance);

                                //This is for a completely different purpose. here we know that it we rotated certain degrees... but this thing just models any error
                                // in the measures taken by compass ... so set the distributions accordingly
                                Edrift = generate_drift() * changeInOrientation; //rand_N (M_DRIFT * changeInOrientation, SIGMA_DRIFT * changeInOrientation);

                                // First adjust for the drift
                                iter->orientation = iter->orientation + Edrift;
                                // Now adjust for the error in distance travelled
                                (iter->p).i = (iter->p).i + (distance+Etrans)*cos(iter->orientation);
                                (iter->p).j = (iter->p).j + (distance+Etrans)*sin(iter->orientation);
                                // One more iteration to calculate the drift in orientation

                                Edrift = generate_drift() * changeInOrientation; //rand_N (M_DRIFT * distance, SIGMA_TRANS * distance);

                                iter->orientation = iter->orientation + Edrift;
                                // set the point in the image
                                iMap.setVal( (iter->p).j, (iter->p).i, 255*iter->getProb() );
                        }
                }

        } else {
                // if it is possible to be not moving and still changing the orientation then do add an orientation change loop here.

                // if we are not moving, then just update the timestamp... it is just a message to say that we are not moving right now.
                        time(&timestamp);
        }

        // Recompute the best position estimate and highlight it on the map.
        getCurrentEstimate();
        iMap.setVal( maxPoint.p.j, maxPoint.p.i, 255);

        // we have estimate in maxpoint ... now plot it on the map

        // write the image out to the map
        itsOfs->writeRGB(iMap, "iMap", FrameInfo("IMap", SRC_POS));
        itsOfs->writeRGB(luminance(iMap), "B/W_iMap", FrameInfo("B/W_IMap", SRC_POS));
        itsOfs->updateNext();
}
示例#20
0
		// Append an animation frame keyed by `id`: position (x, y), source
		// image index, and a per-frame duration in ms derived from the
		// global frames_per_second (integer division).
		void addFrame( int x, int y, int id, int image ){
			frames.emplace_back( id, FrameInfo( {x, y, image, 1000/frames_per_second} ) );
		}
// ######################################################################
// Dump per-frame saliency statistics (AGM min/max/avg/stdev/peaks, plus
// conspicuity-map samples at the extrema and at random probe points) to the
// stats file, and optionally write annotated trajectory / attention-gate
// images to the output frame series.
void SimulationViewerStats::save1(const ModelComponentSaveInfo& sinfo)
{
  // Use a LINFO here since we are already slowed down by writing
  // stats, we should at least have a short debug on this fact
  LINFO("SAVING STATS TO %s",itsStatsFname.getVal().c_str());

  // Lock the file. We put this here to support multi-process in the
  // future. However, this will not work on NFS mounts.
  struct flock fl; int fd;
  lockFile(itsStatsFname.getVal().c_str(),fd,fl);

  // Since this is more or less debug info we open and flush every iteration.
  // rather than once each run.
  std::ofstream statsFile;
  statsFile.open(itsStatsFname.getVal().c_str(),std::ios_base::app);

  // get the OFS to save to, assuming sinfo is of type
  // SimModuleSaveInfo (will throw a fatal exception otherwise):
  nub::ref<FrameOstream> ofs =
    dynamic_cast<const SimModuleSaveInfo&>(sinfo).ofs;

  // also get the SimEventQueue:
  SimEventQueue *q    = dynamic_cast<const SimModuleSaveInfo&>(sinfo).q;

  // initialize our stats dump file if desired:
  // (first call only: write model settings and the channel-name header)
  if(itsFrameIdx == 0)
  {
    rutz::shared_ptr<SimReqVCXmaps> vcxm(new SimReqVCXmaps(this));
    q->request(vcxm); // VisualCortex is now filling-in the maps...

    if (itsStatsFname.getVal().size())
    {
      itsStatsFile = new std::ofstream(itsStatsFname.getVal().c_str());
      if (itsStatsFile->is_open() == false)
        LFATAL("Failed to open '%s' for writing.",
               itsStatsFname.getVal().c_str());

      // dump the settings of the model:
      getRootObject()->printout(*itsStatsFile, "# ");

      // list all our channels:
      //LFATAL("FIXME");
      // also get the SimEventQueue:

      rutz::shared_ptr<ChannelMaps> chm        = vcxm->channelmaps();
      uint                          numSubmaps = chm->numSubmaps();

      *itsStatsFile << "# CHANNELS: ";

      for(uint i = 0; i < numSubmaps; i++)
      {
        NamedImage<float> smap = chm->getRawCSmap(i);
        *itsStatsFile << smap.name() << ", ";
      }
      *itsStatsFile << std::endl;
    }
  }

  // We flush frequently since this output is debuggy in nature or it's being used
  // to collect information which needs assurance of accuracy for instance in
  // RSVP analysis. It is better to err on the side of caution.
  // NOTE(review): itsStatsFile is only allocated above when a stats filename
  // was set; this dereference looks like it would crash otherwise -- confirm.
  (*itsStatsFile).flush();
  (*itsStatsFile).close();

  // get the basic input frame info
  if (SeC<SimEventInputFrame> e = q->check<SimEventInputFrame>(this))
  {
    itsSizeX       = e->frame().getWidth();
    itsSizeY       = e->frame().getHeight();
    itsFrameNumber = (unsigned int)e->frameNum();
    itsFrameIdx    = itsFrameNumber;
  }
  else
  {
    // No input-frame event this cycle: keep our own frame counter running.
    itsFrameNumber = itsFrameIdx;
    itsFrameIdx++;
  }

  // get the latest input frame:
  // Since we are only using it's basic statistics (Height / Width) , we don't care about it's
  // blackboard status. Use SEQ_ANY then. Otherwise, this will not fetch at any rate.
  Image< PixRGB<byte> > input;
  if (SeC<SimEventRetinaImage> e = q->check<SimEventRetinaImage>(this,SEQ_ANY))
    input = e->frame().colorByte();
  else
    LINFO("No input? Check the SimEventCue.");

  // Get the current frame number or keep track on your own
  /*
  if (SeC<SimEventInputFrame> e = q->check<SimEventInputFrame>(this))
    itsFrameIdx = e->frameNum();
  else
    itsFrameIdx++;
  */

  // get the latest raw AGM:
  Image<float> agm;
  if (SeC<SimEventAttentionGuidanceMapOutput> e =
      q->check<SimEventAttentionGuidanceMapOutput>(this))
    agm = e->agm(1.0F);
  else
    LINFO("No AGM? Check the SimEventCue.");

  // if we are missing input or agm, return:
  // We also need to warn so that we know why the stats file may be empty
  bool quit = false;
  if (input.initialized() == false)
    {
      LINFO("WARNING!!! Input seems not to be initialized, so detailed stats cannot be saved.");
      quit = true;
    }
  if(agm.initialized() == false)
    {
      LINFO("WARNING!!! NO Attention Guidance MAP \"AGM\" so detailed stats cannot be saved.");
      quit = true;
    }

  if(quit == true) return;

  // update the trajectory:
  Image< PixRGB<byte> > res;
  const int w = input.getWidth();

  // save image results? if so let's prepare it
  // (input and AGM rendered side by side, X or Y combo per the options)
  if (itsSaveXcombo.getVal() || itsSaveYcombo.getVal())
    {
      Image<float> nagm = getMap(*q);
      res = colGreyCombo(input, nagm, itsSaveXcombo.getVal(),
                         itsDisplayInterp.getVal());
    }

  // if we are saving single channel stats save saliency stats using a compatable format
  // SEE: SingleChannel.C / saveStats(..) for more info on format
  if (itsGetSingleChannelStats.getVal())
    saveCompat(agm);

  // Save a bunch of stats?
  if (statsFile)
    {
      // start with the current simulation time:
       statsFile <<std::endl<<"= "<<q->now().msecs()
                     <<" # time of frame in ms"<<std::endl;

      // get min/max/avg and stdev and number of peaks in AGM:
      float mi, ma, av; getMinMaxAvg(agm, mi, ma, av);
      double sdev = stdev(agm);
      double peaksum; int npeaks = findPeaks(agm, 0.0f, 255.0f, peaksum);

      // find the location of max in the AGM, at scale of original input:
      float maxval; Point2D<int> maxloc;
      findMax(agm, maxloc, maxval);
      float scale = float(w) / float(agm.getWidth());
      maxloc.i = int(maxloc.i * scale + 0.4999F);
      maxloc.j = int(maxloc.j * scale + 0.4999F);
      // mark the max on both halves of the combo image, if we built one:
      if (res.initialized())
        {
          drawPatch(res, maxloc, 4, COL_YELLOW);
          drawPatch(res, maxloc + Point2D<int>(w, 0), 4, COL_YELLOW);
        }

      // find the location of min in the AGM, at scale of original input:
      float minval; Point2D<int> minloc;
      findMin(agm, minloc, minval);
      minloc.i = int(minloc.i * scale + 0.4999F);
      minloc.j = int(minloc.j * scale + 0.4999F);
      if (res.initialized())
        {
          drawPatch(res, minloc, 4, COL_GREEN);
          drawPatch(res, minloc + Point2D<int>(w, 0), 4, COL_GREEN);
        }

      // save some stats for that location:
       statsFile  <<maxloc.i<<' '<<maxloc.j<<' '<<minloc.i<<' '
                  <<minloc.j<<' '<<ma<<' '<<mi<<' '<<av<<' '<<sdev
                  <<' '<<npeaks<<' '<<peaksum
                  <<" # Xmax Ymax Xmin Ymin max min avg std npeaks peaksum"
                  <<std::endl;

      // build a vector of points where we will save samples. First is
      // the max, second the min, then a bunch of random locations:
      std::vector<Point2D<int> > loc;
      loc.push_back(maxloc);
      loc.push_back(minloc);
      for (uint n = 0; n < 100; n ++)
        loc.push_back(Point2D<int>(randomUpToNotIncluding(input.getWidth()),
                              randomUpToNotIncluding(input.getHeight())));

      // Get all the conspicuity maps:
      ImageSet<float> cmap;
      //LFATAL("FIXME");
      rutz::shared_ptr<SimReqVCXmaps> vcxm(new SimReqVCXmaps(this));
      q->request(vcxm); // VisualCortex is now filling-in the maps...
      rutz::shared_ptr<ChannelMaps> chm = vcxm->channelmaps();
      uint numSubmaps = chm->numSubmaps();
      for(uint i=0;i < numSubmaps; i++)
      {
        NamedImage<float> tempMap = chm->getRawCSmap(i);
        Image<float> m = tempMap;
        cmap.push_back(m);

        // also store sample points at the min/max locations:
        Point2D<int> p; float v;
        findMax(m, p, v); loc.push_back(p);
        findMin(m, p, v); loc.push_back(p);
      }
      /*
      for (uint i = 0; i < itsBrain->getVC()->numChans(); i ++)
        {
          Image<float> m = itsBrain->getVC()->subChan(i)->getOutput();
          cmap.push_back(m);

          // also store sample points at the min/max locations:
          Point2D<int> p; float v;
          findMax(m, p, v); loc.push_back(p);
          findMin(m, p, v); loc.push_back(p);
        }
      */

      // Go over all sample points and save feature map and
      // conspicuity map values at those locations:
      for (uint i = 0; i < loc.size(); i ++)
        {
          // p is in input coordinates; pp is the same point scaled back
          // down to AGM/conspicuity-map coordinates.
          Point2D<int> p = loc[i];
          Point2D<int> pp(int(p.i / scale), int(p.j / scale));

           statsFile <<p.i<<' '<<p.j<<"     ";

          // do the conspicuity maps first. Since they are all at the
          // scale of the AGM, we use pp:
          for (uint j = 0; j < cmap.size(); j ++)
          {
            // -1 marks probe points that fall outside the AGM bounds.
            if((int(p.i / scale) < agm.getWidth()) &&
               (int(p.j / scale) < agm.getHeight()))
            {
              (statsFile)<<cmap[j].getVal(pp)<<' ';
              (statsFile)<<"    ";
            }
            else
            {
              (statsFile)<<"-1"<<' ';
              (statsFile)<<"    ";
            }
          }

          // now the feature maps, we use coordinates p:
          /* TOO BOGUS - disabled for now
          std::vector<double> f;
          itsBrain->getVC()->getFeatures(p, f);
          for (uint j = 0; j < f.size(); j ++) (*statsFile)<<f[j]<<' ';
          */

           statsFile  <<"# features "<<i<<" at ("<<p.i
                         <<", "<<p.j<<')'<<std::endl;
        }
  }

  statsFile.flush();
  statsFile.close();
  unlockFile(itsStatsFname.getVal().c_str(),fd,fl);
  // save results?
  if (res.initialized())
    ofs->writeRGB(res, "T",
                  FrameInfo("SimulationViewerStats trajectory", SRC_POS));

  // Should we compute attention gate stats
  // If we have AG stats we will save the basic LAM stats anyways
  if(itsComputeAGStats.getVal())
    computeAGStats(*q);
  else if (SeC<SimEventAttentionGateOutput> ag =
           q->check<SimEventAttentionGateOutput>(this))
    computeLAMStats(ag->lam());

  //! Save the overlap image
  if(itsOverlap.initialized())
    ofs->writeRGB(itsOverlap, "AG-STAT-MASK",
                  FrameInfo("Stats mask overlap", SRC_POS));

  if(itsVisualSegments.initialized())
    ofs->writeRGB(itsVisualSegments, "AG-STAT-SEGS",
                  FrameInfo("Stats segments", SRC_POS));

  if(itsVisualCandidates.initialized())
    ofs->writeGray(itsVisualCandidates, "AG-STAT-CAND",
                   FrameInfo("Stats candidates", SRC_POS));

}
示例#22
0
int main(int argc, const char **argv)
{
  // Camera-calibration test harness: grabs frames, finds chessboard corners,
  // optionally calibrates intrinsics/extrinsics from accumulated corners, and
  // overlays a projected grid/rectangle on the live image.

  // Instantiate a ModelManager:
  ModelManager manager("Test wiimote");

  nub::ref<OutputFrameSeries> ofs(new OutputFrameSeries(manager));
  manager.addSubComponent(ofs);

  nub::ref<InputFrameSeries> ifs(new InputFrameSeries(manager));
  manager.addSubComponent(ifs);

  // Parse command-line:
  if (manager.parseCommandLine(argc, argv, "", 0, 0) == false) return(1);

  manager.start();

  // Init camera params (legacy OpenCV C API matrices; they live for the
  // whole program, so they are intentionally never released).
  itsIntrinsicMatrix = cvCreateMat( 3, 3, CV_32FC1);
  itsDistortionCoeffs = cvCreateMat( 4, 1, CV_32FC1);
  itsCameraRotation = cvCreateMat( 1, 3, CV_64FC1);
  itsCameraTranslation = cvCreateMat( 1, 3, CV_64FC1);

  // start from an ideal (distortion-free) lens model
  cvmSet(itsDistortionCoeffs, 0, 0, 0);
  cvmSet(itsDistortionCoeffs, 1, 0, 0);
  cvmSet(itsDistortionCoeffs, 2, 0, 0);
  cvmSet(itsDistortionCoeffs, 3, 0, 0);

  // initial extrinsic guess: Rodrigues rotation vector and translation
  cvmSet(itsCameraRotation, 0, 0, 2.391102);
  cvmSet(itsCameraRotation, 0, 1, 0);
  cvmSet(itsCameraRotation, 0, 2, 0);

  cvmSet(itsCameraTranslation, 0, 0, 0);
  cvmSet(itsCameraTranslation, 0, 1, 0);
  cvmSet(itsCameraTranslation, 0, 2, 840.954432);

  // intrinsics: focal lengths in pixels, principal point at image center
  cvmSet(itsIntrinsicMatrix, 0, 0, 415.5); cvmSet(itsIntrinsicMatrix, 0, 1, 0); cvmSet(itsIntrinsicMatrix, 0, 2, 320/2); //159.50000);
  cvmSet(itsIntrinsicMatrix, 1, 0, 0); cvmSet(itsIntrinsicMatrix, 1, 1, 436 ); cvmSet(itsIntrinsicMatrix, 1, 2, 240/2); // 119.5);
  cvmSet(itsIntrinsicMatrix, 2, 0, 0); cvmSet(itsIntrinsicMatrix, 2, 1, 0); cvmSet(itsIntrinsicMatrix, 2, 2, 1);

  bool drawGrid = true;      // overlay the projected calibration grid
  bool saveCorners = false;  // accumulate this frame's corners for calibration
  bool calibrate = false;    // run calibration on the accumulated corners

  std::vector<CvPoint2D32f> allCorners;

  while(1)
  {
    GenericFrame input = ifs->readFrame();

    // Bug fix: stop cleanly at end-of-stream instead of looping forever on an
    // uninitialized frame; also makes manager.stop() below reachable.
    if (!input.initialized())
      break;

    Image<PixRGB<byte> > img = input.asRgb();

    int rows = 4, cols = 3;  // inner-corner count of the chessboard target

    std::vector<CvPoint2D32f> corners = findCorners(img, rows, cols);

    if (corners.size() == (uint)(rows*cols))
    {
      if (saveCorners)
        for(uint i=0; i<corners.size(); i++)
          allCorners.push_back(corners[i]);
      saveCorners = false;

      cvDrawChessboardCorners(img2ipl(img), cvSize(rows,cols), &corners[0], corners.size(), 1);
    }

    if (calibrate)
    {
      // calibrate from all corners gathered so far; if the current frame also
      // has a full board, recover its extrinsic pose too
      calibrateViews(allCorners, rows, cols);
      if (corners.size() == (uint)(rows*cols))
        findExtrinsic(corners, rows, cols);
      calibrate = false;
    }

    if (drawGrid)
      projectGrid(img);

    // project a fixed reference rectangle (dimensions presumably in mm --
    // 216.5 x 279.5 matches a letter-size sheet; confirm with projectRect)
    projectRect(img, 216.5, 279.5);

    // keyboard toggles for drawGrid/saveCorners/calibrate
    processUserInput(ofs, drawGrid, saveCorners, calibrate);

    ofs->writeRGB(img, "Output", FrameInfo("Output", SRC_POS));

    ofs->updateNext();
  }

  // stop all our ModelComponents
  manager.stop();

  // all done!
  return 0;
}
示例#23
0
int main(const int argc, const char **argv)
{
  MYLOGVERB = LOG_INFO;
  ModelManager *mgr = new ModelManager("Test ObjRec");

  nub::ref<OutputFrameSeries> ofs(new OutputFrameSeries(*mgr));
  mgr->addSubComponent(ofs);

  nub::ref<InputFrameSeries> ifs(new InputFrameSeries(*mgr));
  mgr->addSubComponent(ifs);

  nub::ref<EnvSegmenterCannyContour> seg(new EnvSegmenterCannyContour(*mgr));
  mgr->addSubComponent(seg);

  mgr->exportOptions(MC_RECURSE);

  if (mgr->parseCommandLine(
        (const int)argc, (const char**)argv, "", 0, 0) == false)
    return 1;

  mgr->start();

  seg->setModelParamVal("CannyMinCos", 1.0);
  seg->setModelParamVal("CannyMaxArea", 6000);
  seg->setModelParamVal("CannyMaxArea", 12000);

  itsObjectDB.loadFrom("cards.vdb");
  while(1)
  {
    Image< PixRGB<byte> > inputImg;
    const FrameState is = ifs->updateNext();
    if (is == FRAME_COMPLETE)
      break;

    //grab the images
    GenericFrame input = ifs->readFrame();
    if (!input.initialized())
      break;
    inputImg = input.asRgb();

    Image<PixRGB<byte> > out;

    const Rectangle cardbox = seg->getFoa(inputImg, Point2D<int>(), NULL, &out);

    ofs->writeRGB(out, "input", FrameInfo("input", SRC_POS));

    if (cardbox.isValid())
    {
      Image<PixRGB<byte> > card =
        crop(inputImg, cardbox.getOverlap(inputImg.getBounds()));

      std::string cardName = recCard(card);

      if (cardName.length() == 0)
      {
        LINFO("Enter name for card:");
        std::getline(std::cin, cardName, '\n');

        if (cardName.length() > 0)
          trainCard(card, cardName);
      }

      writeText(card, Point2D<int>(0,0), cardName.c_str(),
          PixRGB<byte>(255), PixRGB<byte>(127));

      ofs->writeRGB(card, "card", FrameInfo("card", SRC_POS));
    }

    ofs->updateNext();
  }
  mgr->stop();

  return 0;

}
示例#24
0
// ######################################################################
int main(const int argc, const char **argv)
{
  // Test harness for FoeDetector: reads an input frame series, estimates the
  // focus of expansion (FOE) for each frame, and compares the estimate
  // against a hard-coded ground-truth file, reporting the running average
  // pixel error.
  MYLOGVERB = LOG_INFO;  // suppress debug messages

  // install signal handlers so e.g. Ctrl-C can terminate the main loop cleanly
  volatile int signum = 0;
  catchsignals(&signum);

  ModelManager manager("Test Motion Energy");

  nub::ref<InputFrameSeries> ifs(new InputFrameSeries(manager));
  manager.addSubComponent(ifs);

  nub::ref<OutputFrameSeries> ofs(new OutputFrameSeries(manager));
  manager.addSubComponent(ofs);

  nub::ref<FoeDetector> fd(new FoeDetector(manager));
  manager.addSubComponent(fd);

  if (manager.parseCommandLine((const int)argc, (const char**)argv,
                               "<stimuli> <options>", 0, 9) == false)
    return(1);

  fd->reset(NUM_PYR_LEVEL, NUM_DIRS, NUM_SPEEDS);

  // optional first extra argument selects the stimulus type; the default
  // "Image" means: use the input frame series as-is
  std::string stimuli("Image");
  if(manager.numExtraArgs() > 0)
    stimuli = manager.getExtraArgAs<std::string>(0);
  LINFO("Stimuli: %s", stimuli.c_str());

  manager.start();

  Timer timer(1000000);
  timer.reset();  // reset the timer
  int frame = 0;

  PauseWaiter p;

  uint step; step = 0;

  // to get to the good part
  //for(uint i = 0; i < 50; i++) //was 25
  //  ifs->updateNext();

  // get ground truth file (hard-coded lab path; only works in that
  // environment -- TODO make this a command-line option)
  std::string gtFilename
    ("/lab/tmpib/u/siagian/neuroscience/Data/FOE/driving_nat_Browning.txt");
  std::vector<Point2D<int> > gt = getGT(gtFilename);
  int ldpos = gtFilename.find_last_of('.');
  std::string prefix = gtFilename.substr(0, ldpos);

  // window for displaying the ground truth; created lazily on the first frame
  // once the input dimensions are known
  rutz::shared_ptr<XWinManaged> win;

  float totalErr = 0.0;

  // remaining extra args are forwarded to getImage() for synthetic stimuli
  std::vector<std::string> args;
  for(uint i = 0; i < manager.numExtraArgs(); i++)
    args.push_back(manager.getExtraArgAs<std::string>(i));

  Image<byte> prevLum;
  Image<PixRGB<byte> > prevImage;
  Image<PixRGB<byte> > prevImage2;
  while (1)
    {
      if (signum != 0)
        {
          LINFO("quitting because %s was caught", signame(signum));
          break;
        }

      if (ofs->becameVoid())
        {
          LINFO("quitting because output stream was closed or became void");
          break;
        }

      if (p.checkPause())
        continue;

      const FrameState is = ifs->updateNext();
      if (is == FRAME_COMPLETE) break; // done receiving frames

      Image< PixRGB<byte> > input = ifs->readRGB();
      if(frame == 0) 
        {
          // first frame: create the ground-truth display window at input size
          uint width  = input.getWidth();
          uint height = input.getHeight();
          win.reset(new XWinManaged(Dims(width, height), 0, 0, "GT"));
        }

      // empty image signifies end-of-stream
      if (!input.initialized()) break;
      Image<byte> lum = luminance(input);
      Point2D<float> pshift(0.0,0.0); 
      if(step != 0)
        {
          // calculate planar shift using SIFT (stabilize against prev frame)
          lum = calculateShift(lum,prevLum, ofs);
        }
      // synthetic stimuli override the camera luminance entirely
      if( manager.numExtraArgs() > 0)
        lum = getImage(stimuli, args, fd, step);

      // for saving videos
      prevImage2 = prevImage;
      prevImage  = input;

      // two statements on one line: step++ only runs when lum is valid
      if (!lum.initialized()) break; step++;

      // compute the focus of expansion (FOE)
      Point2D<int> foe = fd->getFoe(lum, FOE_METHOD_TEMPLATE, false);
      //Point2D<int> foe = fd->getFoe(lum, FOE_METHOD_AVERAGE);
      LINFO("[%d]Foe: %d %d", frame, foe.i, foe.j);

      // illustration of the size of the receptive field
      if(!stimuli.compare("ShowRF"))
        {
          uint rfI = 44;
          uint rfJ = 152;
          lum.setVal(rfI, rfJ, 300.0F);      
          drawRect(lum, Rectangle::tlbrI(144,36,159,51), byte(255));
          drawRect(lum, Rectangle::tlbrI(148,40,155,47), byte(255));
          
          drawRect(lum, Rectangle::tlbrI(rfJ-8, rfI-8, rfJ+8, rfI+8), byte(255));
          drawRect(lum, Rectangle::tlbrI(rfJ-16,rfI-16,rfJ+16,rfI+16), byte(255));
        }

      ofs->writeGrayLayout(fd->getMTfeaturesDisplay(lum), "MT Features",
                           FrameInfo("motion energy output images", SRC_POS));

      // compare against ground truth; the frame-2 offset presumably accounts
      // for pipeline latency -- TODO confirm against the getGT data layout.
      // NOTE(review): gt[frame-2] is not bounds-checked; assumes the
      // ground-truth file covers at least as many frames as the input.
      if(frame >= 4)
        {
          float err = foe.distance(gt[frame-2]); 
          totalErr += err;
          LINFO("Foe: %d %d: GT: %d %d --> %f --> avg: %f", 
                foe.i, foe.j, gt[frame-2].i, gt[frame-2].j, 
                err, totalErr/(frame-3));

          // overlay estimate (green) and ground truth (red) on the matching
          // input frame (two frames back)
          Image<PixRGB<byte> > simg = prevImage2;
          drawCross(simg, foe        , PixRGB<byte>(0,255,0), 10, 2);
          drawCross(simg, gt[frame-2], PixRGB<byte>(255,0,0), 10, 2);
          win->drawImage(simg,0,0);
          //Raster::WriteRGB(simg, sformat("%s_STnPS_%06d.ppm", prefix.c_str(), frame-2));
        }

      //ofs->writeGrayLayout
      //  (lum, "test-FOE Main", FrameInfo("foe output", SRC_POS));
      const FrameState os = ofs->updateNext();
      //LINFO("frame[%d]: %8.3f %8.3f", frame, pshift.i, pshift.j); 
      Raster::waitForKey();  // blocks: advances one frame per keypress

      if (os == FRAME_FINAL)
        break;

      prevLum  = lum;
      frame++;
    }

  LINFO("%d frames in %gs (%.2ffps)\n", 
        frame, timer.getSecs(), frame / timer.getSecs());

  // stop all our ModelComponents
  manager.stop();

  // all done!
  return 0;

}
示例#25
0
文件: Logger.C 项目: binary42/avedac
void Logger::save(const Image<float>& img,
        const uint frameNum,
        const string& resultName,
        const int resNum) {
    itsOfs->writeFrame(GenericFrame(img, FLOAT_NORM_0_255), getFileStem(resultName, resNum), FrameInfo(resultName, SRC_POS));
}
示例#26
0
文件: Logger.C 项目: binary42/avedac
void Logger::save(const Image< PixRGB<byte> >& img,
        const uint frameNum,
        const string& resultName,
        const int resNum) {
    itsOfs->writeFrame(GenericFrame(img), getFileStem(resultName, resNum), FrameInfo(resultName, SRC_POS));
}
示例#27
0
文件: Logger.C 项目: binary42/avedac
// ######################################################################
// Per-frame result writer: given the current frame and the set of visual
// events, saves the requested event/summary/position/property/XML outputs,
// renders and displays the annotated result image, saves requested event
// clips, and flags written events for deletion.
void Logger::run(nub::soft_ref<MbariResultViewer> rv, MbariImage<PixRGB <byte> >& img,
                                        MbariVisualEvent::VisualEventSet& eventSet, const Dims scaledDims)
{
    // adjust scaling if needed (ratio of native to display dimensions)
    Dims d = img.getDims();
    itsScaleW = (float)d.w()/(float)scaledDims.w();
    itsScaleH = (float)d.h()/(float)scaledDims.h();

    // initialize property vector and FOE estimator
    MbariVisualEvent::PropertyVectorSet pvs;

    // this is a list of all the events that have a token in this frame
    std::list<MbariVisualEvent::VisualEvent *> eventFrameList;

    // this is a complete list of all those events that are ready to be written
    std::list<MbariVisualEvent::VisualEvent *> eventListToSave;

    // get event frame list for this frame and those events that are ready
    // to be saved
    eventFrameList = eventSet.getEventsForFrame(img.getFrameNum());
    eventListToSave = eventSet.getEventsReadyToSave(img.getFrameNum());

    // write out eventSet?
    if (itsSaveEventsName.getVal().length() > 0 ) saveVisualEvent(eventSet, eventFrameList);

    // write out summary ?
    if (itsSaveSummaryEventsName.getVal().length() > 0) saveVisualEventSummary(Version::versionString(), eventListToSave);

    // flag events whose data has been written out
    std::list<MbariVisualEvent::VisualEvent *>::iterator i;
    for (i = eventListToSave.begin(); i != eventListToSave.end(); ++i)
        (*i)->flagWriteComplete();

    // write out positions?
    if (itsSavePositionsName.getVal().length() > 0) savePositions(eventFrameList);

    MbariVisualEvent::PropertyVectorSet pvsToSave = eventSet.getPropertyVectorSetToSave();

    // write out property vector set?
    if (itsSavePropertiesName.getVal().length() > 0) saveProperties(pvsToSave);

    // TODO: this is currently not used...look back in history to where this got cut-out
    // need to obtain the property vector set?
    if (itsLoadPropertiesName.getVal().length() > 0) pvs = eventSet.getPropertyVectorSet();

    // re-fetch the event list for this frame (mirrors the original flow
    // after flagging writes complete; membership is presumably unchanged)
    eventFrameList = eventSet.getEventsForFrame(img.getFrameNum());

    // write out eventSet to XML?
    if (itsSaveXMLEventSetName.getVal().length() > 0) {
        saveVisualEventSetToXML(eventFrameList,
                img.getFrameNum(),
                img.getMetaData().getTC(),
                itsFrameRange);
    }

    // annotation circle radius scales with the frame width
    const int circleRadiusRatio = 40;
    const int circleRadius = img.getDims().w() / circleRadiusRatio;

    Image< PixRGB<byte> > output = rv->createOutput(img,
            eventSet,
            circleRadius,
            itsScaleW, itsScaleH);

    // write the annotated frame?
    if (itsSaveOutput.getVal())
        itsOfs->writeFrame(GenericFrame(output), "results", FrameInfo("results", SRC_POS));

    // display output ?
    rv->display(output, img.getFrameNum(), "Results");

    // need to save any event clips?
    if (itsSaveEventNumsAll) {
        // save a clip frame for every event present in this frame
        // (renamed from `i` to avoid shadowing the outer iterator)
        std::list<MbariVisualEvent::VisualEvent *>::iterator e;
        for (e = eventFrameList.begin(); e != eventFrameList.end(); ++e)
            saveSingleEventFrame(img, img.getFrameNum(), *e);
    } else {
        // need to save any particular event clips?
        uint csavenum = numSaveEventClips();
        for (uint idx = 0; idx < csavenum; ++idx) {
            uint evnum = getSaveEventClipNum(idx);
            if (!eventSet.doesEventExist(evnum)) continue;

            MbariVisualEvent::VisualEvent *event = eventSet.getEventByNumber(evnum);
            if (event->frameInRange(img.getFrameNum()))
                saveSingleEventFrame(img, img.getFrameNum(), event);
        }
    }

    // flag events that have been saved for delete otherwise takes too much memory
    for (i = eventListToSave.begin(); i != eventListToSave.end(); ++i)
        (*i)->flagForDelete();

    // idiom: clear() instead of popping one element at a time (the lists hold
    // non-owning pointers, so no per-element cleanup is needed)
    eventFrameList.clear();
    eventListToSave.clear();

}
示例#28
0
/*
XWinManaged xwin(Dims(WIDTH,HEIGHT*2), 1, 1, "Test SIFT");


rutz::shared_ptr<VisualObject> objTop, objBottom;

void showObjs(rutz::shared_ptr<VisualObject> obj1, rutz::shared_ptr<VisualObject> obj2){
        //return ;

        Image<PixRGB<byte> > keyIma = rescale(obj1->getKeypointImage(),
                        WIDTH, HEIGHT);
        objTop = obj1;

        if (obj2.is_valid()){
                keyIma = concatY(keyIma, rescale(obj2->getKeypointImage(),
                                        WIDTH, HEIGHT));
                objBottom = obj2;
        }

        xwin.drawImage(keyIma);
}

void showKeypoint(rutz::shared_ptr<VisualObject> obj, int keypi,
                Keypoint::CHANNEL channel = Keypoint::ORI){

        char winTitle[255];
        switch(channel){
                case Keypoint::ORI:
                        sprintf(winTitle, "Keypoint view (Channel ORI)");
                        break;
                case Keypoint::COL:
                        sprintf(winTitle, "Keypoint view (Channel COL)");
         break;
                default:
                        sprintf(winTitle, "Keypoint view (Channel   )");
                        break;
        }


        rutz::shared_ptr<Keypoint> keyp = obj->getKeypoint(keypi);
        float x = keyp->getX();
        float y = keyp->getY();
        float s = keyp->getS();
        float o = keyp->getO();
        float m = keyp->getM();

        uint FVlength = keyp->getFVlength(channel);
        if (FVlength<=0) return; //dont show the Keypoint if we dont have a FV

        XWinManaged *xwinKey = new XWinManaged(Dims(WIDTH*2,HEIGHT), -1, -1, winTitle);


        //draw the circle around the keypoint
        const float sigma = 1.6F * powf(2.0F, s / float(6 - 3));
        const float sig = 1.5F * sigma;
        const int rad = int(3.0F * sig);

        Image<PixRGB<byte> > img = obj->getImage();
        Point2D<int> loc(int(x + 0.5F), int(y + 0.5F));
        drawCircle(img, loc, rad, PixRGB<byte>(255, 0, 0));
        drawDisk(img, loc, 2, PixRGB<byte>(255,0,0));

        s=s*5.0F; //mag for scale
        if (s > 0.0f) drawLine(img, loc,
                        Point2D<int>(int(x + s * cosf(o)  + 0.5F),
                                int(y + s * sinf(o) + 0.5F)),
                        PixRGB<byte>(255, 0, 0));

        char info[255];
        sprintf(info, "(%0.2f,%0.2f) s=%0.2f o=%0.2f m=%0.2f", x, y, s, o, m);

        writeText(img, Point2D<int>(0, HEIGHT-20), info,
                        PixRGB<byte>(255), PixRGB<byte>(127));


        //draw the vectors from the features vectors

        Image<PixRGB<byte> > fvDisp(WIDTH, HEIGHT, NO_INIT);
        fvDisp.clear(PixRGB<byte>(255, 255, 255));
        int xBins = int((float)WIDTH/4);
        int yBins = int((float)HEIGHT/4);

        drawGrid(fvDisp, xBins, yBins, 1, 1, PixRGB<byte>(0, 0, 0));



        switch (channel){
                case Keypoint::ORI:
                        for (int xx=0; xx<4; xx++){
                                for (int yy=0; yy<4; yy++){
                                        for (int oo=0; oo<8; oo++){
                                                Point2D<int> loc(xBins/2+(xBins*xx), yBins/2+(yBins*yy));
                                                byte mag = keyp->getFVelement(xx*32+yy*8+oo, channel);
                                                mag = mag/4;
                                                drawDisk(fvDisp, loc, 2, PixRGB<byte>(255, 0, 0));
                                                drawLine(fvDisp, loc,
                                                                Point2D<int>(int(loc.i + mag*cosf(oo*M_PI/4)),
                                                                        int(loc.j + mag*sinf(oo*M_PI/4))),
                                                                PixRGB<byte>(255, 0, 0));
                                        }
                                }
                        }
                        break;

                case Keypoint::COL:
                        for (int xx=0; xx<4; xx++){
                                for (int yy=0; yy<4; yy++){
                                        for (int cc=0; cc<3; cc++){
                                                Point2D<int> loc(xBins/2+(xBins*xx), yBins/2+(yBins*yy));
                                                byte mag = keyp->getFVelement(xx*12+yy*3+cc, channel);
                                                mag = mag/4;
                                                drawDisk(fvDisp, loc, 2, PixRGB<byte>(255, 0, 0));
                                                drawLine(fvDisp, loc,
                                                                Point2D<int>(int(loc.i + mag*cosf(-1*cc*M_PI/2)),
                                                                        int(loc.j + mag*sinf(-1*cc*M_PI/2))),
                                                                PixRGB<byte>(255, 0, 0));
                                        }
                                }
                        }
                        break;
                default:
                        break;
        }



        Image<PixRGB<byte> > disp = img;
        disp = concatX(disp, fvDisp);


        xwinKey->drawImage(disp);

        while(!xwinKey->pressedCloseButton()){
                usleep(100);
        }
        delete xwinKey;

}



void analizeImage(){
   int key = -1;

        while(key != 24){ // q to quit window
                key = xwin.getLastKeyPress();
                Point2D<int>  point = xwin.getLastMouseClick();
                if (point.i > -1 && point.j > -1){

                        //get the right object
                        rutz::shared_ptr<VisualObject> obj;
                        if (point.j < HEIGHT){
                                obj = objTop;
                        } else {
                                obj = objBottom;
                                point.j = point.j - HEIGHT;
                        }
                        LINFO("ClickInfo: key = %i, p=%i,%i", key, point.i, point.j);

                        //find the keypoint
                        for(uint i=0; i<obj->numKeypoints(); i++){
                                rutz::shared_ptr<Keypoint> keyp = obj->getKeypoint(i);
                                float x = keyp->getX();
                                float y = keyp->getY();

                                if ( (point.i < (int)x + 5 && point.i > (int)x - 5) &&
                                          (point.j < (int)y + 5 && point.j > (int)y - 5)){
                                        showKeypoint(obj, i, Keypoint::ORI);
                                        showKeypoint(obj, i, Keypoint::COL);
                                }

                        }

                }
        }

}
*/
int main(const int argc, const char **argv)
{
  // SIFT object-recognition test harness: in training mode, adds each input
  // frame to the visual-object database under <trainingLabel>_<id>; otherwise
  // matches each frame against the database and reports the recognition rate.
  MYLOGVERB = LOG_INFO;
  ModelManager manager("Test SIFT");

  nub::ref<InputFrameSeries> ifs(new InputFrameSeries(manager));
  manager.addSubComponent(ifs);

  nub::ref<OutputFrameSeries> ofs(new OutputFrameSeries(manager));
  manager.addSubComponent(ofs);

  if (manager.parseCommandLine(
        (const int)argc, (const char**)argv, "<database file> <trainingLabel>", 2, 2) == false)
    return 0;

  manager.start();

  const char *vdbFile = manager.getExtraArg(0).c_str();
  const char *trainingLabel = manager.getExtraArg(1).c_str();

  int numMatches = 0;   // the number of correct matches
  int totalObjects = 0; // the number of objects presented to the network
  int uObjId = 0;       // a unique obj id for sift

  // NOTE(review): training mode is hard-wired off; flip to true to retrain
  bool train = false;

  // load the database file
  vdb.loadFrom(std::string(vdbFile));

  while(1)
  {
    Image< PixRGB<byte> > inputImg;
    const FrameState is = ifs->updateNext();
    if (is == FRAME_COMPLETE)
      break;

    //grab the images
    GenericFrame input = ifs->readFrame();
    if (!input.initialized())
      break;
    inputImg = input.asRgb();
    totalObjects++;

    ofs->writeRGB(inputImg, "Input", FrameInfo("Input", SRC_POS));

    if (train)
    {
      //add the object to the database under "<label>_<id>"
      char objName[255]; sprintf(objName, "%s_%i", trainingLabel, uObjId);
      uObjId++;
      rutz::shared_ptr<VisualObject>
        vo(new VisualObject(objName, "NULL", inputImg,
              Point2D<int>(-1,-1),
              std::vector<float>(),
              std::vector< rutz::shared_ptr<Keypoint> >(),
              USECOLOR));

      vdb.addObject(vo);
    } else {

      // get the object classification; database names look like
      // "<label>_<id>", so strip from the first '_' to recover the label.
      // Bug fix: keep find()'s result as size_type instead of narrowing to
      // int (npos would wrap to -1); assign() treats npos as "whole string",
      // preserving the original behavior when no '_' is present.
      std::string objName;
      std::string tmpName = matchObject(inputImg);
      std::string::size_type us = tmpName.find("_");
      objName.assign(tmpName, 0, us);
      LINFO("Object name %s", objName.c_str());
      printf("%i %s\n", ifs->frame(), objName.c_str());

      if (objName == trainingLabel)
        numMatches++;
    }
  }

  if (train)
  {
    printf("Trained on %i objects\n", totalObjects);
    printf("Object in db %i\n" , vdb.numObjects());
    vdb.saveTo(std::string(vdbFile));
  } else {
    printf("Classification Rate: %i/%i %0.2f\n",
        numMatches, totalObjects,
        (float)numMatches/(float)totalObjects);
  }

  // Bug fix: stop the ModelManager (it was started but never stopped) and
  // return an explicit status.
  manager.stop();
  return 0;
}
// ######################################################################
// Save all ScaleSurpriseControl output images -- the final frame, raw and
// normalized difference images, and (per option flags) the per-channel
// difference parts, beta maps, bias maps, and separable-filter parts --
// through the given frame output stream.
void SimulationViewerSurpCont::saveResults(const nub::ref<FrameOstream>& ofs)
{
  // update our internal time:
  double msecs = itsCurrTime.msecs();

  LINFO("Running Surprise Control on Sample Input time %f ms",msecs);

  // NOTE(review): LFATAL typically aborts, which would make everything below
  // unreachable until this FIXME is resolved -- confirm LFATAL's behavior
  LFATAL("FIXME");
  ////  itsScaleSurpriseControl.SSCprocessFrame(itsBrain);

  LINFO("Saving Surprise Control Output");
  // reusable byte-pixel buffer; each float image is assigned into it
  // (converting on assignment) before being written out
  Image<PixRGB<byte> > bimage;

  // the final processed frame
  Image<PixRGB<float> > outImage = itsScaleSurpriseControl.SSCgetFrame();
  bimage = outImage;


  ofs->writeRGB(bimage, "SSC", FrameInfo("ScaleSurpriseControl final image",
                                       SRC_POS));

  // raw difference image (false = not normalized)
  Image<PixRGB<float> > diffImage =
    itsScaleSurpriseControl.SSCgetDiffImage(false);
  bimage = diffImage;

  ofs->writeRGB(bimage, "SSC-diff",
                FrameInfo("ScaleSurpriseControl diff image",SRC_POS));

  // normalized difference image (true = normalized)
  diffImage = itsScaleSurpriseControl.SSCgetDiffImage(true);
  bimage    = diffImage;

  ofs->writeRGB(bimage, "SSC-diff-norm",
              FrameInfo("ScaleSurpriseControl diff image normalized",SRC_POS));

  // per-channel difference parts: the first four indices are named
  // H1/H2/S/V, any further parts are numbered
  if(itsDrawDiffParts.getVal())
  {
    std::vector<Image<PixRGB<float> > > diffParts =
      itsScaleSurpriseControl.SSCgetDiffParts();
    std::vector<Image<PixRGB<float> > >::const_iterator diffPartsItr =
      diffParts.begin();
    ushort type = 0;
    while(diffPartsItr != diffParts.end())
    {
      bimage = *diffPartsItr;
      char name[100];
      if(type == 0)
        sprintf(name,"SSC-diffParts-H1-");
      else if(type == 1)
        sprintf(name,"SSC-diffParts-H2-");
      else if(type == 2)
        sprintf(name,"SSC-diffParts-S-");
      else if(type == 3)
        sprintf(name,"SSC-diffParts-V-");
      else
        sprintf(name,"SSC-diffParts-%d-",type);
      std::string prefix    = name;
      std::string frameInfo = "ScaleSurpriseControl difference ";
      frameInfo             = frameInfo + prefix;
      ofs->writeRGB(bimage, prefix, FrameInfo(frameInfo,SRC_POS));
      ++diffPartsItr; type++;
    }
  }

  // per-channel beta maps, first raw then normalized.
  // NOTE(review): sc_channel_name_abv[type] is indexed without a bounds
  // check; assumes that table covers every beta part -- confirm
  if(itsDrawBetaParts.getVal())
  {
    std::vector<Image<float> > betaParts =
      itsScaleSurpriseControl.SSCgetBetaParts(false);
    std::vector<Image<float> >::const_iterator betaPartsItr =
      betaParts.begin();
    ushort type = 0;
    while(betaPartsItr != betaParts.end())
    {
      bimage = *betaPartsItr;
      char name[100];
      sprintf(name,"SSC-betaParts-%s-",sc_channel_name_abv[type].c_str());
      std::string prefix    = name;
      std::string frameInfo = "ScaleSurpriseControl beta ";
      frameInfo             = frameInfo + prefix;
      ofs->writeRGB(bimage, prefix, FrameInfo(frameInfo,SRC_POS));
      ++betaPartsItr; type++;
    }

    // second pass: normalized beta maps
    betaParts = itsScaleSurpriseControl.SSCgetBetaParts(true);
    betaPartsItr = betaParts.begin();
    type = 0;
    while(betaPartsItr != betaParts.end())
    {
      bimage = *betaPartsItr;
      char name[100];
      sprintf(name,"SSC-betaParts-norm-%s-",sc_channel_name_abv[type].c_str());
      std::string prefix    = name;
      std::string frameInfo = "ScaleSurpriseControl beta norm";
      frameInfo             = frameInfo + prefix;
      ofs->writeRGB(bimage, prefix, FrameInfo(frameInfo,SRC_POS));
      ++betaPartsItr; type++;
    }
  }

  // bias maps for the H1/H2/S/V channels; the four vectors are iterated in
  // lockstep (assumed equal length -- driven by biasH1's iterator)
  if(itsDrawBiasParts.getVal())
  {
    std::vector<Image<PixRGB<float> > > biasH1;
    std::vector<Image<PixRGB<float> > > biasH2;
    std::vector<Image<PixRGB<float> > > biasS;
    std::vector<Image<PixRGB<float> > > biasV;

    itsScaleSurpriseControl.SSCgetBiasParts(biasH1,biasH2,biasS,biasV);

    std::vector<Image<PixRGB<float> > >::const_iterator biasH1Itr =
      biasH1.begin();
    std::vector<Image<PixRGB<float> > >::const_iterator biasH2Itr =
      biasH2.begin();
    std::vector<Image<PixRGB<float> > >::const_iterator biasSItr  =
      biasS.begin();
    std::vector<Image<PixRGB<float> > >::const_iterator biasVItr  =
      biasV.begin();

    ushort scale = 0;  // one set of images per pyramid scale

    while(biasH1Itr != biasH1.end())
    {
      char name[100];

      bimage = *biasH1Itr;
      sprintf(name,"SSC-biasParts-H1-%d-",scale);
      std::string prefix    = name;
      std::string frameInfo = "ScaleSurpriseControl biasH1 ";
      frameInfo             = frameInfo + prefix;
      ofs->writeRGB(bimage, prefix, FrameInfo(frameInfo,SRC_POS));

      bimage = *biasH2Itr;
      sprintf(name,"SSC-biasParts-H2-%d-",scale);
      prefix    = name;
      frameInfo = "ScaleSurpriseControl biasH2 ";
      frameInfo = frameInfo + prefix;
      ofs->writeRGB(bimage, prefix, FrameInfo(frameInfo,SRC_POS));

      bimage = *biasSItr;
      sprintf(name,"SSC-biasParts-S-%d-",scale);
      prefix    = name;
      frameInfo = "ScaleSurpriseControl biasS ";
      frameInfo = frameInfo + prefix;
      ofs->writeRGB(bimage, prefix, FrameInfo(frameInfo,SRC_POS));

      bimage = *biasVItr;
      sprintf(name,"SSC-biasParts-V-%d-",scale);
      prefix    = name;
      frameInfo = "ScaleSurpriseControl biasV ";
      frameInfo = frameInfo + prefix;
      ofs->writeRGB(bimage, prefix, FrameInfo(frameInfo,SRC_POS));
      ++biasH1Itr; ++biasH2Itr; ++biasSItr; ++biasVItr; scale++;
    }
  }

  // separable-filter parts Z and Y, one pair per scale (iterated in lockstep,
  // assumed equal length -- driven by Zimgs' iterator)
  if(itsDrawSeperableParts.getVal())
  {
    std::vector<Image<PixRGB<float> > > Zimgs;
    std::vector<Image<PixRGB<float> > > Yimgs;

    itsScaleSurpriseControl.SSCgetSeperableParts(Zimgs,Yimgs,false);

    std::vector<Image<PixRGB<float> > >::const_iterator Zitr = Zimgs.begin();
    std::vector<Image<PixRGB<float> > >::const_iterator Yitr = Yimgs.begin();

    ushort scale = 0;

    while(Zitr != Zimgs.end())
    {

      char name[100];
      bimage = *Zitr;
      sprintf(name,"SSC-seperable-parts-Z-%d-",scale);
      std::string prefix    = name;
      std::string frameInfo = "Seperable Parts Z";
      frameInfo             = frameInfo + prefix;
      ofs->writeRGB(bimage, prefix, FrameInfo(frameInfo,SRC_POS));

      bimage = *Yitr;
      sprintf(name,"SSC-seperable-parts-Y-%d-",scale);
      prefix    = name;
      frameInfo = "Seperable Parts Y";
      frameInfo             = frameInfo + prefix;
      ofs->writeRGB(bimage, prefix, FrameInfo(frameInfo,SRC_POS));

      ++Zitr; ++Yitr; scale++;
    }
  }
}