Example #1
0
Symbol COMPONENT_CLASS_CPP::event(Event* event)
{
	switch(event->type)
	{
		case EVENT_STATE_SET:
		{
			//	parse the DataML state attached to this event
			EventStateSet* set = (EventStateSet*) event->data;
			XMLNode xml(set->state);
			DataMLNode state(&xml);

			return C_OK;
		}

		case EVENT_INIT_CONNECT:
		{
			//	complex output in adjacent format (all real parts, then all imaginary)
			outputA.setName("adjacent");
			outputA.create(hComponent);
			outputA.setStructure(TYPE_DOUBLE | TYPE_COMPLEX | TYPE_CPXFMT_ADJACENT, Dims(2, 3).cdims());

			//	complex output in interleaved format (real/imaginary pairs)
			outputI.setName("interleaved");
			outputI.create(hComponent);
			outputI.setStructure(TYPE_DOUBLE | TYPE_COMPLEX | TYPE_CPXFMT_INTERLEAVED, Dims(2, 3).cdims());

			return C_OK;
		}

		case EVENT_RUN_SERVICE:
		{
			//	raw data pointers for both outputs
			DOUBLE* adj = (DOUBLE*) outputA.getContent();
			DOUBLE* ilv = (DOUBLE*) outputI.getContent();

			//	fill both outputs with the same incrementing values,
			//	each laid out in its own complex storage format
			const UINT32 sz = 6;
			for (UINT32 n = 0; n < sz; n++)
			{
				//	real part is n, imaginary part is n + sz
				adj[n] = (DOUBLE) n;
				adj[n + sz] = (DOUBLE) (n + sz);

				ilv[2 * n] = (DOUBLE) n;
				ilv[2 * n + 1] = (DOUBLE) (n + sz);
			}

			return C_OK;
		}

	}

	//	event type not handled by this component
	return S_NULL;
}
Example #2
0
// Build a GenericFrame (RGB + depth) from a stereo disparity message.
// NOTE(review): reads the file-scope globals image_width/image_height,
// camera_instance, temp_depth_map and temp_disparity_map, and MUTATES
// message->reference_image in place (rejected pixels are zeroed) -- confirm
// callers do not reuse the message afterwards.
GenericFrame convert_disparity_message_to_generic_frame(carmen_simple_stereo_disparity_message* message)
{
	Image< PixRGB<byte> > rgbimg(Dims(image_width, image_height), ZEROS);

	// convert disparity to depth; 50.0 is presumably a max-range/baseline
	// parameter -- TODO confirm its units against get_depth_map_from_disparity_map
	get_depth_map_from_disparity_map(message->disparity, temp_depth_map, camera_instance, 50.0);

	for(int i = 0; i < (image_width * image_height); i++)
	{
		temp_disparity_map[i] = message->disparity[i];

		// accept only pixels inside a horizontal band (rows 170 .. height-110)
		// whose depth is at most 12000; everything else is blanked out
		if(i > (image_width * 170) && i < ( image_width * (image_height - 110)) && temp_depth_map[i] <= 12000)
		{
			//temp_depth_map[i] = 12000.0 - temp_depth_map[i];
			rgbimg[i].p[0] = message->reference_image[3 * i];
			rgbimg[i].p[1] = message->reference_image[3 * i + 1];
			rgbimg[i].p[2] = message->reference_image[3 * i + 2];
		}
		else
		{
			// rejected pixel: clear depth, and zero both the output image and
			// the source reference image (chained assignment writes the source)
			temp_depth_map[i] = 0.0;
			rgbimg[i].p[0] = message->reference_image[3 * i] = 0.0;
			rgbimg[i].p[1] = message->reference_image[3 * i + 1] = 0.0;
			rgbimg[i].p[2] = message->reference_image[3 * i + 2] = 0.0;
		}
	}

	// wrap the (already filled) depth buffer and pair it with the RGB image
	Image<unsigned short> depthimg(temp_depth_map, Dims(image_width, image_height));
	GenericFrame gf(rgbimg, depthimg);

	return gf;
}
Example #3
0
// Render the module's current state through itsPlot.  If input for layer 0
// has been received it is included in the plot; otherwise only the structure
// is drawn.  Both branches use identical plotting parameters.
Layout<PixRGB<byte> > NeuralSimModule<T>::getDisplay() const
{
  if (itsInput[0].initialized())
    return itsPlot->draw(*itsStructure, itsInput[0], its2DPlotSize.getVal().w(), 
                         its2DPlotSize.getVal().h(), Dims(0,1),  
                         itsPlotLength.getVal(), itsProbeDepth.getVal(),
                         itsDisplayOutput.getVal());
  else
    return itsPlot->draw(*itsStructure, its2DPlotSize.getVal().w(), 
                         its2DPlotSize.getVal().h(), Dims(0,1),  
                         itsPlotLength.getVal(), itsProbeDepth.getVal(),
                         itsDisplayOutput.getVal());
}
// ######################################################################
void ContourBoundaryDetector::displayContourBoundary()
{
  int w = itsImage.getWidth();
  int h = itsImage.getHeight();

  if(itsWin.is_invalid())
    itsWin.reset(new XWinManaged(Dims(w,h), w+10, 0, "Contours"));
  else itsWin->setDims(Dims(w,h));

  // display the contour
  itsWin->drawImage(getContourBoundaryImage(),0,0);
  //Raster::waitForKey();
}
Example #5
0
/* Cliff's new function for adding blank rows to C model values */

// Shared implementation for the supported element types (double, int):
// grow the typed VecNimArr by numToAdd rows, sizing each newly added row
// to the dimensions of row 0.  'ptr' is the raw external-pointer address.
template<class T>
static SEXP addBlankModelValueRowsTyped(void* ptr, int numToAdd) {
    VecNimArrBase<T>* matPtr = static_cast< VecNimArrBase<T>* >(ptr);
    int nrowCpp = matPtr->size();
    matPtr->resize(numToAdd + nrowCpp);

    // use row 0's dimensions as the template for every new row
    NimArrBase<T>* thisRow = matPtr->getBasePtr(0);
    int numDims = thisRow->numDims();
    vector<int> dims(numDims);
    for(int i = 0; i < numDims; i++)
        dims[i] = thisRow->dimSize(i);

    for(int i = nrowCpp; i < numToAdd + nrowCpp; i++) {
        thisRow = matPtr->getBasePtr(i);
        thisRow->setSize(dims);
    }
    return(returnStatus(true));
}

// Append INTEGER(numAdded)[0] blank rows to the VecNimArr behind Sextptr.
// Returns an R logical: TRUE on success, FALSE on invalid arguments or an
// unsupported element type.
SEXP addBlankModelValueRows(SEXP Sextptr, SEXP numAdded){
    if(!isInteger(numAdded)) {
        PRINTF("Error: numAdded is not an integer!\n");
        return(returnStatus(false) );
    }

    if(!R_ExternalPtrAddr(Sextptr)) {
        PRINTF("Error: Sextptr is not a valid external pointer\n");
        return(returnStatus(false) );
    }

    void* ptr = R_ExternalPtrAddr(Sextptr);
    nimType vecType = static_cast< NimVecType* >(ptr)->getNimType();

    // dispatch on the stored element type; the two supported cases share
    // one template implementation (previously duplicated inline)
    if(vecType == DOUBLE)
        return addBlankModelValueRowsTyped<double>(ptr, INTEGER(numAdded)[0]);
    if(vecType == INT)
        return addBlankModelValueRowsTyped<int>(ptr, INTEGER(numAdded)[0]);

    PRINTF("Data type for VecNimArr not currently supported\n");
    return(returnStatus(false) ) ;
}
Example #6
0
// Shared implementation for the supported element types (double, int):
// resize the typed VecNimArr to newNumRows; when growing and sizeNewToRow1
// is set, give each newly added row the dimensions of row 0.
template<class T>
static SEXP setNumListRowsTyped(void* ptr, int newNumRows, bool sizeNewToRow1) {
    VecNimArrBase<T>* matPtr = static_cast< VecNimArrBase<T>* >(ptr);
    int nrowCpp = matPtr->size();
    matPtr->resize(newNumRows);

    // shrinking (or no-op) needs no per-row sizing
    if(newNumRows <= nrowCpp)
        return(returnStatus(true));

    if(sizeNewToRow1) {
        // use row 0's dimensions as the template for every new row
        NimArrBase<T>* thisRow = matPtr->getBasePtr(0);
        int numDims = thisRow->numDims();
        vector<int> dims(numDims);
        for(int i = 0; i < numDims; i++)
            dims[i] = thisRow->dimSize(i);
        for(int i = nrowCpp; i < newNumRows; i++) {
            thisRow = matPtr->getBasePtr(i);
            thisRow->setSize(dims);
        }
    }
    return(returnStatus(true));
}

// Set the number of rows of the VecNimArr behind Sextptr to INTEGER(nRows)[0].
// Returns an R logical: TRUE on success, FALSE for an unsupported element type.
// BUGFIX: previously the DOUBLE branch only returned TRUE when setSize2row1
// was TRUE; with setSize2row1 FALSE it fell through to the "unsupported type"
// error path and returned FALSE, while the INT branch correctly returned TRUE.
// Both types now behave like the INT branch did.
SEXP setNumListRows(SEXP Sextptr, SEXP nRows, SEXP setSize2row1){
    void* ptr = R_ExternalPtrAddr(Sextptr);
    nimType vecType = static_cast< NimVecType* >(ptr)->getNimType();
    bool sizeNewToRow1 = (LOGICAL(setSize2row1)[0] == TRUE);

    if(vecType == DOUBLE)
        return setNumListRowsTyped<double>(ptr, INTEGER(nRows)[0], sizeNewToRow1);
    if(vecType == INT)
        return setNumListRowsTyped<int>(ptr, INTEGER(nRows)[0], sizeNewToRow1);

    PRINTF("Data type for VecNimArr not currently supported\n");
    return(returnStatus(false) ) ;
}
Example #7
0
// ######################################################################
// Construct a Video4Linux grabber.  All grabber parameters are OModelParam
// members initialized with the caller-supplied 'flags' (see NOTE below);
// runtime state (fd, buffers, counters) starts in the "not opened" state.
V4Lgrabber::V4Lgrabber(OptionManager& mgr, const std::string& descrName,
                       const std::string& tagName,
                       const ParamFlag flags) :
  FrameIstream(mgr, descrName, tagName),
  // NOTE that contrary to the common case, we may give (by default
  // value of 'flags') USE_MY_VAL here when we construct the
  // OModelParam objects; that means that we push our values into the
  // ModelManager as the new default values, rather than having our
  // param take its value from the ModelManager's default
  itsDevName(&OPT_FrameGrabberDevice, this, "/dev/video0", flags), // V4l device
  itsChannel(&OPT_FrameGrabberChannel, this, 1, flags),  // composite input
  itsDims(&OPT_FrameGrabberDims, this, Dims(320, 240), flags),
  itsGrabMode(&OPT_FrameGrabberMode, this, VIDFMT_RGB24, flags), // 24-bit rgb grabbing
  itsByteSwap(&OPT_FrameGrabberByteSwap, this, true, flags), // use byte-swapping
  itsBrightness(&OPT_FrameGrabberBrightness, this, 32768, flags | ALLOW_ONLINE_CHANGES),
  itsHue(&OPT_FrameGrabberHue, this, 32768, flags | ALLOW_ONLINE_CHANGES),
  itsColour(&OPT_FrameGrabberColour, this, 32768, flags | ALLOW_ONLINE_CHANGES),
  itsContrast(&OPT_FrameGrabberContrast, this, 32768, flags | ALLOW_ONLINE_CHANGES),
  itsWhiteness(&OPT_FrameGrabberWhiteness, this, 32768, flags | ALLOW_ONLINE_CHANGES),
  itsStreamingMode(&OPT_FrameGrabberStreaming, this),
  itsFd(-1),           // no device opened yet
  itsMmapBuf(NULL),    // no mmap'd buffer yet
  itsReadBuf(),
  itsTotalBufSize(0),
  itsNumBufFrames(0),
  itsCurrentFrame(0),
  itsGrabbing(NULL),
  itsFrameTime(SimTime::ZERO()),
  itsListener(),
  itsStreamStarted(false)
{
  // request a bunch of camera aliases which work with V4L:
  mgr.requestOptionAlias(&OPT_ALIAScamBttv);
}
Example #8
0
// ######################################################################
// Construct an XC (EPIX XCLIB) frame grabber with fixed 1080p Bayer defaults.
// Note the member-init list is split by #ifdef HAVE_XCLIB: the XCLIB-only
// members (itsFormatFile, itsCameraOk, itsImgBuf) exist only in that build.
XCgrabberFlex::XCgrabberFlex(OptionManager& mgr,
                                 const std::string& descrName,
                                 const std::string& tagName,
                                 const ParamFlag flags) :
  FrameIstream(mgr, descrName, tagName),
  itsDims(&OPT_FrameGrabberDims, this, Dims(1920, 1080), USE_MY_VAL),
  itsGrabMode(&OPT_FrameGrabberMode, this, VIDFMT_BAYER_GB, USE_MY_VAL),
  itsByteSwap(&OPT_FrameGrabberByteSwap, this, false, USE_MY_VAL),
  // NOTE(review): the white-balance params are initialized with 'false';
  // presumably these are numeric options, so this means 0 -- confirm intent
  itsWhiteBalTarR(&OPT_FrameGrabberWhiteBalTargetR, this, false, USE_MY_VAL),
  itsWhiteBalTarG(&OPT_FrameGrabberWhiteBalTargetG, this, false, USE_MY_VAL),
  itsWhiteBalTarB(&OPT_FrameGrabberWhiteBalTargetB, this, false, USE_MY_VAL),
  itsWhiteBalRefR(&OPT_FrameGrabberWhiteBalReferenceR, this, false, USE_MY_VAL),
  itsWhiteBalRefG(&OPT_FrameGrabberWhiteBalReferenceG, this, false, USE_MY_VAL),
  itsWhiteBalRefB(&OPT_FrameGrabberWhiteBalReferenceB, this, false, USE_MY_VAL),
  itsGamma(&OPT_XCFrameGrabberGamma, this, 1.0, USE_MY_VAL | ALLOW_ONLINE_CHANGES),
  itsFPS(&OPT_FrameGrabberFPS, this, 30.0, USE_MY_VAL)
#ifdef HAVE_XCLIB
  ,itsFormatFile(&OPT_XCFormatFileName, this),
  itsCameraOk(false),
  itsImgBuf(NULL)
#endif
{
#ifdef HAVE_XCLIB
  // zero the XCLIB state struct and fill in its self-describing header
  memset(&itsXclib, 0, sizeof(itsXclib));
  itsXclib.ddch.len = sizeof(itsXclib);
  itsXclib.ddch.mos = XCMOS_LIBS;

  itsStatep = NULL;
  itsLastBuf = 0;
#endif
}
Example #9
0
// ######################################################################
// On every simulation clock tick: post a blank frame once at startup (so
// downstream modules have an initial input), then post the latest IPC frame
// whenever one has arrived.
void IpcInputFrameSeries::
onSimEventClockTick(SimEventQueue& q, rutz::shared_ptr<SimEventClockTick>& e)
{
	// NOTE(review): function-local static, so this "first tick" flag is shared
	// by ALL instances of this class and is not thread-safe -- confirm a single
	// instance/thread is the intended usage
	static int first_sim_event_clock = 1;

	if(first_sim_event_clock)
	{
		// seed the queue with an all-zero RGB frame of the configured size
		Image< PixRGB<byte> > rgbimg(Dims(image_width, image_height), ZEROS);
		GenericFrame gf(rgbimg);

		rutz::shared_ptr<SimEventInputFrame>
					ev(new SimEventInputFrame(this, gf, 0));
					q.post(ev);

		first_sim_event_clock = 0;
	}

	// has_new_frame / frame / frame_counter are set elsewhere (IPC callback);
	// consume the pending frame exactly once
	if(has_new_frame)
	{
		has_new_frame = 0;

		if (frame.initialized())
		{
			rutz::shared_ptr<SimEventInputFrame>
			ev(new SimEventInputFrame(this, frame, frame_counter));
			q.post(ev);
		}
	}
}
Example #10
0
// ######################################################################
// Consume a gist-vector event (if any) and update the top-down task-relevance
// map: classify the gist, optionally average over a cache of recent TD maps,
// and rescale the result to the output map size.  No-op when the map has
// already been computed for the current frame or no gist event is pending.
void TaskRelevanceMapGistClassify::integrate(SimEventQueue& q)
{
  if (SeC<SimEventGistOutput> e =
      q.check<SimEventGistOutput>(this))
    {
      if (itsMapComputedForCurrentFrame)
        return;

      //! to make the gist feature value in a reasonable range to get the
      //  variance and determine value
      itsGist = e->gv() / 10.0F;

      // flatten the gist image into a single row vector before matching
      gistmatch(reshape(itsGist,
                        Dims(itsGist.getWidth() * itsGist.getHeight(), 1)));

      if (itsCacheSize.getVal() > 0)
        {
          // keep a rolling cache; refresh the current map every
          // itsUpdatePeriod frames with the cache mean
          itsTDMap.push_back(itsTmpTDMap);
          if (itsFrame % itsUpdatePeriod.getVal() ==0)
            itsCurrentTDMap = itsTDMap.mean();
        }
      else
        itsCurrentTDMap = itsTmpTDMap;


      itsMap = rescale(itsCurrentTDMap, itsMap.getDims());

      float mi, ma; getMinMax(itsMap, mi, ma);
      LINFO("\nFinal TRM range = [%.2f .. %.2f] -- 1.0 is baseline\n", mi, ma);

      itsMapComputedForCurrentFrame = true;
    }
}
Example #11
0
// ######################################################################
// Return the dimensions of whichever representation is this frame's native
// type.  RGBD reports the RGB image's dims (the depth image is presumably
// the same size -- not checked here).
Dims GenericFrame::getDims() const
{
  switch (itsNativeType)
    {
    case NONE:     return Dims();
    case RGB_U8:   return itsRgbU8.getDims();
    case RGB_U16:  return itsRgbU16.getDims();
    case RGB_F32:  return itsRgbF32.getDims();
    case GRAY_U8:  return itsGrayU8.getDims();
    case GRAY_U16: return itsGrayU16.getDims();
    case GRAY_F32: return itsGrayF32.getDims();
    case VIDEO:    return itsVideo.getDims();
    case RGBD:     return itsRgbU8.getDims();
    }

  ASSERT(0); /* can't happen */ return Dims();
}
// ######################################################################
// Return the dims of this channel's output map: the input dims scaled
// down by 2^mapLevel in each dimension (integer division).
Dims IntegerSimpleChannel::getMapDims() const
{
GVX_TRACE(__PRETTY_FUNCTION__);
  const int scale = 1 << itsLevelSpec.getVal().mapLevel();
  const Dims input = this->getInputDims();

  return Dims(input.w() / scale, input.h() / scale);
}
Example #13
0
// ######################################################################
// Reset this channel to its pristine state: clear dims, pyramid, timestamps,
// coefficients and cached output, then let the base class do its own reset.
void DirectFeedChannel::reset1()
{
  itsMapDims = Dims();
  itsPyr.reset();
  itsInputTime = SimTime::ZERO();
  // -1s marks "no pyramid received yet" (compared against input time in doInput)
  itsPyrTime = SimTime::SECS(-1.0);
  itsCoeff.clear();
  itsOutputCache.freeMem();

  ChannelBase::reset1();
}
Example #14
0
// ######################################################################
// Apply this resize specification to 'in' and return the resulting dims.
Dims ResizeSpec::transformDims(const Dims& in)
{
  switch (itsMethod)
    {
    case NOOP:
      return in;

    case FIXED:
      return itsNewDims;

    case SCALE_UP:
      {
        // a zero factor leaves that dimension untouched; otherwise multiply
        // and round to nearest
        const int w = itsFactorW > 0.0 ? int(0.5 + in.w() * itsFactorW) : in.w();
        const int h = itsFactorH > 0.0 ? int(0.5 + in.h() * itsFactorH) : in.h();
        return Dims(w, h);
      }

    case SCALE_DOWN:
      {
        // a zero factor leaves that dimension untouched; otherwise divide
        // and round to nearest
        const int w = itsFactorW > 0.0 ? int(0.5 + in.w() / itsFactorW) : in.w();
        const int h = itsFactorH > 0.0 ? int(0.5 + in.h() / itsFactorH) : in.h();
        return Dims(w, h);
      }
    }

  // we should never get here, because even if the user gave bogus
  // input, we should have caught that in convertFromString() or
  // wherever, so that once we have a ResizeSpec object, it should be
  // guaranteed to have a valid itsMethod value:
  ASSERT(0); /* can't happen */ return Dims();
}
Example #15
0
// ######################################################################
// Describe the frames this input source produces: 8-bit RGB with
// unspecified (0x0) dimensions and no video/float particulars.
GenericFrameSpec XMLInput::peekFrameSpec()
{
  GenericFrameSpec spec;

  spec.nativeType    = GenericFrame::RGB_U8;
  spec.videoFormat   = VIDFMT_AUTO;
  spec.videoByteSwap = false;
  spec.dims          = Dims(0,0);
  spec.floatFlags    = 0;

  return spec;
}
Example #16
0
// ######################################################################
// Construct a descriptor vector bound to the given complex channel.
// The fovea size option defaults to 80x80 pixels (set in the body so it
// goes through the OModelParam setter).
DescriptorVec::DescriptorVec(OptionManager& mgr,
    const std::string& descrName,
    const std::string& tagName,
    ComplexChannel *cc)
: ModelComponent(mgr, descrName, tagName),
  itsComplexChannel(cc), itsFoveaSize(&OPT_DescriptorVecFOV, this),
  itsFeatureHistogram(100),
  itsFEngine(0),
  itsFV(0)
{
  itsFoveaSize.setVal(Dims(80,80));
}
Example #17
0
// ######################################################################
// Prepare per-frame state for the TIGS relevance computation: (re)load the
// PCA/TIGS matrices, downsample the grayscale input 16x, and flatten it into
// a normalized row vector for later matrix products.
void TaskRelevanceMapTigs2::inputFrame(const InputFrame& f)
{
  getGistPCAMatrix();
  getImgPCAMatrix();
  getTigsMatrix();
  itsFrame++;

  // 1/16-scale grayscale version of the frame, nearest-neighbor (no interp)
  Image<float> currFrame = rescale(f.grayFloat(),f.getDims()/16, RESCALE_SIMPLE_NOINTERP);
  // normalize to [0,1] and flatten (transposed) into a 1-row vector
  itsCurrFrameVec = reshape
    (transpose(currFrame/255.0F), Dims(currFrame.getWidth()*currFrame.getHeight(),1));

  itsMapComputedForCurrentFrame = false;
}
Example #18
0
// ######################################################################
// Build the descriptor vector: for every submap of the complex channel, crop
// a fovea-sized window around the current fovea location (at saliency-map
// resolution) and record the window's maximum value as one feature.
void DescriptorVec::buildDV()
{
  const LevelSpec lspec = itsComplexChannel->getModelParamVal<LevelSpec>("LevelSpec");
  const int smlevel = lspec.mapLevel();

  // fovea center, converted from image coords to saliency-map coords
  // (+0.49 rounds to nearest)
  int x=int(itsFoveaLoc.i / double(1 << smlevel) + 0.49);
  int y=int(itsFoveaLoc.j / double(1 << smlevel) + 0.49);

  int foveaW = int(itsFoveaSize.getVal().w() / double(1 << smlevel) + 0.49);
  int foveaH = int(itsFoveaSize.getVal().h() / double(1 << smlevel) + 0.49);

  //Adjust the fovea location so we dont go outside the image
  // NOTE(review): tl_x/tl_y can still be negative near the image border,
  // and only the width is checked before cropping below -- confirm crop()
  // clamps out-of-range coordinates
  int tl_x = x - (foveaW/2);
  int tl_y = y - (foveaH/2);


  //Go through all the submaps building the DV
  itsFV.clear(); //clear the FV
  uint numSubmaps = itsComplexChannel->numSubmaps();
  for (uint i = 0; i < numSubmaps; i++)
  {
    Image<float> submap = itsComplexChannel->getSubmap(i);
    //Image<float> submap = itsComplexChannel->getRawCSmap(i);


    //itsFV.push_back(submap.getVal(x,y));

    //get only the fovea region
    if (foveaW < submap.getWidth()) //crop if our fovea is smaller
      submap = crop(submap, Point2D<int>(tl_x, tl_y), Dims(foveaW, foveaH));
    //submap = maxNormalize(submap, 0.0F, 10.0F, VCXNORM_MAXNORM);



    /*   Point2D<int> p; float maxVal, minVal, midVal;
         findMax(submap, p, maxVal);
         findMin(submap, p, minVal);
         midVal = (maxVal-minVal)/2;
         int nParticles = countThresh(submap, 1.0F); //countParticles(submap, 1.0F);
         itsFV.push_back(nParticles); */

    // the feature is simply the peak response within the fovea window
    float maxVal; Point2D<int> maxLoc;
    findMax(submap, maxLoc, maxVal);

    //SHOWIMG(rescale(submap, 255, 255));

    itsFV.push_back(maxVal);

  }
}
Example #19
0
// ######################################################################
// Construct a Kinect frame grabber; the dims option defaults to the fixed
// libfreenect frame size.  The device itself is opened later (itsDev starts
// empty).
KinectGrabber::KinectGrabber(OptionManager& mgr, const std::string& descrName,
                             const std::string& tagName, const ParamFlag flags) :
    FrameIstream(mgr, descrName, tagName),
    // NOTE that contrary to the common case, we may give (by default
    // value of 'flags') USE_MY_VAL here when we construct the
    // OModelParam objects; that means that we push our values into the
    // ModelManager as the new default values, rather than having our
    // param take its value from the ModelManager's default
    itsDims(&OPT_FrameGrabberDims, this, Dims(FREENECT_FRAME_W, FREENECT_FRAME_H), flags),
    itsListener(),
    itsDev()
{

}
Example #20
0
// ######################################################################
// Describe the frame this PNG file will decode to: 8-bit gray or 8-bit RGB
// at the file's pixel dimensions.  For any other color type rep->onError is
// invoked (which presumably aborts/throws rather than returning -- otherwise
// nativeType would be left unset here).
GenericFrameSpec PngParser::getFrameSpec() const
{
  GenericFrameSpec result;

  if (rep->isGray())       result.nativeType = GenericFrame::GRAY_U8;
  else if (rep->isColor()) result.nativeType = GenericFrame::RGB_U8;
  else rep->onError("unsupported image type (neither grayscale nor RGB)");

  result.videoFormat = VIDFMT_AUTO;
  result.videoByteSwap = false;
  result.dims = Dims(rep->width, rep->height);
  result.floatFlags = 0;

  return result;
}
Example #21
0
// ######################################################################
// Deserialize a BitObject from a text stream.  The read order below must
// match the corresponding write routine exactly; no validation is performed
// on the incoming values.
void BitObject::readFromStream(std::istream& is)
{
  // bounding box (a negative top marks "no box")
  int t, l, b, r;
  is >> t; is >> l; is >> b; is >> r;
  if (t >= 0)
    itsBoundingBox = Rectangle::tlbrI(t, l, b, r);
  else
    itsBoundingBox = Rectangle();


  // image dims
  int w, h;
  is >> w; is >> h;
  itsImageDims = Dims(w,h);

  // centroid
  itsCentroidXY = Vector2D(is);

  // area
  is >> itsArea;

  // have second moments? (serialized as 0/1)
  int hs; is >> hs;
  haveSecondMoments = (hs == 1);

  // second moments
  is >> itsUxx; is >> itsUyy; is >> itsUxy;

  // axes, elongation, angle
  is >> itsMajorAxis; is >> itsMinorAxis;
  is >> itsElongation; is >> itsOriAngle;

  // max, min, avg intensity
  is >> itsMaxIntensity;
  is >> itsMinIntensity;
  is >> itsAvgIntensity;

  // object mask: the remainder of the stream is a PNM image
  PnmParser pp(is);
  itsObjectMask = pp.getFrame().asGray();
  
}
Example #22
0
// Feed an input image to the given layer, lazily building the model on first
// use and rescaling the image to the structure's output dims.
// NOTE(review): the image is stored at index layer+1, so itsInput must have
// room for layer+2 entries and slot 0 is reserved -- confirm against the
// container's setup.
void NeuralSimModule<T>::setInput(const Image<float>& current, const int layer)
{ 
  Image<float> inp = current;
  
  //reset module if its currently not valid
  if (!itsStructure.is_valid())
    {
      // prefer the explicitly configured SC dims; fall back to the input's
      if (itsSCdims.getVal() != Dims(0,0))
        setModel(tagName(), itsSCdims.getVal());  
      else
        setModel(tagName(), current.getDims());  
      
      itsInputDims = current.getDims();
    }

  //rescale input if needed
  inp = rescaleBilinear(inp, itsStructure->getOutDims());
  itsInput[layer+1] = inp;//implicit conversion from Image<float> to image<double>
}
Example #23
0
// ######################################################################
// Release the object's mask and return every member to its default
// "no object" value.
void BitObject::freeMem()
{
  // geometry
  itsObjectMask.freeMem();
  itsBoundingBox = Rectangle();
  itsCentroidXY = Vector2D();
  itsImageDims = Dims(0,0);

  // area and second moments
  itsArea = 0;
  itsSMV = 0.;
  itsUxx = 0.0F;
  itsUyy = 0.0F;
  itsUxy = 0.0F;
  haveSecondMoments = false;

  // shape descriptors
  itsMajorAxis = 0.0F;
  itsMinorAxis = 0.0F;
  itsElongation = 0.0F;
  itsOriAngle = 0.0F;

  // intensity statistics
  itsMaxIntensity = -1.0F;
  itsMinIntensity = -1.0F;
  itsAvgIntensity = -1.0F;
}
Example #24
0
// ######################################################################
// Accept a new input frame.  The frame's timestamp must match the time of
// the most recently supplied direct-feed pyramid (itsPyrTime); otherwise we
// abort, since this channel cannot compute its own pyramid.  Also derives
// the output map dims from the input dims and the configured map level.
void DirectFeedChannel::doInput(const InputFrame& inframe)
{
  if (inframe.grayFloat().initialized()) LINFO("using bwimg");
  else if (inframe.colorFloat().initialized()) LINFO("using colimg");
  else LFATAL("Need to have either colimg or bwimg as input!");

  itsInputTime = inframe.time();

  LDEBUG("itsInputTime: %fms", itsInputTime.msecs());

  if (itsInputTime != itsPyrTime)
    LFATAL("I don't have any direct-feed input for time=%fms "
           "(last input was at time=%fms)",
           itsInputTime.msecs(), itsPyrTime.msecs());

  // map is the input scaled down by 2^mapLevel in each dimension (truncated)
  const float fac = pow(0.5f,float(itsMapLevel.getVal()));
  itsMapDims = Dims(int(this->getInputDims().w()*fac),
                    int(this->getInputDims().h()*fac));
  LDEBUG("itsMapDims = %s; itsInputDims = %s",toStr(itsMapDims).c_str(),
        toStr(this->getInputDims()).c_str());
}
Example #25
0
// Default-construct detection parameters from the DEFAULT_* constants, then
// derive the max tracking cost from the max distance.
// BUGFIX: itsMaskWidth/itsMaskHeight previously had their defaults swapped
// (width was initialized from DEFAULT_MASK_HEIGHT and vice versa).
DetectionParameters::DetectionParameters()
: itsMaxEvolveTime(DEFAULT_MAX_EVOLVE_TIME),
itsMaxWTAPoints(DEFAULT_MAX_WTA_POINTS),
itsSaveNonInteresting(DEFAULT_SAVE_NON_INTERESTING),
itsSaveOriginalFrameSpec(DEFAULT_SAVE_ORG_FRAME_SPEC),
itsEventExpirationFrames(0),
itsTrackingMode(DEFAULT_TRACKING_MODE),
itsColorSpaceType(DEFAULT_COLOR_SPACE),
itsMinStdDev(DEFAULT_MIN_STD_DEV),
itsMaxDist(40), // pixels; also drives itsMaxCost below
itsMaxEventFrames(DEFAULT_MAX_EVENT_FRAMES),
itsMinEventFrames(DEFAULT_MIN_EVENT_FRAMES),
itsMaxEventArea(0),
itsMinEventArea(0),
itsSaliencyFrameDist(DEFAULT_SALIENCY_FRAME_DIST),
itsMaskPath(""),
itsSizeAvgCache(DEFAULT_SIZE_AVG_CACHE),
itsMaskXPosition(DEFAULT_MASK_X_POSITION),
itsMaskYPosition(DEFAULT_MASK_Y_POSITION),
itsMaskWidth(DEFAULT_MASK_WIDTH),
itsMaskHeight(DEFAULT_MASK_HEIGHT),
itsRescaleSaliency(Dims(0,0)),
itsUseFoaMaskRegion(DEFAULT_FOA_MASK_REGION),
itsSegmentAlgorithmType(DEFAULT_SEGMENT_ALGORITHM_TYPE),
itsSegmentAlgorithmInputType(DEFAULT_SEGMENT_ALGORITHM_INPUT_TYPE),
itsSegmentGraphParameters(DEFAULT_SEGMENT_GRAPH_PARAMETERS),
itsSegmentAdaptiveParameters(DEFAULT_SEGMENT_ADAPTIVE_PARAMETERS),
itsCleanupStructureElementSize(DEFAULT_SE_SIZE),
itsSaliencyInputType(DEFAULT_SALIENCY_INPUT_TYPE),
itsKeepWTABoring(DEFAULT_KEEP_WTA_BORING),
itsMaskDynamic(DEFAULT_DYNAMIC_MASK),
itsMaskLasers(DEFAULT_MASK_GRAPHCUT),
itsXKalmanFilterParameters(DEFAULT_KALMAN_PARAMETERS),
itsYKalmanFilterParameters(DEFAULT_KALMAN_PARAMETERS)
{
    // derive the maximum assignment cost from the maximum distance:
    // cost = dist^2 + (dist^2/4)^2  (small-integer powers written as
    // multiplications rather than pow() calls)
    const double maxDist = itsMaxDist;
    const double maxAreaDiff = maxDist * maxDist / 4.0;
    itsMaxCost = maxDist * maxDist + maxAreaDiff * maxAreaDiff;
}
Example #26
0
// Draw this object's bounding box onto 'img', alpha-blending the given color
// with the given opacity (1.0 = fully opaque).  If 'img' has different dims
// than the image the object was extracted from, the box is scaled to match.
void BitObject::drawBoundingBox(Image<T_or_RGB>& img, 
                                const T_or_RGB& color,
                                float opacity)
{
  ASSERT(isValid());
  ASSERT(img.initialized());
  Rectangle bbox = itsBoundingBox;

  // rescale if needed
  if (img.getDims() != itsImageDims) {
    Dims d = img.getDims();
    float scaleW = (float) d.w() / (float) itsImageDims.w();
    float scaleH = (float) d.h() / (float) itsImageDims.h();
    int i = (int) ((float) bbox.left() * scaleW);
    int j = (int) ((float) bbox.top() * scaleH);
    int w = (int) ((float) bbox.width() * scaleW);
    int h = (int) ((float) bbox.height() *scaleH);
    const Point2D<int> topleft(i,j);
    bbox = Rectangle(topleft, Dims(w,h));
  }

  float op2 = 1.0F - opacity;
  int t = bbox.top();
  int b = bbox.bottomI();
  int l = bbox.left();
  int r = bbox.rightI();
  
  // top and bottom edges
  for (int x = l; x <= r; ++x)
    {
      Point2D<int> p1(x,t), p2(x,b);
      img.setVal(p1,img.getVal(p1) * op2 + color * opacity);
      img.setVal(p2,img.getVal(p2) * op2 + color * opacity);
    }
  // left and right edges (corners already painted above)
  for (int y = t+1; y < b; ++y)
    {
      Point2D<int> p1(l,y), p2(r,y);
      img.setVal(p1,img.getVal(p1) * op2 + color * opacity);
      img.setVal(p2,img.getVal(p2) * op2 + color * opacity);
    }
}
Example #27
0
// Alpha-blend the given color over every pixel of this object's mask inside
// its bounding box.  If 'img' has different dims than the source image, both
// the box and the mask are rescaled first.
// NOTE(review): mptr starts at mask.begin() and advances one step per bbox
// pixel, which only lines up if the mask rows match the bbox walk; after
// rescaleNI the mask has full-image dims, so verify the iteration offsets
// against itsObjectMask's actual dimensions.
void BitObject::drawShape(Image<T_or_RGB>& img, 
                          const T_or_RGB& color,
                          float opacity)
{
  ASSERT(isValid());
  ASSERT(img.initialized());
  Dims d = img.getDims();
  Image<byte> mask = itsObjectMask;
  Rectangle bbox = itsBoundingBox;

  // rescale if needed
  if (d != itsImageDims) {
    float scaleW = (float) d.w() / (float) itsImageDims.w();
    float scaleH = (float) d.h() / (float) itsImageDims.h();
    int i = (int) ((float) bbox.left() * scaleW);
    int j = (int) ((float) bbox.top() * scaleH);
    int w = (int) ((float) bbox.width() * scaleW);
    int h = (int) ((float) bbox.height() *scaleH);
    const Point2D<int> topleft(i,j);
    bbox = Rectangle(topleft, Dims(w,h));
    mask = rescaleNI(mask, d.w(), d.h());
  }

  int w = img.getWidth();
  float op2 = 1.0F - opacity;

  // walk the bounding-box region row by row; iptr2 marks the start of the
  // current image row within the box
  typename Image<T_or_RGB>::iterator iptr, iptr2;
  Image<byte>::const_iterator mptr = mask.begin();
  iptr2 = img.beginw() + bbox.top() * w + bbox.left();
  for (int y = bbox.top(); y <= bbox.bottomI(); ++y)
    {
      iptr = iptr2;
      for (int x = bbox.left(); x <= bbox.rightI(); ++x)
        {
          // blend only where the mask is set
          if (*mptr > 0) *iptr = T_or_RGB(*iptr * op2 + color * opacity);
          ++iptr; ++mptr;
        }
      iptr2 += w;
    }
}
Example #28
0
/*
The default layout is 1080p(1920x1080)

                              1280             1920
+-------------------------------+---------------+
|                               |               |
|                               |    Global     |
|                               |   Localizer   |
|                               |               |
|           HandyCam            |               |
|                               +---------------+ 440
|                               |               |
|                               |               |
|                               |               |
+----------------+--------------+   Navigator   | 720
|                |              |               |
|   RoadFinder   |   Saliency   |               |
|                |    Match     |               |
|                |              |               |
+----------------+--------------+---------------+ 1080
               720            1280
*/
// Construct the visualizer: create the output frame series and lay out the
// display panels per the 1080p diagram above.
// NOTE(review): itsNavigator, itsPilot and itsSim are all anchored at
// itsLocalizer.bl() (and itsPilot/itsSim both carry the label "Pilot"), so
// they overlap -- presumably only one is shown at a time; confirm.
BeoVisualizer::BeoVisualizer(OptionManager& mgr,
               const std::string& descrName, const std::string& tagName) :
  RobotBrainComponent(mgr, descrName, tagName),
  itsTimer(1000000),
  itsCurrMessageID(0),
  itsOfs(new OutputFrameSeries(mgr))
{
  addSubComponent(itsOfs);

  // full 1920x1080 canvas, zero-initialized
  itsDispImage   = Image<PixRGB<byte> >(IMG_WIDTH,IMG_HEIGHT,ZEROS);
  itsHandycam  = Panel(Point2D<int>(0,0) ,Dims(1280,720), "HandyCam");
  itsRoadFinder= Panel(itsHandycam.bl()  ,Dims(1280,360), "RoadFinder");
  itsImageMatch= Panel(itsRoadFinder.tr(),Dims(560 ,360), "ImageMatch");
  itsLocalizer = Panel(itsHandycam.tr()  ,Dims(640 ,440), "Global Map");
  itsNavigator = Panel(itsLocalizer.bl() ,Dims(640 ,640), "Local Map");
  itsPilot= Panel(itsLocalizer.bl() ,Dims(640 ,640), "Pilot");
  itsSim = Panel(itsLocalizer.bl() ,Dims(640 ,400), "Pilot");
  

  //fake image just for display test FIXXXXXX
  //Image<PixRGB<byte> > fakeHandyCamImg = Raster::ReadRGB("HandyCam.png");
  //itsHandycam.updateImage(fakeHandyCamImg);
}
Example #29
0
// Object-recognition demo: builds a saliency "brain", connects to an nv2
// label server, classifies incoming image patches with a Bayes net, and
// sends the resulting labels back.  A mouse click in the display window
// toggles interactive training mode.
// NOTE(review): mgr, xwin, server and terminate are file-scope globals
// (declared elsewhere in this file); terminateProc sets 'terminate'.
int main(const int argc, const char **argv)
{

  MYLOGVERB = LOG_INFO;
  mgr = new ModelManager("Test ObjRec");

  nub::soft_ref<SimEventQueueConfigurator>
    seqc(new SimEventQueueConfigurator(*mgr));
  mgr->addSubComponent(seqc);

  //our brain
  nub::ref<StdBrain>  brain(new StdBrain(*mgr));
  mgr->addSubComponent(brain);

  // configure the visual cortex and fast saliency/WTA pipeline
  mgr->exportOptions(MC_RECURSE);
  mgr->setOptionValString(&OPT_VisualCortexType, "IOC");
  //mgr.setOptionValString(&OPT_VisualCortexType, "I");
  //mgr->setOptionValString(&OPT_VisualCortexType, "GNO");
  //mgr.setOptionValString(&OPT_VisualCortexType, "N");
  //manager.setOptionValString(&OPT_UseOlderVersion, "false");
  // set the FOA and fovea radii
  mgr->setOptionValString(&OPT_SaliencyMapType, "Fast");
  mgr->setOptionValString(&OPT_SMfastInputCoeff, "1");

  mgr->setOptionValString(&OPT_WinnerTakeAllType, "Fast");
  mgr->setOptionValString(&OPT_SimulationTimeStep, "0.2");

  mgr->setModelParamVal("FOAradius", 50, MC_RECURSE);
  mgr->setModelParamVal("FoveaRadius", 50, MC_RECURSE);


  mgr->setOptionValString(&OPT_IORtype, "Disc");

  if (mgr->parseCommandLine(
        (const int)argc, (const char**)argv, "<Network file> <server ip>", 2, 2) == false)
    return 1;

  // catch signals and redirect them to terminate for clean exit:
  signal(SIGHUP, terminateProc); signal(SIGINT, terminateProc);
  signal(SIGQUIT, terminateProc); signal(SIGTERM, terminateProc);
  signal(SIGALRM, terminateProc);

  mgr->start();

  ComplexChannel *cc =
    &*dynCastWeak<ComplexChannel>(brain->getVC());

  //Get a new descriptor vector
  DescriptorVec descVec(*mgr, "Descriptor Vector", "DecscriptorVec", cc);
  //Get  new classifier
  Bayes bayesNet(descVec.getFVSize(), 0);

  //get command line options
  const char *bayesNetFile = mgr->getExtraArg(0).c_str();
  const char *server_ip = mgr->getExtraArg(1).c_str();
  bool train = false;

  int foveaRadius = mgr->getModelParamVal<int>("FoveaRadius", MC_RECURSE);

  printf("Setting fovea to %i, train = %i\n", foveaRadius, train);

  //load the network if testing
  //if (!train)
    bayesNet.load(bayesNetFile);

  descVec.setFoveaSize(foveaRadius);

  xwin  = new XWinManaged(Dims(256,256),
      -1, -1, "ILab Robot Head Demo");


  // connect to the nv2 label server (receive on 9930, send on 9931)
  server = nv2_label_server_create(9930,
        server_ip,
        9931);

  nv2_label_server_set_verbosity(server,1); //allow warnings


  int send_interval = 1;

  // main loop: fetch a patch, classify (or learn), and send back a label
  while(!terminate)
  {
    double prob = 0, statSig = 0;

    // any mouse click toggles training mode
    Point2D clickLoc = xwin->getLastMouseClick();
    if (clickLoc.isValid())
      train = !train;

    struct nv2_image_patch p;
    const enum nv2_image_patch_result res =
      nv2_label_server_get_current_patch(server, &p);

    std::string objName = "nomatch";
    if (res == NV2_IMAGE_PATCH_END)
    {
      fprintf(stdout, "ok, quitting\n");
      break;
    }
    else if (res == NV2_IMAGE_PATCH_NONE)
    {
      // no patch pending; avoid busy-waiting
      usleep(10000);
      continue;
    }
    else if (res == NV2_IMAGE_PATCH_VALID &&
       p.type == NV2_PIXEL_TYPE_RGB24)
    {
      printf("Valid patch %s %ix%i\n", p.training_label,
          p.width, p.height);

      //showimg
      Image<PixRGB<byte> > img(p.width, p.height, NO_INIT);
     // unsigned char *imgPtr = const_cast<unsigned char*>
     //   (reinterpret_cast<const unsigned char*>(img.getArrayPtr()));

      memcpy(img.getArrayPtr(), p.data, p.width*p.height*3);

      Image<PixRGB<byte> > objImg = rescale(img, 256, 256);

      // classify; -150 is an empirically chosen log-probability cutoff
      int cls = classifyImage(objImg, descVec, bayesNet, &prob, &statSig);
      if (cls != -1 && prob > -150)
        objName = bayesNet.getClassName(cls);
      else
        objName = "nomatch";

      printf("This is %s: Class %i prob %f\n",
          objName.c_str(), cls, prob);


     // if (strcmp(p.training_label, "none") != 0 &&
     //     false) { //training
     if (cls == -1)
     {
        // unknown object: ask the operator for a label and learn it
        printf("Can you tell me what this is?\n");
        std::getline(std::cin, objName);
        learnImage(objImg, 0, descVec, bayesNet, objName.c_str());
        bayesNet.save(bayesNetFile);
      } else {
        printf("Is this a %s?\n", objName.c_str());

        if (train)
        {
          // operator may correct the label (empty line keeps the guess)
          std::string tmp;
          std::getline(std::cin, tmp);
          if (tmp != "")
            objName = tmp;

          LINFO("Learning %s\n", objName.c_str());
          fflush(stdout);

          learnImage(objImg, 0, descVec, bayesNet, objName.c_str());
          bayesNet.save(bayesNetFile);
        }

      }

    }

    // report any successful match back to the label server
    if (objName != "nomatch")
    {
      printf("Object is %s\n", objName.c_str());

      struct nv2_patch_label l;
      l.protocol_version = NV2_LABEL_PROTOCOL_VERSION;
      l.patch_id = p.id;
      snprintf(l.source, sizeof(l.source), "%s",
          "ObjRec");
      snprintf(l.name, sizeof(l.name), "%s", // (%ux%u #%u)",
          objName.c_str());
      //(unsigned int) p.width,
      //(unsigned int) p.height,
      //(unsigned int) p.id);
      snprintf(l.extra_info, sizeof(l.extra_info),
          "%i", (int)statSig);

      // rate-limit outgoing labels (send_interval == 1 sends everything)
      if (l.patch_id % send_interval == 0)
      {
        nv2_label_server_send_label(server, &l);

        fprintf(stdout, "sent label '%s (%s)'\n",
            l.name, l.extra_info);
      }
      else
      {
        fprintf(stdout, "DROPPED label '%s (%s)'\n",
            l.name, l.extra_info);
      }
    }

    nv2_image_patch_destroy(&p);
  }

  nv2_label_server_destroy(server);

  // main implicitly returns 0 here
}
Example #30
0
// ######################################################################
// ######################################################################
/// Extract salient BitObjects from the list of attention winners.
///
/// For each winner, either segments objects inside a region around the
/// FOA (focus of attention) mask, or uses the FOA mask itself, keeping
/// only objects whose area lies in [itsMinEventArea, itsMaxEventArea].
/// Candidates are then pruned greedily, smallest first, discarding any
/// object that overlaps an already-kept one.
///
/// @param rv            result viewer (unused here; kept for interface)
/// @param winlist       attention winners to extract objects from
/// @param segmentInImg  image used for segmentation
/// @return list of non-intersecting BitObjects, smallest kept first
std::list<BitObject> ObjectDetection::run(nub::soft_ref<MbariResultViewer> rv,
    const std::list<Winner> &winlist,
    const Image< PixRGB<byte> > &segmentInImg)
{
    DetectionParameters p = DetectionParametersSingleton::instance()->itsParameters;
    std::list<BitObject> bosFiltered;    // final non-overlapping objects
    std::list<BitObject> bosUnfiltered;  // all candidates before overlap pruning
    std::list<Winner>::const_iterator iter = winlist.begin();

    // go through each winner and extract salient objects
    while (iter != winlist.end()) {

        // get the foa mask
        BitObject boFOA = (*iter).getBitObject();
        WTAwinner winner = (*iter).getWTAwinner();

        // if the foa mask area is too small, we aren't going to find any
        // large enough objects so bail out
        if (boFOA.getArea() < p.itsMinEventArea) {
            ++iter;
            continue;
        }

        if (p.itsUseFoaMaskRegion) {
            // use the region around the foa mask (not the mask itself) to
            // guide the detection
            LINFO("----------------->Using FOA mask region");
            Rectangle foaregion = boFOA.getBoundingBox();
            Point2D<int> center = boFOA.getCentroid();
            Dims d = segmentInImg.getDims();
            Dims segmentDims = Dims((float)foaregion.width()*2.0,(float)foaregion.height()*2.0);
            Dims searchDims = Dims((float)foaregion.width(),(float)foaregion.height());
            Rectangle searchRegion = Rectangle::centerDims(center, searchDims);
            searchRegion = searchRegion.getOverlap(Rectangle(Point2D<int>(0, 0), segmentInImg.getDims() - 1));
            Rectangle segmentRegion = Rectangle::centerDims(center, segmentDims);
            segmentRegion = segmentRegion.getOverlap(Rectangle(Point2D<int>(0, 0), segmentInImg.getDims() - 1));

            // get the region used for searching for a match based on the foa region
            LINFO("Extracting bit objects from frame %d winning point %d %d/region %s minSize %d maxSize %d %d %d", \
                   (*iter).getFrameNum(), winner.p.i, winner.p.j, convertToString(searchRegion).c_str(), p.itsMinEventArea,
                    p.itsMaxEventArea, d.w(), d.h());

            std::list<BitObject> sobjs = extractBitObjects(segmentInImg, center, searchRegion, segmentRegion, (float)p.itsMinEventArea/2.F, p.itsMaxEventArea);
            std::list<BitObject> sobjsKeep;

            // need at least two objects to find a match, otherwise just background
            if (sobjs.size() > 1) {

                // keep objects within the allowed area range and tag each
                // with the winning voltage
                // BUGFIX: lower bound compared against itsMaxEventArea,
                // which rejected every object smaller than the maximum
                std::list<BitObject>::iterator sit;
                for (sit = sobjs.begin(); sit != sobjs.end(); ++sit) {
                    if ((*sit).getArea() >= p.itsMinEventArea && (*sit).getArea() <= p.itsMaxEventArea) {
                        (*sit).setSMV(winner.sv);
                        sobjsKeep.push_back(*sit);
                    }
                }

                // add to the candidate list
                bosUnfiltered.splice(bosUnfiltered.begin(), sobjsKeep);
            }
            else {
                // fall back on the foa mask itself
                LINFO("Can't find bit object, checking FOA mask");
                if (boFOA.getArea() >= p.itsMinEventArea && boFOA.getArea() <= p.itsMaxEventArea) {
                    boFOA.setSMV(winner.sv);
                    sobjsKeep.push_back(boFOA);
                    bosUnfiltered.splice(bosUnfiltered.begin(), sobjsKeep);
                    LINFO("FOA mask ok %d < %d < %d", p.itsMinEventArea, boFOA.getArea(), p.itsMaxEventArea);
                }
                else
                    LINFO("FOA mask out of range %d < %d or %d > %d", boFOA.getArea(), p.itsMinEventArea,
                          boFOA.getArea(), p.itsMaxEventArea);
            }
        }
        else {
            // use the foa mask directly as the detected object
            LINFO("----------------->Using FOA mask only");
            if (boFOA.getArea() >= p.itsMinEventArea && boFOA.getArea() <= p.itsMaxEventArea) {
                boFOA.setSMV(winner.sv);
                bosUnfiltered.push_back(boFOA);
                LINFO("FOA mask ok %d < %d < %d", p.itsMinEventArea, boFOA.getArea(), p.itsMaxEventArea);
            }
            else
                LINFO("FOA mask out of range %d < %d or %d > %d", boFOA.getArea(), p.itsMinEventArea,
                      boFOA.getArea(), p.itsMaxEventArea);
        }

        ++iter;
    }// end while iter != winlist.end()

    LINFO("Found %i bitobject(s)", (int)bosUnfiltered.size());

    // greedily keep non-overlapping objects, processing smallest first
    while (!bosUnfiltered.empty()) {

        std::list<BitObject>::iterator biter, siter, smallest;

        // find the smallest remaining object
        // BUGFIX: minSize must be reset on every pass; previously it was
        // initialized once outside the loop and went stale, so "smallest"
        // silently degenerated to begin()
        smallest = bosUnfiltered.begin();
        int minSize = smallest->getArea();
        for (siter = bosUnfiltered.begin(); siter != bosUnfiltered.end(); ++siter)
            if (siter->getArea() < minSize) {
                minSize = siter->getArea();
                smallest = siter;
            }

        // does the smallest object intersect with any of the already stored ones?
        bool found = true;
        for (biter = bosFiltered.begin(); biter != bosFiltered.end(); ++biter) {
            if (smallest->isValid() && biter->isValid() && biter->doesIntersect(*smallest)) {
                // no need to store intersecting objects
                found = false;
                break;
            }
        }

        if (found && smallest->isValid())
            bosFiltered.push_back(*smallest);

        // BUGFIX: always remove the processed element; previously a kept or
        // invalid object was never erased, risking an infinite loop
        bosUnfiltered.erase(smallest);
    }

    LINFO("Found total %d non intersecting objects", (int)bosFiltered.size());
    return bosFiltered;
}