Example #1
void HRP2ColorDetectionProcess::ReadRobotMatrix()
{
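  // Fetch the 4x4 head/origin transform from LLVS as a flat, row-major
  // array of 16 doubles and unpack it into the m_HtO matrix.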
  double aFB[16];
  m_LLVS->GetMatrixHeadTOrg(aFB);
  for(int i=0;i<4;i++)
    {
      for(int j=0;j<4;j++)
	m_HtO[i][j] =  aFB[i*4+j];
    }
  ODEBUG("m_HtO : " << m_HtO);
}
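The snippet above unpacks the flat, row-major 16-double buffer returned by GetMatrixHeadTOrg into a 4x4 matrix. A minimal, self-contained sketch of the same unpacking, with a plain double[4][4] standing in for m_HtO and made-up values standing in for the LLVS result:

#include <cstdio>

int main()
{
  // Made-up stand-in for the buffer filled by GetMatrixHeadTOrg().
  const double aFB[16] = { 1, 0, 0, 0.10,
                           0, 1, 0, 0.00,
                           0, 0, 1, 1.35,
                           0, 0, 0, 1.00 };
  double HtO[4][4];
  for (int i = 0; i < 4; i++)
    for (int j = 0; j < 4; j++)
      HtO[i][j] = aFB[i*4 + j];    // row-major unpacking, as in ReadRobotMatrix()

  // The last column of a homogeneous transform is its translation part.
  std::printf("translation: %g %g %g\n", HtO[0][3], HtO[1][3], HtO[2][3]);
  return 0;
}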
Example #2
File: action-grab.cpp Project: jehc/llvc
  ActionGrab::ActionGrab(bool rectification,
			 bool triggerMode)
    : LLVClient::ActionWithLLVSBase(),
      m_triggerMode(triggerMode),
      m_LLVS(GetServicePort<LowLevelVisionSystem,
	     LowLevelVisionSystem_var>()),
      m_image(),
      m_cameraID(3),
      m_format((rectification)
	       ? "RGB_VISPU8" : "RGB_VISPU8_NONE")
  {
    ODEBUG("triggerMode :" <<m_triggerMode);
    ODEBUG("format :" <<m_format);
    if (m_triggerMode)
      m_LLVS->SetSynchronizationMode(LowLevelVisionSystem::SYNCHRO_TRIGGER);
    else
      m_LLVS->SetSynchronizationMode(LowLevelVisionSystem::SYNCHRO_FLOW);

    // Trigger the synchronization (the mode itself was selected above).
    m_LLVS->TriggerSynchro();
    sleep(2);
    m_LLVS->TriggerSynchro();

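    // Ask the grabber for RGB images and tell the undistortion process which
    // conversion type to produce (m_format depends on the rectification flag).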
    m_LLVS->SetAProcessParameterAndValue
      (CORBA::string_dup("IEEE1394 Image grabbing"),
       CORBA::string_dup("Format"),
       CORBA::string_dup("RGB"));
    m_LLVS->SetAProcessParameterAndValue
      (CORBA::string_dup("vispUndistordedProcess"),
       CORBA::string_dup("TYPE_CONV"),
       CORBA::string_dup(m_format.c_str()));

    m_LLVS->StartProcess("IEEE1394 Image grabbing");

    try
      {
	m_LLVS->StartProcess("vispUndistordedProcess");
      }
    catch(...)
      {
	std::cerr<< "Starting the vispUndistortedProcess." << std::endl;
      }

    CORBA::Long lw = 0, lh = 0; // stays 0x0 if the size query below fails
    try
      {
	m_LLVS->GetImagesGrabbedSize(m_cameraID,lw,lh);
      }
    catch(...)
      {
	std::cerr<< "Unable to GetImagesGrabbedSize LLVS" << std::endl;
      }

    long imWidth = lw;
    long imHeight = lh;
    m_image.resize(imHeight,imWidth);

    //---- FIXME: it would be better not to hard-code these paths and parameters.
    const std::string camParamPath="./data/ViSP/hrp2CamParam/hrp2.xml";
    const std::string camName="cam1394_3_rectif";
    const vpCameraParameters::vpCameraParametersProjType proj=
      vpCameraParameters::perspectiveProjWithoutDistortion;
    vpXmlParserCamera camParser;
    if (camParser.parse(m_cam,
                        camParamPath.c_str(),
                        camName,
                        proj,
                        imWidth,
                        imHeight) != vpXmlParserCamera::SEQUENCE_OK)
      throw "Fail to open or parse camera parameter file";
  }
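A minimal sketch of a caller for this constructor; the header name and the surrounding main() are assumptions made for illustration, and the actual grabbing loop is left out:

#include <iostream>
#include "action-grab.h"   // assumed header name for the ActionGrab declaration

int main()
{
  try
    {
      // Rectified images, free-running acquisition (no external trigger).
      ActionGrab grabber(true /* rectification */, false /* triggerMode */);
      // ... use the other ActionGrab entry points to grab and process images ...
    }
  catch (const char *msg)
    {
      // The constructor throws a string literal when the camera parameter
      // file cannot be opened or parsed.
      std::cerr << msg << std::endl;
      return 1;
    }
  return 0;
}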
Example #3
    int ParseVRMLFile(MultiBody *aMB,
		      std::string aFileName,
		      vector<BodyGeometricalData> &aListOfURLs,
		      bool ReadGeometry)
    {
      if (aFileName == std::string("")) {
	std::cerr << "SpiritVRMLReader: Filename is empty." << std::endl;
	return 0;
      }
      ifstream aif;

      SkipGrammar aSkipGrammar;
      SpiritOpenHRP< Actions > aSpiritOpenHRP;

      aif.open(aFileName.c_str(),ifstream::in|ifstream::binary);
      if (!aif.is_open())
	{
	  cout << "Unable to open file "<< aFileName<< endl;
	  return 0;
	}
      else {
	/*	if (aSpiritOpenHRP.getVerbose()>10)
		{
		cout << "Succeeded in opening " << aFileName <<endl;
		} */
      }

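      // Spirit's parse() requires forward iterators, so the single-pass
      // istreambuf_iterator is wrapped in multi_pass; position_iterator then
      // adds file/line/column tracking for the error reports below.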
      typedef multi_pass<istreambuf_iterator<char> > multi_pass_iterator_t;
      typedef istream::char_type char_t;

      multi_pass_iterator_t
        in_begin(make_multi_pass(istreambuf_iterator<char_t>(aif))),
        in_end(make_multi_pass(istreambuf_iterator<char_t>()));

      typedef position_iterator<multi_pass_iterator_t> iterator_t;

      iterator_t first(in_begin, in_end, aFileName), last;

      parse_info<iterator_t> pinfo = parse(first,last,aSpiritOpenHRP,aSkipGrammar);
      if (!pinfo.full)
	{
	  // Report the position where parsing stopped.
	  file_position fp_cur = pinfo.stop.get_position();
	  ODEBUG("Display - Current file: " << fp_cur.file );
	  ODEBUG( "Line   : " << fp_cur.line
		  << " Column : " << fp_cur.column
		  << endl);
	}
      aif.close();

      if (ReadGeometry)
	{
	  // Iterate over the included files if there is some.
	  vector<BodyGeometricalData*> aLOU = aSpiritOpenHRP.actions.m_DataForParsing.m_ListOfURLs;

	  string Path;
	  // find_last_of returns string::npos when there is no '/'; npos+1
	  // then wraps to 0 and Path stays empty.
	  string::size_type lastSlash = aFileName.find_last_of('/');
	  Path = aFileName.substr(0,lastSlash+1);

	  ODEBUG( "Path: " << Path
		  << " Size of m_ListOfURLs: "
		  << aSpiritOpenHRP.actions.m_DataForParsing.m_ListOfURLs.size());

	  for(unsigned int iIndexBDG=0;
	      iIndexBDG<aLOU.size();
	      iIndexBDG++)
	    {
	      const vector<string > URLs = aLOU[iIndexBDG]->getURLs();
	      ODEBUG(" i: " << iIndexBDG << " URLs.size():" << URLs.size());
	      for(unsigned int j=0;j<URLs.size();j++)
		{
		  string GeomFileName = Path + URLs[j];
		  aif.open(GeomFileName.c_str(),ifstream::in|ifstream::binary);

		  if (!aif.is_open())
		    {
		      ODEBUG(" Unable to open :" << GeomFileName );
		    }
		  else
		    {
		      ODEBUG3( " Open :" << GeomFileName );
		      multi_pass_iterator_t
			lin_begin(make_multi_pass(istreambuf_iterator<char_t>(aif))),
			lin_end(make_multi_pass(istreambuf_iterator<char_t>()));

		      iterator_t lfirst(lin_begin, lin_end, URLs[j]), llast;
		      *aSpiritOpenHRP.actions.m_DataForParsing.m_LOUIndex =
			iIndexBDG;
		      parse_info<iterator_t> lpinfo =
			parse(lfirst,llast,aSpiritOpenHRP,aSkipGrammar);
		      if (!lpinfo.full)
			{
			  // Report the position where parsing stopped.
			  file_position fp_cur = lpinfo.stop.get_position();
			  ODEBUG("Display - Current file: " << fp_cur.file );
			  ODEBUG( "Line   : " << fp_cur.line
				   << " Column : " << fp_cur.column
				   << endl);
			}
		      ODEBUG3(" Finished");
		    }
		  aif.close();
		}
	    }

	  // Copy the list of URLS.
	  aListOfURLs.resize(aLOU.size());
	  vector<BodyGeometricalData *>::iterator it_BGD = aLOU.begin();
	  unsigned int i=0;
	  while (it_BGD!= aLOU.end())
	    {
	      aListOfURLs[i] = *(*it_BGD);
	      i++;
	      it_BGD++;
	    }
	}

      // Copy multibody structure.
      *aMB = aSpiritOpenHRP.actions.m_DataForParsing.m_MultiBody;


      return 1;
    }
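A minimal sketch of how ParseVRMLFile might be called; the model file name is made up for illustration, and the headers declaring MultiBody, BodyGeometricalData and ParseVRMLFile are assumed to be included as well:

#include <iostream>
#include <string>
#include <vector>

int main()
{
  MultiBody aMultiBody;
  std::vector<BodyGeometricalData> aListOfURLs;

  // Parse the robot model and also read the geometry files it references.
  if (ParseVRMLFile(&aMultiBody, "HRP2main.wrl", aListOfURLs, true))
    std::cout << "Parsed " << aListOfURLs.size()
              << " bodies carrying geometric data." << std::endl;
  else
    std::cerr << "Parsing failed." << std::endl;
  return 0;
}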
Example #4
int HRP2ColorDetectionProcess::RealizeTheProcess()
{
  if (!m_Computing)
    return 0;

  unsigned int lw,lh;
  double x[m_NbOfCameras],y[m_NbOfCameras];
  int x1[2]={-1,-1},x2[2]={-1,-1};
  double a3DPt[3];
  VNL::Vector<double> Q(4);
  VNL::Vector<double> QHead(3);
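  // Q will hold the triangulated point in homogeneous coordinates (x, y, z, 1);
  // QHead is that point mapped through m_HtO and scaled by 0.001
  // (presumably from millimeters to meters).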
  if (m_FirstTime)
    {
      ReadRobotMatrix();
      m_FirstTime = false;
    }

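  // Run the color detector on every camera image; once both cameras have a
  // valid center of gravity, triangulate the target in 3D.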
  for(int i=0;i<m_NbOfCameras;i++)
    {
      m_ImageInputRGB[i]->GetSize(lw,lh);
      /* VW::ImageConversions::RGBBuffer_to_VWImage((char *)m_InputImage[i].Image,
	 m_ImageInputRGB[i],lw,lh);*/

      m_ColorDetector[i].FilterOnHistogram((unsigned char *)m_InputImage[i].Image,m_ImageResult[i]);


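      // Dump the filtered image to IntermediateImageResult_<i>.ppm for inspection.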
      {
	char Buffer[1024];
	sprintf(Buffer,"IntermediateImageResult_%03d.ppm",i);
	m_ImageResult[i]->WriteImage(Buffer);
      }

      m_ColorDetector[i].ComputeCoG(m_ImageResult[i],x[i],y[i]);

      if ((i==1) && (x[0]>=0.0) && (x[1]>=0.0)
	  && (y[0]>=0.0) && (y[1]>=0.0))
	{

	  x1[0] = (int) x[0]; x1[1] = (int)y[0];
	  x2[0] = (int) x[1]; x2[1] = (int)y[1];
	  m_TM.Triangulation(*m_PI[0],*m_PI[1],x1,x2,a3DPt);
	  for(int j=0;j<3;j++)
	    Q[j] = a3DPt[j];
	  Q[3] = 1.0;

	  QHead = m_HtO * Q;
	  QHead *= 0.001;
	  ODEBUG( "Triangulation: "
		   << a3DPt[0] << " "
		   << a3DPt[1] << " "
		   << a3DPt[2]);
	  ODEBUG3("QHead :" << QHead <<endl );


#if 0
	  if (!CORBA::is_nil(m_VisualServoing))
	    {
	      try
		{
		  // m_VisualServoing->SendTarget2DPosition(x[i],y[i]);
		  CORBA::Double x_left = x[0];
		  CORBA::Double y_left = y[0];
		  CORBA::Double X = QHead[0];
		  CORBA::Double Y = QHead[1];
		  CORBA::Double Z = QHead[2];

		  m_VisualServoing->SendTarget3DPosition(x_left,y_left,X,Y,Z);
		  ODEBUG("Send " << x << " " << y << " to visual servoing");
		}

	      catch(...)
		{

		  ODEBUG("Sorry the data was not send to the visual servoing plugin" );
		  if (m_NbOfProcessWithoutTrialConnection>m_IntervalBetweenConnectionTrials)
		    {
		      CORBA::release(m_VisualServoing);
		      TryConnectionToVisualServoing();
		      m_NbOfProcessWithoutTrialConnection=0;
		    }
		  else
		    m_NbOfProcessWithoutTrialConnection++;

		}
	    }
	  else
	    {
	      if (m_NbOfProcessWithoutTrialConnection>m_IntervalBetweenConnectionTrials)
		{
		  TryConnectionToVisualServoing();
		  m_NbOfProcessWithoutTrialConnection=0;
		}
	      else
		m_NbOfProcessWithoutTrialConnection++;
	    }
#endif
	}
      /*
      if ((x==-1) && (y==-1))
	{
	  epbm_save("ImageInput.epbm",&m_InputImage[i],0);
	  m_ImageResult[i]->WriteImage("ImageIntermediate.pgm");
	  exit(0);
	  //m_ImageInputRGB[0]->WriteImage("ImageInput.ppm");
	}
	ODEBUG3("Center ( " << i << " ) :"<< x << " " << y ); */
    }
  return 0;
}
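The geometric core of the example above is a homogeneous-coordinate transform: the triangulated point (X, Y, Z) is extended with a fourth component equal to 1, multiplied by the 4x4 matrix read in ReadRobotMatrix(), and rescaled by 0.001. A minimal sketch with plain arrays in place of the VNL types; the matrix and the point are made-up values:

#include <cstdio>

int main()
{
  // Made-up stand-in for m_HtO: identity rotation plus a translation.
  const double HtO[4][4] = { { 1, 0, 0,   50 },
                             { 0, 1, 0,    0 },
                             { 0, 0, 1, 1200 },
                             { 0, 0, 0,    1 } };
  const double a3DPt[3] = { 120.0, -40.0, 850.0 };   // triangulated point

  // Q = (x, y, z, 1) in homogeneous coordinates.
  const double Q[4] = { a3DPt[0], a3DPt[1], a3DPt[2], 1.0 };

  // QHead = HtO * Q, then scaled by 0.001 as in RealizeTheProcess().
  double QHead[4] = { 0.0, 0.0, 0.0, 0.0 };
  for (int i = 0; i < 4; i++)
    for (int j = 0; j < 4; j++)
      QHead[i] += HtO[i][j] * Q[j];
  for (int i = 0; i < 3; i++)
    QHead[i] *= 0.001;

  std::printf("QHead: %g %g %g\n", QHead[0], QHead[1], QHead[2]);
  return 0;
}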