SpecialActions::SpecialActions() :
  wasEndOfSpecialAction(false),
  //stiffnessInterpolationStart(0),
  stiffnessInterpolationCounter(0),
  stiffnessInterpolationLength(0),
  wasActive(false),
  dataRepetitionCounter(0),
  lastSpecialAction(SpecialActionRequest::numOfSpecialActionIDs),
  mirror(false)
{
  theInstance = this;

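  // compile all special action (.mof) files into the motion net this module executes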
  std::vector<float> motionData;
  char errorBuffer[10000];
  MofCompiler* mofCompiler = new MofCompiler;
  if(!mofCompiler->compileMofs(errorBuffer, sizeof(errorBuffer), motionData))
    OUTPUT_TEXT("Error while parsing mof files:");
  else
    motionNetData.load(motionData);
  delete mofCompiler;

  if(*errorBuffer)
    OUTPUT_TEXT("  " << errorBuffer);

  // create an uninitialised motion request to set startup motion
  currentNode = motionNetData.label_extern_start[SpecialActionRequest().specialAction];

  // read entries from file
  InMapFile cm("specialActions.cfg");
  if(!cm.exists())
  {
    OUTPUT_ERROR("'specialActions.cfg' not found.");
  }
  else
  {
    OdometryParams infos;
    cm >> infos;
    for(std::vector<SpecialActionInfo>::const_iterator it = infos.specialActionInfos.begin(); it != infos.specialActionInfos.end(); it++)
    {
      infoTable[it->id] = SpecialActionInfo(*it);
      if(it->type == SpecialActionInfo::once || it->type == SpecialActionInfo::homogeneous)
      {
        infoTable[it->id].odometryOffset.rotation = it->odometryOffset.rotation;
        if(it->type == SpecialActionInfo::homogeneous)
        {
          // convert from mm/seconds to mm/tick
          float motionCycleTime = theFrameInfo.cycleTime;
          infoTable[it->id].odometryOffset.translation.x() *= motionCycleTime;
          infoTable[it->id].odometryOffset.translation.y() *= motionCycleTime;
          // convert from rad/seconds to rad/tick
          infoTable[it->id].odometryOffset.rotation *= motionCycleTime;
        }
      }
    }
  }
}
void AutomaticCameraCalibrator::optimize()
{
  if(optimizer == nullptr)
  {
    // since the parameters for the robot pose are correction parameters,
    // an empty RobotPose is used instead of theRobotPose
    optimizationParameters = pack(theCameraCalibration, Pose2f());
    optimizer = new GaussNewtonOptimizer<numOfParameterTranslations>(functor);
    successiveConvergations = 0;
    framesToWait = 0;
  }
  else
  {
    // only do an iteration after some frames have passed
    if(framesToWait <= 0)
    {
      framesToWait = numOfFramesToWait;
      const float delta = optimizer->iterate(optimizationParameters, Parameters::Constant(0.0001f));
      if(!std::isfinite(delta))
      {
        OUTPUT_TEXT("Restart optimize! An optimization error occured!");
        delete optimizer;
        optimizer = nullptr;
        state = Accumulate;
      }
      OUTPUT_TEXT("AutomaticCameraCalibrator: delta = " << delta);

      // the camera calibration is refreshed from the current optimizer state
      Pose2f robotPoseCorrection;
      unpack(optimizationParameters, nextCameraCalibration, robotPoseCorrection);

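      // require several successive iterations below the termination criterion before declaring convergence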
      if(std::abs(delta) < terminationCriterion)
        ++successiveConvergations;
      else
        successiveConvergations = 0;
      if(successiveConvergations >= minSuccessiveConvergations)
      {
        OUTPUT_TEXT("AutomaticCameraCalibrator: converged!");
        OUTPUT_TEXT("RobotPoseCorrection: " << robotPoseCorrection.translation.x() * 1000.0f
                    << " " << robotPoseCorrection.translation.y() * 1000.0f
                    << " " << robotPoseCorrection.rotation.toDegrees() << "deg");
        currentRobotPose.translation.x() += robotPoseCorrection.translation.x() * 1000.0f;
        currentRobotPose.translation.y() += robotPoseCorrection.translation.y() * 1000.0f;
        currentRobotPose.rotation = Angle::normalize(currentRobotPose.rotation + robotPoseCorrection.rotation);
        abort();

        if(setJointOffsets)
          invert(theCameraCalibration);
      }
    }
    --framesToWait;
  }
}
void AutomaticCameraCalibrator::moveHead()
{
  ASSERT(headSpeed >= 0.3);
  if(firstHeadPans.empty() && secondHeadPans.empty())
  {
    currentHeadPan = 0.0f;
    OUTPUT_TEXT("Accumulation finished. Waiting to optimize... Or manipulate samples...");
    state = ManualManipulation;
  }
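  // work through the remaining head pans for the first camera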
  else if(!firstHeadPans.empty())
  {
    currentHeadPan = firstHeadPans.back();
    firstHeadPans.pop_back();
    state = WaitForHeadToReachPosition;
  }
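  // the first set of head pans is done: switch to the other camera before starting the second set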
  else if(firstHeadPans.empty() && secondHeadPans.size() == headPans.size())
  {
    swapCameras(startCamera == CameraInfo::upper ? CameraInfo::lower : CameraInfo::upper);
    currentHeadPan = secondHeadPans.back();
    secondHeadPans.pop_back();
    state = WaitForHeadToReachPosition;
  }
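  // continue the second set of head pans on the current camera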
  else if(firstHeadPans.empty() && !secondHeadPans.empty())
  {
    currentHeadPan = secondHeadPans.back();
    secondHeadPans.pop_back();
    state = WaitForHeadToReachPosition;
  }
  currentHeadTilt = currentCamera == CameraInfo::upper ? upperCameraHeadTilt : lowerCameraHeadTilt;

  nextHeadAngleRequest.pan = currentHeadPan;
  nextHeadAngleRequest.tilt = currentHeadTilt;
  nextHeadAngleRequest.speed = headSpeed;
}
void AutomaticCameraCalibrator::update(CameraResolutionRequest& cameraResolutionRequest)
{
  CameraResolution::Resolutions nextUpper, nextLower;
  switch(nextResolution)
  {
    case NextResolution::none:
      return;
    case NextResolution::hiResUpper:
      nextUpper = CameraResolution::Resolutions::w640h480;
      nextLower = CameraResolution::Resolutions::w320h240;
      break;
    case NextResolution::hiResLower:
      nextUpper = CameraResolution::Resolutions::w320h240;
      nextLower = CameraResolution::Resolutions::w640h480;
      break;
    default:
      ASSERT(false);
  }
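  // only issue a request if the desired resolutions differ from the current ones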
  if(theCameraResolution.resolutionUpper != nextUpper || theCameraResolution.resolutionLower != nextLower)
  {
    if(!cameraResolutionRequest.setRequest(nextUpper, nextLower))
    {
      OUTPUT_TEXT("Changing the resolution is not permitted! Calibration aborted...");
      abort();
    }
  }
  nextResolution = NextResolution::none;
}
void AutomaticCameraCalibrator::accumulate()
{
  if(theCameraInfo.camera != currentCamera)
    return;

  auto isInsideImage = [&](const Vector2i & point)
  {
    return point.x() >= 0 && point.x() < theCameraInfo.width
           && point.y() >= 0 && point.y() < theCameraInfo.height;
  };

  std::vector<Sample> pointsOnALine;
  for(const LinesPercept::Line& line : theLinesPercept.lines)
  {
    for(const Vector2i& point : line.spotsInImg)
    {
      // store all necessary information in the sample
      Vector2f pointOnField;
      if(!isInsideImage(point))
        continue;
      else if(!Transformation::imageToRobot(point.x(), point.y(), theCameraMatrix, theCameraInfo, pointOnField))
      {
        OUTPUT_TEXT("MEEEK! Point not on field!" << (theCameraInfo.camera == CameraInfo::upper ? " Upper " : " Lower "));
        continue;
      }

      Sample sample;
      sample.pointInImage = point;
      sample.pointOnField = pointOnField;
      sample.torsoMatrix = theTorsoMatrix;
      sample.headYaw = theJointAngles.angles[Joints::headYaw];
      sample.headPitch = theJointAngles.angles[Joints::headPitch];
      sample.cameraInfo = theCameraInfo;

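      // keep the sample only if it is sufficiently far from all previous samples of the same camera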
      bool sufficientDistanceToOtherSamples = true;
      for(const Sample& testSample : pointsOnALine)
      {
        Vector2f difference = sample.pointOnField - testSample.pointOnField;
        if(testSample.cameraInfo.camera != sample.cameraInfo.camera)
          continue;
        if(difference.norm() < minimumSampleDistance)
          sufficientDistanceToOtherSamples = false;
      }
      if(sufficientDistanceToOtherSamples)
      {
        samples.push_back(sample);
        pointsOnALine.push_back(sample);
      }
    }
  }

  accumulationTimestamp = Time::getCurrentSystemTime();
  state = WaitForUser;
}
KickEngine::KickEngine()
{
  params.reserve(10);

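  // scan the KickEngine configuration directory for .kmc parameter files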
  char dirname[260];

  sprintf(dirname, "%s/Config/KickEngine/", File::getBHDir());
  DIR* dir = opendir(dirname);
  ASSERT(dir);
  struct dirent* file = readdir(dir);

  while(file != nullptr)
  {
    char name[260] = "";
    sprintf(name, "KickEngine/%s", file->d_name);

    if(strstr(name, ".kmc"))
    {
      InMapFile stream(name);
      ASSERT(stream.exists());

      KickEngineParameters parameters;
      stream >> parameters;

      sprintf(name, "%s", file->d_name);
      for(int i = 0; i < 260; i++)
      {
        if(name[i] == '.')
          name[i] = 0;
      }
      strcpy(parameters.name, name);

      if(KickRequest::getKickMotionFromName(parameters.name) < KickRequest::none)
        params.push_back(parameters);
      else
      {
        OUTPUT_TEXT("Warning: KickRequest is missing the id for " << parameters.name);
        fprintf(stderr, "Warning: KickRequest is missing the id for %s \n", parameters.name);
      }
    }
    file = readdir(dir);
  }
  closedir(dir);
}
void AutomaticCameraCalibrator::insertSample(Vector2i point, CameraInfo::Camera camera)
{
  if(insertionOnCamera != theCameraInfo.camera)
    return;
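  // project the selected image point onto the field; reject it if the projection fails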
  Sample sample;
  if(!Transformation::imageToRobot(point.x(), point.y(), theCameraMatrix, theCameraInfo, sample.pointOnField))
  {
    OUTPUT_TEXT("MEEEK! Point not on field!" << (theCameraInfo.camera == CameraInfo::upper ? " Upper " : " Lower "));
    return;
  }
  sample.pointInImage = point;
  sample.torsoMatrix = theTorsoMatrix;
  sample.headYaw = theJointAngles.angles[Joints::headYaw];
  sample.headPitch = theJointAngles.angles[Joints::headPitch];
  sample.cameraInfo = theCameraInfo;
  samples.push_back(sample);
  lastInsertedSample = sample;
  lastActionWasInsertion = true;
  alreadyRevertedInsertion = false;
  insertionValueExistant = false;
}
/***********************************************************
*N_Goodness -- Used to test the Goodness of Fit
*
************************************************************/
void
N_Goodness (int ngrp, int nparm, double Parms[], int bounded[],
	    int Nobs, double Xi[], double Yp[], double Yn[], double Ls[],
	    int Xg[], double SR[] )
{
  double *GYpp;			/*predicted positive dependent values */
  double *GEp;			/*estimated probability */
  double *P;
  double *Gvar, *GYsum, *Gcount;
  int *GrpSize;			/*numbers of obs. in each group. */
  int i, j, k, l;
  double gfit;
    

  /* Note: have to redefine those var. because the data must be */
  /* changed in order to compute goodness-of-fit statistic. */
  GYpp = DVECTOR (1, Nobs);
  GEp = DVECTOR (1, Nobs);
  GYsum = DVECTOR(1, Nobs);
  Gcount = DVECTOR(1, Nobs);
  Gvar = DVECTOR(1, Nobs);
  GrpSize = IVECTOR (1, ngrp);
  P = DVECTOR (1, nparm);



/**** compute the GrpSize[] -- # of obs. in each group***/
  for (i = 1; i <= ngrp; i++)
    GrpSize[i] = 0;

  for (j = 1; j <= Nobs; j++)
    GrpSize[Xg[j]] += 1;


  for (j = 6; j <= nparm; j++)
    P[j - 5] = Parms[j]; 

/** compute the estimated prob. and expected obs.  *******/

  Predict(Xi, Ls, Nobs, Parms, GEp);
  SortByLs (Nobs, ngrp, GrpSize, Ls, Yp, Yn, GYpp, GEp);

  

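  /* expected positive responses and variance per litter; P[] supplies the group-specific intra-litter correlation */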
  for (i = 1; i <= Nobs; i++)
    {
      GYsum[i] = Yp[i] + Yn[i];
      GYpp[i] = GEp[i] * GYsum[i];
      Gvar[i] = GYsum[i] * GEp[i] * (1 - GEp[i]) * (1.0 + (GYsum[i] - 1.0) * P[Xg[i]]);
    }


  /*** compute the goodness-of-fit test for litter data. *********/
  gfit = 0.0;

  for (k = 1; k <= Nobs; k++)
    {
      if (Gvar[k] > 0)
        {
          SR[k] = (Yp[k] - GYpp[k]) / sqrt (Gvar[k]);
          gfit += SR[k] * SR[k];
        }
    }


 /*** declare random array size for debug output */


  OUTPUT_TEXT ("\n\n                               Litter Data");
  OUTPUT_TEXT ("\n\n           Lit.-Spec.              Litter                          Scaled");
  OUTPUT_TEXT (    "   Dose       Cov.     Est._Prob.   Size    Expected   Observed   Residual");
  OUTPUT_TEXT (    "--------------------------------------------------------------------------");
/*                           11111111112222222222333333333344444444445555555555666666666677777777778 */
/*                  12345678901234567890123456789012345678901234567890123456789012345678901234567890 */
/*                  XXXXXXXXX XXXXXXXXX     XXXXXX     XXXXX     XXXXXXX     XXXXX   XXXXXXXXX */

/*  GXi[0] = 1000000.0;*/
  for (i = 1; i <= Nobs; i++)
    {
      if (i > 1 && Xi[i] > Xi[i - 1])
	{
	  fprintf (fp_out, "\n");
	}
      fprintf (fp_out,
#ifndef RBMDS
	       "%9.4f %9.4f     %6.3f     %5.0f     %7.3f     %5.0f   %9.4f\n"
#else
	       "%9.4f %9.4f     %30.22g %5.0f     %30.22g     %5.0f   %9.4f\n"
#endif
	       , Xi[i], Ls[i], GEp[i], GYsum[i], GYpp[i], Yp[i],
	       (Gvar[i] > 0 ? (Yp[i] - GYpp[i])/sqrt(Gvar[i]) : 0));
    }




  FREE_DVECTOR (GYpp, 1, Nobs);
  FREE_DVECTOR (GEp, 1, Nobs);
  FREE_DVECTOR (GYsum, 1, Nobs);
  FREE_DVECTOR (Gcount, 1, Nobs);
  FREE_DVECTOR (Gvar, 1, Nobs);
  FREE_IVECTOR (GrpSize, 1, ngrp);
  FREE_DVECTOR (P, 1, nparm);
}
void AutomaticCameraCalibratorHandlerInsertion::setActive(std::string view)
{
  if(!ImageViewAdapter::addListener(this, view))
    OUTPUT_TEXT("AutomaticCameraCalibratorInsertion is already active for \"" + view + "\"");
}