void CFootBotMotorGroundSensor::Update() {
      /* We make the assumption that the foot-bot is rotated only around Z */
      CFloorEntity& cFloorEntity = m_cSpace.GetFloorEntity();
      const CVector3& cEntityPos = GetEntity().GetEmbodiedEntity().GetPosition();
      const CQuaternion& cEntityRot = GetEntity().GetEmbodiedEntity().GetOrientation();
      CRadians cRotZ, cRotY, cRotX;
      cEntityRot.ToEulerAngles( cRotZ, cRotY, cRotX );
      CVector2 cCenterPos(cEntityPos.GetX(), cEntityPos.GetY());
      CVector2 cSensorPos;
      for(UInt32 i = 0; i < CCI_FootBotMotorGroundSensor::NUM_READINGS; ++i) {
         cSensorPos  = m_tReadings[i].Offset;
         cSensorPos.Rotate(cRotZ);
         /* The sensor offset is expressed in cm, scale it down to meters */
         cSensorPos *= 0.01;
         cSensorPos += cCenterPos;
         const CColor& cColor = cFloorEntity.GetColorAtPoint(cSensorPos.GetX(),cSensorPos.GetY());
         /* Use a floating-point divisor so the gray-scale value is not truncated by integer division */
         m_tReadings[i].Value = cColor.ToGrayScale() / 255.0f * FOOTBOT_MOTOR_GROUND_SENSOR_READING_RANGE.GetSpan();

         if( m_fNoiseLevel > 0.0f ) {
            AddNoise(i);
         }

         /* Normalize reading between 0 and 1, only if calibration has been performed */
         if( m_bCalibrated ) {
            m_tReadings[i].Value = FOOTBOT_MOTOR_GROUND_SENSOR_READING_RANGE.NormalizeValue(m_tReadings[i].Value);
         }
      }
   }
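The AddNoise(i) helper invoked above is not part of this snippet. A minimal sketch of what it might do, assuming uniform noise of amplitude m_fNoiseLevel and an ARGoS random-number-generator member (here called m_pcRNG, an assumed name, not one taken from the snippet):

void CFootBotMotorGroundSensor::AddNoise(UInt32 un_sensor_index) {
   /* Perturb the raw reading with a uniform sample in [-m_fNoiseLevel, +m_fNoiseLevel] */
   m_tReadings[un_sensor_index].Value +=
      m_pcRNG->Uniform(CRange<Real>(-m_fNoiseLevel, m_fNoiseLevel));
}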
void MainWindow::AddNoise()
{
    double mag = ui->noiseSpinBox->value();

    AddNoise(&outImage, mag, ui->colorNoiseCheckBox->isChecked());

    DrawDisplayImage();
}
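The slot above delegates to a worker overload AddNoise(QImage*, double, bool) that is not shown here. A minimal sketch of such an overload, under the assumption that it adds uniform noise of magnitude mag to every pixel, per channel when colorNoise is set and shared across the channels otherwise (the assignment's actual implementation may differ):

void MainWindow::AddNoise(QImage *image, double mag, bool colorNoise)
{
    // Uniform noise sample in [-mag, +mag]
    auto noise = [mag]() { return mag * (2.0 * rand() / RAND_MAX - 1.0); };

    for (int y = 0; y < image->height(); y++)
        for (int x = 0; x < image->width(); x++)
        {
            QRgb p = image->pixel(x, y);
            double shared = noise();   // reused for all channels when colorNoise is false
            int r = qBound(0, (int)(qRed(p)   + (colorNoise ? noise() : shared)), 255);
            int g = qBound(0, (int)(qGreen(p) + (colorNoise ? noise() : shared)), 255);
            int b = qBound(0, (int)(qBlue(p)  + (colorNoise ? noise() : shared)), 255);
            image->setPixel(x, y, qRgb(r, g, b));
        }
}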
MainWindow::MainWindow(QWidget *parent) :
    QMainWindow(parent),
    ui(new Ui::MainWindow)
{
    ui->setupUi(this);

    connect(ui->openButton, SIGNAL(clicked()), this, SLOT(OpenImage()));
    connect(ui->saveButton, SIGNAL(clicked()), this, SLOT(SaveImage()));
    connect(ui->saveDisplayButton, SIGNAL(clicked()), this, SLOT(SaveDisplayImage()));
    connect(ui->resetButton, SIGNAL(clicked()), this, SLOT(ResetImage()));
    connect(ui->toggleButton, SIGNAL(pressed()), this, SLOT(ToggleImage()));
    connect(ui->toggleButton, SIGNAL(released()), this, SLOT(ToggleImage()));

    connect(ui->bwButton, SIGNAL(clicked()), this, SLOT(BlackWhiteImage()));
    connect(ui->noiseButton, SIGNAL(clicked()), this, SLOT(AddNoise()));
    connect(ui->meanButton, SIGNAL(clicked()), this, SLOT(MeanBlurImage()));
    connect(ui->medianButton, SIGNAL(clicked()), this, SLOT(MedianImage()));
    connect(ui->gaussianBlurButton, SIGNAL(clicked()), this, SLOT(GaussianBlurImage()));
    connect(ui->firstDerivButton, SIGNAL(clicked()), this, SLOT(FirstDerivImage()));
    connect(ui->secondDerivButton, SIGNAL(clicked()), this, SLOT(SecondDerivImage()));
    connect(ui->sharpenButton, SIGNAL(clicked()), this, SLOT(SharpenImage()));
    connect(ui->sobelButton, SIGNAL(clicked()), this, SLOT(SobelImage()));
    connect(ui->bilateralButton, SIGNAL(clicked()), this, SLOT(BilateralImage()));
    connect(ui->halfButton, SIGNAL(clicked()), this, SLOT(HalfImage()));
    connect(ui->rotateButton, SIGNAL(clicked()), this, SLOT(RotateImage()));
    connect(ui->peaksButton, SIGNAL(clicked()), this, SLOT(FindPeaksImage()));
    connect(ui->houghButton, SIGNAL(clicked()), this, SLOT(HoughImage()));
    connect(ui->crazyButton, SIGNAL(clicked()), this, SLOT(CrazyImage()));
    connect(ui->randomButton, SIGNAL(clicked()), this, SLOT(RandomSeedImage()));
    connect(ui->pixelButton, SIGNAL(clicked()), this, SLOT(PixelSeedImage()));
    connect(ui->histogramButton, SIGNAL(clicked()), this, SLOT(HistogramSeedImage()));

    connect(ui->actionOpen, SIGNAL(triggered()), this, SLOT(OpenImage()));
    connect(ui->zoomSlider, SIGNAL(valueChanged(int)), this, SLOT(Zoom(int)));
    connect(ui->brightnessSlider, SIGNAL(valueChanged(int)), this, SLOT(Brightness(int)));
    connect(ui->verticalScrollBar, SIGNAL(valueChanged(int)), this, SLOT(Scroll(int)));
    connect(ui->horizontalScrollBar, SIGNAL(valueChanged(int)), this, SLOT(Scroll(int)));

    ui->meanBox->setValue(2);
    ui->medianBox->setValue(2);
    ui->blurSpinBox->setValue(2.0);
    ui->firstDerivSpinBox->setValue(2.0);
    ui->secondDerivSpinBox->setValue(2.0);
    ui->sharpenSigmaSpinBox->setValue(2.0);
    ui->sharpenMagSpinBox->setValue(1.0);
    ui->bilateralSigmaSSpinBox->setValue(2.0);
    ui->bilateralSigmaISpinBox->setValue(20.0);
    ui->noiseSpinBox->setValue(10.0);
    ui->orientationSpinBox->setValue(10.0);
    ui->peakThresholdSpinBox->setValue(10.0);
    ui->colorNoiseCheckBox->setChecked(true);
    ui->zoomSlider->setValue(0);
    ui->brightnessSlider->setValue(0);
    ui->clusterBox->setValue(4);

    displayImage = QImage(ui->ImgDisplay->width(), ui->ImgDisplay->height(), QImage::Format_RGB32);
}
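All connections above use the string-based SIGNAL()/SLOT() macros, which are only checked at run time. If the project is built against Qt 5, the same wiring can be expressed with pointer-to-member connects that fail at compile time when a name or signature is wrong. A sketch for two of the lines above (assuming, as the code suggests, that AddNoise is overloaded, so the parameterless slot must be selected explicitly):

    connect(ui->noiseButton, &QPushButton::clicked,
            this, static_cast<void (MainWindow::*)()>(&MainWindow::AddNoise));
    connect(ui->zoomSlider, &QSlider::valueChanged, this, &MainWindow::Zoom);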
Example #4
int main(int argc, char *argv[])
{
  double InputPattern[MAXN] = {0};
  double InputPatternWithNoise[MAXN] = {0};
  double OutputPattern[MAXN] = {0};
  //double En = 0.0;
  int Noise = 0;
  int indexPattern;
  char Menu = '\0';
  char FileName[MAXFILENAME];
  const char MenuChars[] = "ELNeln";

  if (!(argc == 2 || argc ==3 ))
  {
    printf("\n\tUSAGE: Hopfield <patterns filename>\n\n");
    printf("\n\tUSAGE: Hopfield <patterns filename> <noisy patterns filename>\n\n");
    exit(EXIT_FAILURE);
  }

  printf("Hopfield's ANN Simulation: Associative Memory " APPNAME_VERSION "\n\n"
    "- Patterns file name: %s   loading .... ", argv[1]);

  readFile(argv[1]);
  printf("ready\n"
    "- Number of neurons: %d * %d = %d, number of patterns: %d\n",
    nRows, nColumns, patSize, nPatterns);

  printf("- Learning patterns by hebbian learning rule .... ");
  LearnJ(nPatterns, patSize, J);
  printf("ready\n");
  printf("- Learning result: connection matrix, size %d x %d\n\n", 
         nRows*nColumns, nRows*nColumns);

  if (argc == 3)
  {
    printf("- Noisy patterns file name: %s\n\n", argv[2]);
    printf("- Loading noisy patterns data .... ");
    readNoisyFile(argv[2]);
    printf("ready\n\n");
    printf("- Number of noisy patterns: %d\n\n", nNoisyPatterns);
  }

  while (Menu != 'E' && Menu != 'e')
  {
    if (argc == 2 && (Menu == 'L' || Menu == 'l'))
    {
      fgetc(stdin); /* remove the '\n' left over from the previous input */
      printf("- Patterns file name: ");
      fgets(FileName, MAXFILENAME, stdin);
      if (FileName[strlen(FileName) - 1] == '\n')
      {
        FileName[strlen(FileName) - 1] = '\0'; /* strip the trailing '\n' */
      }
      printf("\n- Loading patterns data .... ");
      readFile(FileName);
      printf("ready\n\n"
        "- Number of neurons: %d * %d = %d, number of patterns: %d\n\n",
        nRows, nColumns, patSize, nPatterns);
      printf("- Learning patterns by hebbian learning rule .... ");
      LearnJ(nPatterns, patSize, J);
      printf("ready\n\n");
      printf("- Learning result: 1 connection matrix, size %d x %d\n\n", 
             nRows * nColumns, nRows * nColumns);
    }

    /* Note: on the first pass Menu is still '\0', which strchr() matches against the
       terminating '\0' of MenuChars, so one simulation runs before the menu is shown */
    if (strchr(MenuChars, Menu) != NULL)
    {
      switch (argc)
      {
      case 2:
        printf("\n- Choose pattern to disturb by noise, index (1..%d): ", nPatterns);
        scanf(" %d", &indexPattern);
        puts("");
        if (indexPattern < 1 || indexPattern > nPatterns)
        {
          fprintf(stderr, "\n\tERROR: index %d out of range\n\n", indexPattern);
          getchar();
          exit(EXIT_FAILURE);
        }
        indexPattern--;
        showIndexedPattern(indexPattern);
        puts("");
        CopyPattern(patSize, Patterns[indexPattern], InputPattern);
        CopyPattern(patSize, Patterns[indexPattern], InputPatternWithNoise);
        printf("- Noise [%%]: ");
        scanf(" %d", &Noise);
        
        AddNoise(patSize, indexPattern, InputPatternWithNoise, Noise);
        /* printf("- Pattern as vector:\n\n"); */
        /* showPatternAsVector(InputPatternWithNoise); */
        printf("\n\n- Pattern as 2D image and noisy pixels:\n\n");
        CalcAssociations(patSize, J, InputPattern, InputPatternWithNoise, OutputPattern);
        puts("");
        break;
      case 3:
        printf("\n- Choose noisy pattern, index (1..%d): ", nNoisyPatterns);
        scanf(" %d", &indexPattern);
        puts("");
        if (indexPattern < 1 || indexPattern > nNoisyPatterns)
        {
          fprintf(stderr, "\n\tERROR: index %d out of range\n\n", indexPattern);
          getchar();
          exit(EXIT_FAILURE);
        }
        indexPattern--;
        showIndexedNoisyPattern(indexPattern);
        puts("");
        CopyPattern(patSize, NoisyPatterns[indexPattern], InputPattern);
        /* printf("- Pattern as vector:\n\n"); */
        /* showPatternAsVector(InputPattern); */
        printf("\n\n- Pattern as 2D image:\n\n");
        CalcAssociations(patSize, J, InputPattern, InputPattern, OutputPattern);
        puts("");
        break;
      default:
        printf("\n\tSYSTEM ERROR: this should never happen!\n\n");
        getchar();
        exit(EXIT_FAILURE);
      }
    }
    /* Discard the rest of the current input line */
    while (getchar() != '\n')
    {
      ;
    }
    if (argc == 2)
    {
      printf("- E(xit), L(oad new patterns data file), N(ext simulation) ..... ");
    }
    else
    {
      printf("- E(xit), N(ext simulation) ..... ");
    }
    Menu = getchar();
  }

  return 0;
}
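AddNoise(patSize, indexPattern, InputPatternWithNoise, Noise) is called with a noise level in percent but is not listed in this example. For a Hopfield memory with bipolar (+1/-1) units, disturbing a pattern usually means inverting a given fraction of randomly chosen units; a minimal sketch under that assumption (the project's routine may additionally report which pixels were flipped):

void AddNoise(int patSize, int indexPattern, double *pattern, int noise)
{
  int nFlips = patSize * noise / 100;  /* number of units to disturb */
  int k;

  (void)indexPattern;                  /* kept to match the caller's signature; unused here */
  for (k = 0; k < nFlips; k++)
  {
    int pos = rand() % patSize;        /* pick a unit at random (positions may repeat) ... */
    pattern[pos] = -pattern[pos];      /* ... and invert its +1/-1 state */
  }
}

Because positions may repeat, slightly fewer than Noise percent of the units can end up flipped; a stricter version would track already-chosen indices.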
Example #5
   void CEPuckLightSensor::Update() {
      /* Here we assume that the e-puck is rotated only around the Z axis */

      /* Erase readings */
      for(size_t i = 0; i < m_tReadings.size(); ++i) {
         m_tReadings[i].Value = 0.0f;
      }
      /* Get e-puck position */
      const CVector3& cEPuckPosition = GetEntity().GetEmbodiedEntity().GetPosition();
      /* Get e-puck orientation */
      CRadians cTmp1, cTmp2, cOrientationZ;
      GetEntity().GetEmbodiedEntity().GetOrientation().ToEulerAngles(cOrientationZ, cTmp1, cTmp2);
      /* Buffer for calculating the light--e-puck distance */
      CVector3 cLightDistance;
      /* Buffer for the angle of the sensor wrt the e-puck */
      CRadians cLightAngle;
      /* Initialize the occlusion check ray start to the baseline of the e-puck */
      CRay cOcclusionCheckRay;
      cOcclusionCheckRay.SetStart(cEPuckPosition);
      /* Buffer to store the intersection data */
      CSpace::SEntityIntersectionItem<CEmbodiedEntity> sIntersectionData;
      /* Ignore the sensing e-puck when checking for occlusions */
      TEmbodiedEntitySet tIgnoreEntities;
      tIgnoreEntities.insert(&GetEntity().GetEmbodiedEntity());
      /*
       * 1. go through the list of light entities in the scene
       * 2. check if a light is occluded
       * 3. if it isn't, distribute the reading across the sensors
       *    NOTE: the readings are additive
       * 4. go through the sensors and clamp their values
       */
      try{
         CSpace::TAnyEntityMap& tEntityMap = m_cSpace.GetEntitiesByType("light_entity");
         for(CSpace::TAnyEntityMap::iterator it = tEntityMap.begin();
             it != tEntityMap.end();
             ++it) {
            /* Get a reference to the light */
            CLightEntity& cLight = *(any_cast<CLightEntity*>(it->second));
            /* Consider the light only if it has non zero intensity */
            if(cLight.GetIntensity() > 0.0f) {
               /* Get the light position */
               const CVector3& cLightPosition = cLight.GetPosition();
               /* Set the ray end */
               cOcclusionCheckRay.SetEnd(cLightPosition);
               /* Check occlusion between the e-puck and the light */
               if(! m_cSpace.GetClosestEmbodiedEntityIntersectedByRay(sIntersectionData,
                                                                      cOcclusionCheckRay,
                                                                      tIgnoreEntities)) {
                  /* The light is not occluded */
                  if(m_bShowRays) GetEntity().GetControllableEntity().AddCheckedRay(false, cOcclusionCheckRay);
                  /* Get the distance between the light and the e-puck */
                  cOcclusionCheckRay.ToVector(cLightDistance);
                  /* Linearly scale the distance with the light intensity
                     The greater the intensity, the smaller the distance */
                  cLightDistance /= cLight.GetIntensity();
                  /* Get the angle wrt the e-puck rotation */
                  cLightAngle = cLightDistance.GetZAngle();
                  cLightAngle -= cOrientationZ;
                  /* Transform it into counter-clockwise rotation */
                  cLightAngle.Negate().UnsignedNormalize();
                  /* Find reading corresponding to the sensor */
                  SInt16 nMin = 0;
                  for(SInt16 i = 1; i < NUM_READINGS; ++i){
                     if((cLightAngle - m_tReadings[i].Angle).GetAbsoluteValue() < (cLightAngle - m_tReadings[nMin].Angle).GetAbsoluteValue())
                        nMin = i;
                  }
                  /* Set the actual readings */
                  Real fReading = cLightDistance.Length();
                  m_tReadings[Modulo((SInt16)(nMin-1), NUM_READINGS)].Value += ComputeReading(fReading * Cos(cLightAngle - m_tReadings[Modulo(nMin-1, NUM_READINGS)].Angle));
                  m_tReadings[  nMin                                ].Value += ComputeReading(fReading);
                  m_tReadings[Modulo((SInt16)(nMin+1), NUM_READINGS)].Value += ComputeReading(fReading * Cos(cLightAngle - m_tReadings[Modulo(nMin+1, NUM_READINGS)].Angle));
               }
               else {
                  /* The ray is occluded */
                  if(m_bShowRays) {
                     GetEntity().GetControllableEntity().AddCheckedRay(true, cOcclusionCheckRay);
                     GetEntity().GetControllableEntity().AddIntersectionPoint(cOcclusionCheckRay, sIntersectionData.TOnRay);
                  }
               }
            }
         }
      }
      catch(argos::CARGoSException&) {
         /* No "light_entity" present in the space: leave all readings at zero */
      }

      /* Now go through the sensors, add noise and clamp their values if above 1024 or below 0 */
      for(size_t i = 0; i < m_tReadings.size(); ++i) {
         if(m_fNoiseLevel>0.0f)
            AddNoise(i);
         if(m_tReadings[i].Value > 1024.0f)
            m_tReadings[i].Value = 1024.0f;
         if(m_tReadings[i].Value < 0.0f)
            m_tReadings[i].Value = 0.0f;
      }
   }
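The Modulo() calls above wrap the sensor index around the ring of NUM_READINGS light sensors, so that for nMin equal to 0 the index nMin-1 refers to the last sensor instead of -1. ARGoS ships such a utility; a stand-in sketch with the same wrap-around behaviour for negative values (not the library code) would be:

SInt16 Modulo(SInt16 n_value, SInt16 n_range) {
   SInt16 nRest = n_value % n_range;   /* in C++ the remainder keeps the sign of n_value */
   if(nRest < 0) nRest += n_range;     /* shift negative remainders back into [0, n_range) */
   return nRest;
}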
template< class TOutputScalarType >
void DwiPhantomGenerationFilter< TOutputScalarType >
::GenerateData()
{
    if (m_NoiseVariance < 0)
        m_NoiseVariance = 0.001;

    if (!m_SimulateBaseline)
    {
        MITK_INFO << "Baseline image values are set to default. Noise variance value is treated as SNR!";
        if (m_NoiseVariance <= 0)
            m_NoiseVariance = 0.0001;
        if (m_NoiseVariance>99)
            m_NoiseVariance = 0;
        else
        {
            m_NoiseVariance = m_DefaultBaseline/(m_NoiseVariance*m_SignalScale);
            m_NoiseVariance *= m_NoiseVariance;
        }
    }

    m_RandGen = Statistics::MersenneTwisterRandomVariateGenerator::New();
    m_RandGen->SetSeed();

    typename OutputImageType::Pointer outImage = OutputImageType::New();
    outImage->SetSpacing( m_Spacing );
    outImage->SetOrigin( m_Origin );
    outImage->SetDirection( m_DirectionMatrix );
    outImage->SetLargestPossibleRegion( m_ImageRegion );
    outImage->SetBufferedRegion( m_ImageRegion );
    outImage->SetRequestedRegion( m_ImageRegion );
    outImage->SetVectorLength(m_GradientList.size());
    outImage->Allocate();
    typename OutputImageType::PixelType pix;
    pix.SetSize(m_GradientList.size());
    pix.Fill(0.0);
    outImage->FillBuffer(pix);
    this->SetNthOutput (0, outImage);

    double minSpacing = m_Spacing[0];
    if (m_Spacing[1]<minSpacing)
        minSpacing = m_Spacing[1];
    if (m_Spacing[2]<minSpacing)
        minSpacing = m_Spacing[2];

    m_DirectionImageContainer = ItkDirectionImageContainer::New();
    for (int i=0; i<m_SignalRegions.size(); i++)
    {
        itk::Vector< float, 3 > nullVec; nullVec.Fill(0.0);
        ItkDirectionImage::Pointer img = ItkDirectionImage::New();
        img->SetSpacing( m_Spacing );
        img->SetOrigin( m_Origin );
        img->SetDirection( m_DirectionMatrix );
        img->SetRegions( m_ImageRegion );
        img->Allocate();
        img->FillBuffer(nullVec);
        m_DirectionImageContainer->InsertElement(m_DirectionImageContainer->Size(), img);
    }
    m_NumDirectionsImage = ItkUcharImgType::New();
    m_NumDirectionsImage->SetSpacing( m_Spacing );
    m_NumDirectionsImage->SetOrigin( m_Origin );
    m_NumDirectionsImage->SetDirection( m_DirectionMatrix );
    m_NumDirectionsImage->SetRegions( m_ImageRegion );
    m_NumDirectionsImage->Allocate();
    m_NumDirectionsImage->FillBuffer(0);

    m_SNRImage = ItkFloatImgType::New();
    m_SNRImage->SetSpacing( m_Spacing );
    m_SNRImage->SetOrigin( m_Origin );
    m_SNRImage->SetDirection( m_DirectionMatrix );
    m_SNRImage->SetRegions( m_ImageRegion );
    m_SNRImage->Allocate();
    m_SNRImage->FillBuffer(0);

    vtkSmartPointer<vtkCellArray> m_VtkCellArray = vtkSmartPointer<vtkCellArray>::New();
    vtkSmartPointer<vtkPoints>    m_VtkPoints = vtkSmartPointer<vtkPoints>::New();

    m_BaselineImages = 0;
    for( unsigned int i=0; i<m_GradientList.size(); i++)
        if (m_GradientList[i].GetNorm()<=0.0001)
            m_BaselineImages++;

    typedef ImageRegionIterator<OutputImageType>      IteratorOutputType;
    IteratorOutputType it (outImage, m_ImageRegion);

    // isotropic tensor
    itk::DiffusionTensor3D<float> isoTensor;
    isoTensor.Fill(0);
    float e1 = m_GreyMatterAdc;
    float e2 = m_GreyMatterAdc;
    float e3 = m_GreyMatterAdc;
    isoTensor.SetElement(0,e1);
    isoTensor.SetElement(3,e2);
    isoTensor.SetElement(5,e3);
    m_MaxBaseline = GetTensorL2Norm(isoTensor);

    GenerateTensors();

    // simulate measurement
    m_MeanBaseline = 0;
    double noiseStdev = sqrt(m_NoiseVariance);
    while(!it.IsAtEnd())
    {
        pix = it.Get();
        typename OutputImageType::IndexType index = it.GetIndex();

        int numDirs = 0;
        for (int i=0; i<m_SignalRegions.size(); i++)
        {
            ItkUcharImgType::Pointer region = m_SignalRegions.at(i);

            if (region->GetPixel(index)!=0)
            {
                numDirs++;
                pix += SimulateMeasurement(m_TensorList[i], m_TensorWeight[i]);

                // set direction image pixel
                ItkDirectionImage::Pointer img = m_DirectionImageContainer->GetElement(i);
                itk::Vector< float, 3 > pixel = img->GetPixel(index);
                vnl_vector_fixed<double, 3> dir = m_TensorDirection.at(i);
                dir.normalize();
                dir *= m_TensorWeight.at(i);
                pixel.SetElement(0, dir[0]);
                pixel.SetElement(1, dir[1]);
                pixel.SetElement(2, dir[2]);
                img->SetPixel(index, pixel);

                vtkSmartPointer<vtkPolyLine> container = vtkSmartPointer<vtkPolyLine>::New();
                itk::ContinuousIndex<double, 3> center;
                center[0] = index[0];
                center[1] = index[1];
                center[2] = index[2];
                itk::Point<double> worldCenter;
                outImage->TransformContinuousIndexToPhysicalPoint( center, worldCenter );
                itk::Point<double> worldStart;
                worldStart[0] = worldCenter[0]-dir[0]/2 * minSpacing;
                worldStart[1] = worldCenter[1]-dir[1]/2 * minSpacing;
                worldStart[2] = worldCenter[2]-dir[2]/2 * minSpacing;
                vtkIdType id = m_VtkPoints->InsertNextPoint(worldStart.GetDataPointer());
                container->GetPointIds()->InsertNextId(id);
                itk::Point<double> worldEnd;
                worldEnd[0] = worldCenter[0]+dir[0]/2 * minSpacing;
                worldEnd[1] = worldCenter[1]+dir[1]/2 * minSpacing;
                worldEnd[2] = worldCenter[2]+dir[2]/2 * minSpacing;
                id = m_VtkPoints->InsertNextPoint(worldEnd.GetDataPointer());
                container->GetPointIds()->InsertNextId(id);
                m_VtkCellArray->InsertNextCell(container);
            }
        }

        if (numDirs>1)
        {
            for (int i=0; i<m_GradientList.size(); i++)
                pix[i] /= numDirs;
        }
        else if (numDirs==0)
        {
            if (m_SimulateBaseline)
                pix = SimulateMeasurement(isoTensor, 1.0);
            else
                pix.Fill(0.0);
        }

        m_MeanBaseline += pix[0];
        it.Set(pix);
        m_NumDirectionsImage->SetPixel(index, numDirs);
        if (m_NoiseVariance>0)
            m_SNRImage->SetPixel(index, pix[0]/(noiseStdev*m_SignalScale));
        ++it;
    }
    m_MeanBaseline /= m_ImageRegion.GetNumberOfPixels();
    if (m_NoiseVariance>0)
        MITK_INFO << "Mean SNR: " << m_MeanBaseline/(noiseStdev*m_SignalScale);
    else
        MITK_INFO << "No noise added";

    // add Rician noise
    it.GoToBegin();
    while(!it.IsAtEnd())
    {
        pix = it.Get();
        AddNoise(pix);
        it.Set(pix);
        ++it;
    }

    // generate fiber bundle
    vtkSmartPointer<vtkPolyData> directionsPolyData = vtkSmartPointer<vtkPolyData>::New();
    directionsPolyData->SetPoints(m_VtkPoints);
    directionsPolyData->SetLines(m_VtkCellArray);
    m_OutputFiberBundle = mitk::FiberBundleX::New(directionsPolyData);
}
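The AddNoise(pix) call in the second pass applies the Rician noise announced by the comment. The standard construction replaces each channel magnitude S by sqrt((S+g1)^2 + g2^2), where g1 and g2 are zero-mean Gaussian samples with variance m_NoiseVariance. A sketch of such a per-voxel routine, reusing the m_RandGen Mersenne-Twister generator created above (the filter's actual member function may differ in details):

template< class TOutputScalarType >
void DwiPhantomGenerationFilter< TOutputScalarType >
::AddNoise(typename OutputImageType::PixelType& pix)
{
    for (unsigned int i = 0; i < pix.Size(); i++)
    {
        double signal = pix[i];
        /* Two independent zero-mean Gaussian samples with the configured variance */
        double g1 = m_RandGen->GetNormalVariate(0.0, m_NoiseVariance);
        double g2 = m_RandGen->GetNormalVariate(0.0, m_NoiseVariance);
        /* Rician-distributed magnitude of the noisy complex signal */
        pix[i] = sqrt((signal + g1)*(signal + g1) + g2*g2);
    }
}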