void mitk::SegmentationInterpolationController::SetChangedSlice( const Image* sliceDiff, unsigned int sliceDimension, unsigned int sliceIndex, unsigned int timeStep )
{
  // Update the interpolation bookkeeping for one changed segmentation slice.
  // Invalid input is silently ignored: null diff image, a slice dimension
  // outside 0..2, or indices outside the per-timestep/per-dimension vectors.
  if ( !sliceDiff ) return;
  if ( sliceDimension > 2 ) return;
  if ( timeStep >= m_SegmentationCountInSlice.size() ) return;
  if ( sliceIndex >= m_SegmentationCountInSlice[timeStep][sliceDimension].size() ) return;

  unsigned int dim0(0);
  unsigned int dim1(1);

  // determine the other two dimensions (the in-plane axes of the changed slice)
  switch (sliceDimension)
  {
    default:
    case 2:
      dim0 = 0; dim1 = 1; break;
    case 1:
      dim0 = 0; dim1 = 2; break;
    case 0:
      dim0 = 1; dim1 = 2; break;
  }

  //mitkIpPicDescriptor* rawSlice = const_cast<Image*>(sliceDiff)->GetSliceData()->GetPicDescriptor(); // we promise not to change anything!

  // Read-only access to the diff slice's raw pixel buffer; the pointer is
  // forwarded to ScanChangedSlice via SetChangedSliceOptions and only read.
  mitk::ImageReadAccessor readAccess(sliceDiff);
  unsigned char* rawSlice = (unsigned char*) readAccess.GetData();
  if (!rawSlice) return;

  // The ITK access macro dispatches to ScanChangedSlice with the correct
  // pixel type for the 2D diff image.
  AccessFixedDimensionByItk_1( sliceDiff, ScanChangedSlice, 2, SetChangedSliceOptions(sliceDimension, sliceIndex, dim0, dim1, timeStep, rawSlice) );

  //PrintStatus();

  Modified();
}
  /**
  * This test takes a generated gradient mitk image. Then an IGTL Message is produced
  * using the ImageToIGTLMessageFilter. In the end it is tested, wether the image data in both images is equivalent.
  */
  void Equal_ContentOfIGTLImageMessageAndMitkImage_True(unsigned int dim)
  {
    m_TestImage = mitk::ImageGenerator::GenerateGradientImage<unsigned char>(dim, dim, 1u);

    m_ImageToIGTLMessageFilter->SetInput(m_TestImage);

    m_ImageToIGTLMessageFilter->GenerateData();

    mitk::IGTLMessage::Pointer resultMessage = m_ImageToIGTLMessageFilter->GetOutput();

    CPPUNIT_ASSERT_MESSAGE("Output of ImageToIGTLMessageFilter was null", resultMessage != nullptr);

    igtl::MessageBase::Pointer msgBase = resultMessage->GetMessage();
    igtl::ImageMessage* igtlImageMessage = (igtl::ImageMessage*)(msgBase.GetPointer());

    CPPUNIT_ASSERT_MESSAGE("Output of ImageToIGTLMessageFilter was not of type igtl::ImageMessage", igtlImageMessage != nullptr);

    const void* outputBuffer = igtlImageMessage->GetScalarPointer();

    CPPUNIT_ASSERT_MESSAGE("Output Buffer was null", outputBuffer != nullptr);

    mitk::ImageReadAccessor readAccess(m_TestImage, m_TestImage->GetChannelData(0));
    const void* inputBuffer = readAccess.GetData();

    CPPUNIT_ASSERT_MESSAGE("Input Buffer was null", inputBuffer != nullptr);

    CPPUNIT_ASSERT_MESSAGE("Images were not identical", memcmp(inputBuffer, outputBuffer, dim*dim) == 0);
  }
// Reads one pixel of a 2D or 3D image at the given index and stores it in
// 'value'. For any other dimensionality, 'value' is left unchanged.
void ReadPixel(mitk::PixelType, mitk::Image::Pointer image, itk::Index<3> indexPoint, double& value)
{
  const unsigned int dimension = image->GetDimension();

  if (dimension == 2)
  {
    itk::Index<2> index2D;
    index2D[0] = indexPoint[0];
    index2D[1] = indexPoint[1];
    mitk::ImagePixelReadAccessor<PixelType,2> accessor(image, image->GetSliceData(0));
    value = accessor.GetPixelByIndex(index2D);
  }
  else if (dimension == 3)
  {
    itk::Index<3> index3D;
    index3D[0] = indexPoint[0];
    index3D[1] = indexPoint[1];
    index3D[2] = indexPoint[2];
    mitk::ImagePixelReadAccessor<PixelType,3> accessor(image, image->GetVolumeData(0));
    value = accessor.GetPixelByIndex(index3D);
  }
  // other dimensionalities are not handled
}
void mitk::SegmentationInterpolationController::ScanWholeVolume( const itk::Image<DATATYPE, 3>*, const Image* volume, unsigned int timeStep )
{
  // Scan every axial (dim 2) slice of the given timestep and count the
  // segmented pixels per slice. Read-only: we only count, never modify.
  if (!volume) return;
  if ( timeStep >= m_SegmentationCountInSlice.size() ) return;

  ImageReadAccessor readAccess(volume, volume->GetVolumeData(timeStep));

  // Hoisted out of the loop: the accessor's data pointer and the slice size
  // are invariant over all slices (previously GetData() was re-fetched on
  // every iteration).
  const DATATYPE* rawVolume = static_cast<const DATATYPE*>( readAccess.GetData() );
  const unsigned int sliceSize = volume->GetDimension(0) * volume->GetDimension(1);

  for (unsigned int slice = 0; slice < volume->GetDimension(2); ++slice)
  {
    const DATATYPE* rawSlice = rawVolume + sliceSize * slice;

    ScanChangedSlice<DATATYPE>( nullptr, SetChangedSliceOptions(2, slice, 0, 1, timeStep, rawSlice) );
  }
}
  // Converts an OpenCV image through this filter's pipeline and inserts the
  // result as one time step of the given mitk::Image. The member m_Image is
  // updated under the mutex afterwards.
  void OpenCVToMitkImageFilter::InsertOpenCVImageAsMitkTimeSlice(cv::Mat openCVImage, Image::Pointer mitkImage, int timeStep)
  {
    // run the conversion pipeline on the OpenCV matrix
    this->SetOpenCVMat(openCVImage);
    this->Modified();
    this->Update();

    // copy the converted output's geometry onto the target time step
    auto* converted = this->GetOutput();
    auto* targetGeometry = mitkImage->GetGeometry(timeStep);
    targetGeometry->SetSpacing(converted->GetGeometry()->GetSpacing());
    targetGeometry->SetOrigin(converted->GetGeometry()->GetOrigin());
    targetGeometry->SetIndexToWorldTransform(converted->GetGeometry()->GetIndexToWorldTransform());

    // import the converted pixel buffer as the volume of this time step
    mitk::ImageReadAccessor readAccess(converted);
    mitkImage->SetImportVolume(readAccess.GetData(), timeStep);

    mitkImage->Modified();
    mitkImage->Update();

    // publish the updated image under the mutex
    m_ImageMutex->Lock();
    m_Image = mitkImage;
    m_ImageMutex->Unlock();
  }
void mitk::ImageToIGTLMessageFilter::GenerateData()
{
  // Converts each indexed input mitk::Image into an igtl::ImageMessage on the
  // matching output. Only 1-3 dimensional, single-channel images are handled;
  // the pixel data is copied and byte-swapped to little endian.
  for (unsigned int i = 0; i < this->GetNumberOfIndexedOutputs(); ++i)
  {
    mitk::IGTLMessage* output = this->GetOutput(i);
    assert(output);

    const mitk::Image* img = this->GetInput(i);

    int dims = img->GetDimension();
    int chn = img->GetNumberOfChannels();

    if (dims < 1)
    {
      MITK_ERROR << "Can not handle dimensionless images";
      // BUGFIX: previously this case only logged and then fell through,
      // processing the invalid image anyway.
      continue;
    }
    if (dims > 3)
    {
      MITK_ERROR << "Can not handle more than three dimensions";
      continue;
    }

    if (chn != 1)
    {
      MITK_ERROR << "Can not handle anything but one channel. Image contained " << chn;
      continue;
    }

    igtl::ImageMessage::Pointer imgMsg = igtl::ImageMessage::New();

    // TODO: Which kind of coordinate system does MITK really use?
    imgMsg->SetCoordinateSystem(igtl::ImageMessage::COORDINATE_RAS);

    // We could do this based on the host endiannes, but that's weird.
    // We instead use little endian, as most modern systems are little endian,
    // so there will probably not be an endian swap involved.
    imgMsg->SetEndian(igtl::ImageMessage::ENDIAN_LITTLE);

    // Set number of components.
    mitk::PixelType type = img->GetPixelType();
    imgMsg->SetNumComponents(type.GetNumberOfComponents());

    // Set scalar type.
    switch (type.GetComponentType())
    {
    case itk::ImageIOBase::CHAR:
      imgMsg->SetScalarTypeToInt8();
      break;
    case itk::ImageIOBase::UCHAR:
      imgMsg->SetScalarTypeToUint8();
      break;
    case itk::ImageIOBase::SHORT:
      imgMsg->SetScalarTypeToInt16();
      break;
    case itk::ImageIOBase::USHORT:
      imgMsg->SetScalarTypeToUint16();
      break;
    case itk::ImageIOBase::INT:
      imgMsg->SetScalarTypeToInt32();
      break;
    case itk::ImageIOBase::UINT:
      imgMsg->SetScalarTypeToUint32();
      break;
    case itk::ImageIOBase::LONG:
      // OIGTL doesn't formally support 64bit int scalars, but if they are
      // ever added, they will have the identifier 8 assigned.
      imgMsg->SetScalarType(8);
      break;
    case itk::ImageIOBase::ULONG:
      // OIGTL doesn't formally support 64bit uint scalars, but if they are
      // ever added, they will have the identifier 9 assigned.
      imgMsg->SetScalarType(9);
      break;
    case itk::ImageIOBase::FLOAT:
      // The igtl library has no method for this. Correct type is 10.
      imgMsg->SetScalarType(10);
      break;
    case itk::ImageIOBase::DOUBLE:
      // The igtl library has no method for this. Correct type is 11.
      imgMsg->SetScalarType(11);
      break;
    default:
      MITK_ERROR << "Can not handle pixel component type "
        << type.GetComponentType();
      return;
    }

    // Set transformation matrix.
    vtkMatrix4x4* matrix = img->GetGeometry()->GetVtkMatrix();

    float matF[4][4];
    // NOTE: loop variables renamed so they no longer shadow the outer 'i'.
    for (int row = 0; row < 4; ++row)
    {
      for (int col = 0; col < 4; ++col)
      {
        matF[row][col] = matrix->GetElement(row, col);
      }
    }
    imgMsg->SetMatrix(matF);

    float spacing[3];
    auto spacingImg = img->GetGeometry()->GetSpacing();

    for (int axis = 0; axis < 3; ++axis)
      spacing[axis] = spacingImg[axis];

    imgMsg->SetSpacing(spacing);

    // Set dimensions.
    int sizes[3];
    for (size_t j = 0; j < 3; ++j)
    {
      sizes[j] = img->GetDimension(j);
    }
    imgMsg->SetDimensions(sizes);

    // Allocate and copy data.
    imgMsg->AllocatePack();
    imgMsg->AllocateScalars();

    // Widen before multiplying so large volumes do not overflow int.
    size_t num_pixel = static_cast<size_t>(sizes[0]) * sizes[1] * sizes[2];
    void* out = imgMsg->GetScalarPointer();
    {
      // Scoped, so that readAccess will be released ASAP.
      mitk::ImageReadAccessor readAccess(img, img->GetChannelData(0));
      const void* in = readAccess.GetData();

      memcpy(out, in, num_pixel * type.GetSize());
    }

    // We want to byte swap to little endian. We would like to just
    // swap by number of bytes for each component, but itk::ByteSwapper
    // is templated over element type, not over element size. So we need to
    // switch on the size and use types of the same size.
    size_t num_scalars = num_pixel * type.GetNumberOfComponents();
    switch (type.GetComponentType())
    {
    case itk::ImageIOBase::CHAR:
    case itk::ImageIOBase::UCHAR:
      // No endian conversion necessary, because a char is exactly one byte!
      break;
    case itk::ImageIOBase::SHORT:
    case itk::ImageIOBase::USHORT:
      itk::ByteSwapper<short>::SwapRangeFromSystemToLittleEndian((short*)out,
        num_scalars);
      break;
    case itk::ImageIOBase::INT:
    case itk::ImageIOBase::UINT:
      itk::ByteSwapper<int>::SwapRangeFromSystemToLittleEndian((int*)out,
        num_scalars);
      break;
    case itk::ImageIOBase::LONG:
    case itk::ImageIOBase::ULONG:
      itk::ByteSwapper<long>::SwapRangeFromSystemToLittleEndian((long*)out,
        num_scalars);
      break;
    case itk::ImageIOBase::FLOAT:
      itk::ByteSwapper<float>::SwapRangeFromSystemToLittleEndian((float*)out,
        num_scalars);
      break;
    case itk::ImageIOBase::DOUBLE:
      itk::ByteSwapper<double>::SwapRangeFromSystemToLittleEndian(
        (double*)out, num_scalars);
      break;
    }

    imgMsg->Pack();

    output->SetMessage(imgMsg.GetPointer());
  }
}
// Unit test for mitk::ToFDistanceImageToSurfaceFilter: checks intrinsics
// handling, both reconstruction modes, and the backward transformation of
// the produced surface points against the input distance image.
int mitkToFDistanceImageToSurfaceFilterTest(int /* argc */, char* /*argv*/[])
{
  MITK_TEST_BEGIN("ToFDistanceImageToSurfaceFilter");
  mitk::ToFDistanceImageToSurfaceFilter::Pointer filter = mitk::ToFDistanceImageToSurfaceFilter::New();
  // create test image
  unsigned int dimX =204;
  unsigned int dimY =204;
  mitk::Image::Pointer image = mitk::ImageGenerator::GenerateRandomImage<float>(dimX,dimY);
  //initialize intrinsic parameters with some arbitrary values
  ToFScalarType focalLengthX = 295.78960;
  ToFScalarType focalLengthY = 296.348535;
  ToFPoint2D focalLengthXY;
  focalLengthXY[0]=focalLengthX;
  focalLengthXY[1]=focalLengthY;
  ToFScalarType k1=-0.36,k2=-0.14,p1=0.001,p2=-0.00;
  ToFPoint2D principalPoint;
  principalPoint[0] = 103.576546;
  principalPoint[1] = 100.1532;
  mitk::CameraIntrinsics::Pointer cameraIntrinsics = mitk::CameraIntrinsics::New();
  cameraIntrinsics->SetFocalLength(focalLengthX,focalLengthY);
  cameraIntrinsics->SetPrincipalPoint(principalPoint[0],principalPoint[1]);
  cameraIntrinsics->SetDistorsionCoeffs(k1,k2,p1,p2);
  // test SetCameraIntrinsics()
  filter->SetCameraIntrinsics(cameraIntrinsics);
  MITK_TEST_CONDITION_REQUIRED((focalLengthX==filter->GetCameraIntrinsics()->GetFocalLengthX()),"Testing SetCameraIntrinsics with focalLength");
  ToFPoint2D pp;
  pp[0] = filter->GetCameraIntrinsics()->GetPrincipalPointX();
  pp[1] = filter->GetCameraIntrinsics()->GetPrincipalPointY();
  MITK_TEST_CONDITION_REQUIRED(mitk::Equal(principalPoint,pp),"Testing SetCameraIntrinsics with principalPoint()");
  // test SetInterPixelDistance()
  ToFPoint2D interPixelDistance;
  interPixelDistance[0] = 0.04564;
  interPixelDistance[1] = 0.0451564;
  filter->SetInterPixelDistance(interPixelDistance);
  ToFPoint2D ipD = filter->GetInterPixelDistance();
  MITK_TEST_CONDITION_REQUIRED(mitk::Equal(ipD,interPixelDistance),"Testing Set/GetInterPixelDistance()");

  // test SetReconstructionMode()
  filter->SetReconstructionMode(mitk::ToFDistanceImageToSurfaceFilter::WithInterPixelDistance);
  MITK_TEST_CONDITION_REQUIRED(filter->GetReconstructionMode() == mitk::ToFDistanceImageToSurfaceFilter::WithInterPixelDistance,"Testing Set/GetReconstructionMode()");

  // test Set/GetInput()
  filter->SetInput(image);
  MITK_TEST_CONDITION_REQUIRED((image==filter->GetInput()),"Testing Set/GetInput()");

  // test filter without subset (without interpixeldistance)
  MITK_INFO<<"Test filter with subset without interpixeldistance ";
  filter->SetReconstructionMode(mitk::ToFDistanceImageToSurfaceFilter::WithOutInterPixelDistance);
  MITK_TEST_CONDITION_REQUIRED(filter->GetReconstructionMode() == mitk::ToFDistanceImageToSurfaceFilter::WithOutInterPixelDistance,"Testing Set/GetReconstructionMode()");

  vtkSmartPointer<vtkPoints> expectedResult = vtkSmartPointer<vtkPoints>::New();
  expectedResult->SetDataTypeToDouble();
  unsigned int counter = 0;
  // BUGFIX: 'point' used to be allocated with new[] (and leaked when it was
  // re-allocated below) and freed with a mismatching scalar delete (UB).
  // A stack array needs no cleanup and is reused throughout the test.
  double point[3];
  for (unsigned int j=0; j<dimX; j++)
  {
    for (unsigned int i=0; i<dimY; i++)
    {
      itk::Index<2> index = {{ i, j }};
      float distance = 0.0;

      try
      {
        mitk::ImagePixelReadAccessor<float,2> readAccess(image, image->GetSliceData());
        distance = readAccess.GetPixelByIndex(index);
      }
      catch(mitk::Exception& e)
      {
          MITK_ERROR << "Image read exception!" << e.what();
      }

      ToFPoint3D coordinate = mitk::ToFProcessingCommon::IndexToCartesianCoordinates(i,j,distance,focalLengthX,focalLengthY,principalPoint[0],principalPoint[1]);
      point[0] = coordinate[0];
      point[1] = coordinate[1];
      point[2] = coordinate[2];
      unsigned int pointID = index[0] + index[1]*dimY;
      // points with distance 0 are not part of the surface
      if (distance!=0)
      {
        expectedResult->InsertPoint(pointID,point);
      }
      counter++;
    }
  }
  filter->Update();
  mitk::Surface::Pointer resultSurface = filter->GetOutput();
  vtkSmartPointer<vtkPoints> result = vtkSmartPointer<vtkPoints>::New();
  result->SetDataTypeToDouble();
  result = resultSurface->GetVtkPolyData()->GetPoints();

  MITK_TEST_CONDITION_REQUIRED((expectedResult->GetNumberOfPoints()==result->GetNumberOfPoints()),"Test if number of points in surface is equal");
  bool pointSetsEqual = true;
  for (vtkIdType i=0; i<expectedResult->GetNumberOfPoints(); i++)
  {
    double* expected = expectedResult->GetPoint(i);
    double* res = result->GetPoint(i);

    ToFPoint3D expectedPoint;
    expectedPoint[0] = expected[0];
    expectedPoint[1] = expected[1];
    expectedPoint[2] = expected[2];
    ToFPoint3D resultPoint;
    resultPoint[0] = res[0];
    resultPoint[1] = res[1];
    resultPoint[2] = res[2];

    if (!mitk::Equal(expectedPoint,resultPoint))
    {
      pointSetsEqual = false;
    }
  }
  MITK_TEST_CONDITION_REQUIRED(pointSetsEqual,"Testing filter without subset");

  // test filter without subset (with interpixeldistance)
  MITK_INFO<<"Test filter with subset with interpixeldistance ";
  filter->SetReconstructionMode(mitk::ToFDistanceImageToSurfaceFilter::WithInterPixelDistance);
  MITK_TEST_CONDITION_REQUIRED(filter->GetReconstructionMode() == mitk::ToFDistanceImageToSurfaceFilter::WithInterPixelDistance,"Testing Set/GetReconstructionMode()");
  // calculate focal length considering inter pixel distance
  ToFScalarType focalLength = (focalLengthX*interPixelDistance[0]+focalLengthY*interPixelDistance[1])/2.0;
  expectedResult = vtkSmartPointer<vtkPoints>::New();
  expectedResult->SetDataTypeToDouble();
  counter = 0;
  // 'point' (stack buffer) is simply reused here; the former code leaked a
  // second heap allocation at this spot.
  for (unsigned int j=0; j<dimX; j++)
  {
    for (unsigned int i=0; i<dimY; i++)
    {
        itk::Index<2> index = {{ i, j }};
        float distance = 0.0;
        try
        {
          mitk::ImagePixelReadAccessor<float,2> readAccess(image, image->GetSliceData());
          distance = readAccess.GetPixelByIndex(index);
        }
        catch(mitk::Exception& e)
        {
            MITK_ERROR << "Image read exception!" << e.what();
        }
      ToFPoint3D coordinate = mitk::ToFProcessingCommon::IndexToCartesianCoordinatesWithInterpixdist(i,j,distance,focalLength,interPixelDistance,principalPoint);
      point[0] = coordinate[0];
      point[1] = coordinate[1];
      point[2] = coordinate[2];
      unsigned int pointID = index[0] + index[1]*dimY;
      if (distance!=0)
      {
        expectedResult->InsertPoint(pointID,point);
      }
      counter++;
    }
  }
  filter->Modified();
  filter->Update();
  resultSurface = filter->GetOutput();
  result = vtkSmartPointer<vtkPoints>::New();
  result->SetDataTypeToDouble();
  result = resultSurface->GetVtkPolyData()->GetPoints();
  MITK_TEST_CONDITION_REQUIRED((expectedResult->GetNumberOfPoints()==result->GetNumberOfPoints()),"Test if number of points in surface is equal");
  pointSetsEqual = true;
  for (vtkIdType i=0; i<expectedResult->GetNumberOfPoints(); i++)
  {
    double* expected = expectedResult->GetPoint(i);
    double* res = result->GetPoint(i);

    ToFPoint3D expectedPoint;
    expectedPoint[0] = expected[0];
    expectedPoint[1] = expected[1];
    expectedPoint[2] = expected[2];
    ToFPoint3D resultPoint;
    resultPoint[0] = res[0];
    resultPoint[1] = res[1];
    resultPoint[2] = res[2];

    if (!mitk::Equal(expectedPoint,resultPoint))
    {
      MITK_INFO<<"expected: "<<expectedPoint;
      MITK_INFO<<"result: "<<resultPoint;
      pointSetsEqual = false;
    }
  }
  MITK_TEST_CONDITION_REQUIRED(pointSetsEqual,"Testing filter without subset");


  //Backwardtransformation test without interpixeldistance
  bool backwardTransformationsPointsEqual = true;
  for (vtkIdType i=0; i<expectedResult->GetNumberOfPoints(); i++)
  {
    double* expected = expectedResult->GetPoint(i);
    double* res = result->GetPoint(i);

    ToFPoint3D expectedPoint;
    expectedPoint[0] = expected[0];
    expectedPoint[1] = expected[1];
    expectedPoint[2] = expected[2];
    ToFPoint3D resultPoint;
    resultPoint[0] = res[0];
    resultPoint[1] = res[1];
    resultPoint[2] = res[2];

    ToFPoint3D expectedPointBackward =
        mitk::ToFProcessingCommon::CartesianToIndexCoordinates(expectedPoint,focalLengthXY,principalPoint);

    ToFPoint3D resultPointBackward =
        mitk::ToFProcessingCommon::CartesianToIndexCoordinates(resultPoint,focalLengthXY,principalPoint);

    if (!mitk::Equal(expectedPointBackward,resultPointBackward))
    {
      backwardTransformationsPointsEqual = false;
    }
  }
  MITK_TEST_CONDITION_REQUIRED(backwardTransformationsPointsEqual,"Testing backward transformation without interpixeldistance");

  //Backwardtransformation test with interpixeldistance
  backwardTransformationsPointsEqual = true;
  for (vtkIdType i=0; i<expectedResult->GetNumberOfPoints(); i++)
  {
    double* expected = expectedResult->GetPoint(i);
    double* res = result->GetPoint(i);

    ToFPoint3D expectedPoint;
    expectedPoint[0] = expected[0];
    expectedPoint[1] = expected[1];
    expectedPoint[2] = expected[2];
    ToFPoint3D resultPoint;
    resultPoint[0] = res[0];
    resultPoint[1] = res[1];
    resultPoint[2] = res[2];

    ToFPoint3D expectedPointBackward =
        mitk::ToFProcessingCommon::CartesianToIndexCoordinatesWithInterpixdist(expectedPoint,focalLength,interPixelDistance,principalPoint);

    ToFPoint3D resultPointBackward =
        mitk::ToFProcessingCommon::CartesianToIndexCoordinatesWithInterpixdist(resultPoint,focalLength,interPixelDistance,principalPoint);

    if (!mitk::Equal(expectedPointBackward,resultPointBackward))
    {
      backwardTransformationsPointsEqual = false;
    }
  }
  MITK_TEST_CONDITION_REQUIRED(backwardTransformationsPointsEqual,"Testing backward transformation with interpixeldistance");


  //Backwardtransformation test compare to original input without interpixeldistance
  bool compareToInput = true;
  for (vtkIdType i=0; i<result->GetNumberOfPoints(); i++)
  {
    double* res = result->GetPoint(i);

    ToFPoint3D resultPoint;
    resultPoint[0] = res[0];
    resultPoint[1] = res[1];
    resultPoint[2] = res[2];

    ToFPoint3D resultPointBackward =
        mitk::ToFProcessingCommon::CartesianToIndexCoordinates(resultPoint,focalLengthXY,principalPoint);

    itk::Index<2> index = {{ (int) (resultPointBackward[0]+0.5), (int) (resultPointBackward[1]+0.5) }};
    float distanceBackward = 0.0;

    try
    {
      mitk::ImagePixelReadAccessor<float,2> readAccess(image, image->GetSliceData());
      distanceBackward = readAccess.GetPixelByIndex(index);
    }
    catch(mitk::Exception& e)
    {
        MITK_ERROR << "Image read exception!" << e.what();
    }

    if (!mitk::Equal(distanceBackward,(float) resultPointBackward[2]))
    {
      MITK_INFO<<"expected: " << resultPointBackward[2];
      MITK_INFO<<"result: "<< distanceBackward;
      compareToInput = false;
    }
  }
  MITK_TEST_CONDITION_REQUIRED(compareToInput,"Testing backward transformation compared to original image without interpixeldistance");


  //Backwardtransformation test compare to original input with interpixeldistance
  compareToInput = true;
  for (vtkIdType i=0; i<result->GetNumberOfPoints(); i++)
  {
    double* res = result->GetPoint(i);

    ToFPoint3D resultPoint;
    resultPoint[0] = res[0];
    resultPoint[1] = res[1];
    resultPoint[2] = res[2];

    ToFPoint3D resultPointBackward =
        mitk::ToFProcessingCommon::CartesianToIndexCoordinatesWithInterpixdist(resultPoint,focalLength,interPixelDistance,principalPoint);

    itk::Index<2> pixelIndex = {{ (int) (resultPointBackward[0]+0.5), (int) (resultPointBackward[1]+0.5) }};
    float distanceBackward = 0.0;
    try
    {
      mitk::ImagePixelReadAccessor<float,2> readAccess(image, image->GetSliceData());
      distanceBackward = readAccess.GetPixelByIndex(pixelIndex);
    }
    catch(mitk::Exception& e)
    {
        MITK_ERROR << "Image read exception!" << e.what();
    }

    if (!mitk::Equal(distanceBackward, (float) resultPointBackward[2]))
    {
      compareToInput = false;
    }
  }
  MITK_TEST_CONDITION_REQUIRED(compareToInput,"Testing backward transformation compared to original image with interpixeldistance");

  // clean up: 'point' is a stack array, nothing to delete
  //  expectedResult->Delete();

  MITK_TEST_END();

}
    // The tests all do the same, only in different directions.
    // Builds a small segmentation (3x3 square on one slice, single pixel two
    // slices further), runs the interpolation for the slice in between, writes
    // the result back, and verifies the interpolated 2x2 square.
    void testRoutine(mitk::SliceNavigationController::ViewDirection viewDirection)
    {
        // Map the view direction onto the image axis orthogonal to the slices.
        int dim;
        switch(viewDirection)
        {
        case(mitk::SliceNavigationController::Axial): dim = 2; break;
        case(mitk::SliceNavigationController::Frontal): dim = 1; break;
        case(mitk::SliceNavigationController::Sagittal): dim = 0; break;
        case(mitk::SliceNavigationController::Original): dim = -1; break; // This is just to get rid of a warning
        // NOTE(review): for 'Original', dim stays -1 and would later be used as an
        // index into itk::Index<3> (out of range) -- callers must not pass it.
        }

        /* Fill segmentation
         *
         * 1st slice: 3x3 square segmentation
         * 2nd slice: empty
         * 3rd slice: 1x1 square segmentation in corner
         * -> 2nd slice should become 2x2 square in corner
         *
         * put accessor in scope
         */

        itk::Index<3> currentPoint;
        {
            mitk::ImagePixelWriteAccessor<mitk::Tool::DefaultSegmentationDataType, 3> writeAccessor(m_SegmentationImage);

            // Fill 3x3 slice
            currentPoint[dim] = m_CenterPoint[dim] - 1;
            for (int i=-1; i<=1; ++i)
            {
                for (int j=-1; j<=1; ++j)
                {
                    currentPoint[(dim+1)%3] = m_CenterPoint[(dim+1)%3] + i;
                    currentPoint[(dim+2)%3] = m_CenterPoint[(dim+2)%3] + j;
                    writeAccessor.SetPixelByIndexSafe(currentPoint, 1);
                }
            }
            // Now i=j=1, set point two slices up
            currentPoint[dim] = m_CenterPoint[dim] + 1;
            writeAccessor.SetPixelByIndexSafe(currentPoint, 1);
        }

    //        mitk::IOUtil::Save(m_SegmentationImage, "SOME PATH")

        m_InterpolationController->SetSegmentationVolume(m_SegmentationImage);
        m_InterpolationController->SetReferenceVolume(m_ReferenceImage);

        // This could be easier...
        // Set up a navigation controller only to obtain the plane geometry of the
        // slice through the center point, which Interpolate() requires.
        mitk::SliceNavigationController::Pointer navigationController = mitk::SliceNavigationController::New();
        navigationController->SetInputWorldTimeGeometry(m_SegmentationImage->GetTimeGeometry());
        navigationController->Update(viewDirection);
        mitk::Point3D pointMM;
        m_SegmentationImage->GetTimeGeometry()->GetGeometryForTimeStep(0)->IndexToWorld(m_CenterPoint, pointMM);
        navigationController->SelectSliceByPoint(pointMM);
        auto plane = navigationController->GetCurrentPlaneGeometry();
        mitk::Image::Pointer interpolationResult = m_InterpolationController->Interpolate(dim, m_CenterPoint[dim], plane, 0);

    //        mitk::IOUtil::Save(interpolationResult, "SOME PATH")

        // Write result into segmentation image: reslice the interpolated 2D slice
        // back into the 3D volume at the selected plane (overwrite mode).
        vtkSmartPointer<mitkVtkImageOverwrite> reslicer = vtkSmartPointer<mitkVtkImageOverwrite>::New();
        reslicer->SetInputSlice(interpolationResult->GetSliceData()->GetVtkImageAccessor(interpolationResult)->GetVtkImageData());
        reslicer->SetOverwriteMode(true);
        reslicer->Modified();
        mitk::ExtractSliceFilter::Pointer extractor =  mitk::ExtractSliceFilter::New(reslicer);
        extractor->SetInput(m_SegmentationImage);
        extractor->SetTimeStep(0);
        extractor->SetWorldGeometry(plane);
        extractor->SetVtkOutputRequest(true);
        extractor->SetResliceTransformByGeometry(m_SegmentationImage->GetTimeGeometry()->GetGeometryForTimeStep(0));
        extractor->Modified();
        extractor->Update();

    //        mitk::IOUtil::Save(m_SegmentationImage, "SOME PATH")

        // Check a 4x4 square, the center of which needs to be filled
        mitk::ImagePixelReadAccessor<mitk::Tool::DefaultSegmentationDataType, 3> readAccess(m_SegmentationImage);
        currentPoint = m_CenterPoint;

        for (int i=-1; i<=2; ++i)
        {
            for (int j=-1; j<=2; ++j)
            {
                currentPoint[(dim+1)%3] = m_CenterPoint[(dim+1)%3] + i;
                currentPoint[(dim+2)%3] = m_CenterPoint[(dim+2)%3] + j;

                // Border of the 4x4 square must stay empty, interior must be filled.
                if (i == -1 || i == 2 || j == -1 || j == 2)
                {
                    CPPUNIT_ASSERT_MESSAGE("Have false positive segmentation.", readAccess.GetPixelByIndexSafe(currentPoint) == 0);
                }
                else
                {
                    CPPUNIT_ASSERT_MESSAGE("Have false negative segmentation.", readAccess.GetPixelByIndexSafe(currentPoint) == 1);
                }
            }
        }
    }
void mitk::ComputeContourSetNormalsFilter::GenerateData()
{
  unsigned int numberOfInputs = this->GetNumberOfIndexedInputs();
  this->CreateOutputsForAllInputs(numberOfInputs);

  //Iterating over each input
  for(unsigned int i = 0; i < numberOfInputs; i++)
  {
    //Getting the inputs polydata and polygons
    Surface* currentSurface = const_cast<Surface*>( this->GetInput(i) );
    vtkPolyData* polyData = currentSurface->GetVtkPolyData();

    vtkSmartPointer<vtkCellArray> existingPolys = polyData->GetPolys();

    vtkSmartPointer<vtkPoints> existingPoints = polyData->GetPoints();

    existingPolys->InitTraversal();

    vtkIdType* cell (NULL);
    vtkIdType cellSize (0);

    //The array that contains all the vertex normals of the current polygon
    vtkSmartPointer<vtkDoubleArray> normals = vtkSmartPointer<vtkDoubleArray>::New();
    normals->SetNumberOfComponents(3);
    normals->SetNumberOfTuples(polyData->GetNumberOfPoints());

    //If the current contour is an inner contour then the direction is -1
    //A contour lies inside another one if the pixel values in the direction of the normal is 1
    m_NegativeNormalCounter = 0;
    m_PositiveNormalCounter = 0;

    //Iterating over each polygon
    for( existingPolys->InitTraversal(); existingPolys->GetNextCell(cellSize, cell);)
    {
      if(cellSize < 3)continue;

      //First we calculate the current polygon's normal
      double polygonNormal[3] = {0.0};

      double p1[3];
      double p2[3];

      double v1[3];
      double v2[3];

      existingPoints->GetPoint(cell[0], p1);
      unsigned int index = cellSize*0.5;
      existingPoints->GetPoint(cell[index], p2);

      v1[0] = p2[0]-p1[0];
      v1[1] = p2[1]-p1[1];
      v1[2] = p2[2]-p1[2];

      for (unsigned int k = 2; k < cellSize; k++)
      {
        index = cellSize*0.25;
        existingPoints->GetPoint(cell[index], p1);
        index = cellSize*0.75;
        existingPoints->GetPoint(cell[index], p2);

        v2[0] = p2[0]-p1[0];
        v2[1] = p2[1]-p1[1];
        v2[2] = p2[2]-p1[2];

        vtkMath::Cross(v1,v2,polygonNormal);
        if (vtkMath::Norm(polygonNormal) != 0)
          break;
      }

      vtkMath::Normalize(polygonNormal);

      //Now we start computing the normal for each vertex

      double vertexNormalTemp[3];
      existingPoints->GetPoint(cell[0], p1);
      existingPoints->GetPoint(cell[1], p2);

      v1[0] = p2[0]-p1[0];
      v1[1] = p2[1]-p1[1];
      v1[2] = p2[2]-p1[2];

      vtkMath::Cross(v1,polygonNormal,vertexNormalTemp);

      vtkMath::Normalize(vertexNormalTemp);

      double vertexNormal[3];

      for (unsigned j = 0; j < cellSize-2; j++)
      {
        existingPoints->GetPoint(cell[j+1], p1);
        existingPoints->GetPoint(cell[j+2], p2);

        v1[0] = p2[0]-p1[0];
        v1[1] = p2[1]-p1[1];
        v1[2] = p2[2]-p1[2];

        vtkMath::Cross(v1,polygonNormal,vertexNormal);

        vtkMath::Normalize(vertexNormal);

        double finalNormal[3];

        finalNormal[0] = (vertexNormal[0] + vertexNormalTemp[0])*0.5;
        finalNormal[1] = (vertexNormal[1] + vertexNormalTemp[1])*0.5;
        finalNormal[2] = (vertexNormal[2] + vertexNormalTemp[2])*0.5;

        //Here we determine the direction of the normal
        if (j == 0 && m_SegmentationBinaryImage)
        {
          Point3D worldCoord;
          worldCoord[0] = p1[0]+finalNormal[0]*m_MaxSpacing;
          worldCoord[1] = p1[1]+finalNormal[1]*m_MaxSpacing;
          worldCoord[2] = p1[2]+finalNormal[2]*m_MaxSpacing;

          double val = 0.0;
          mitk::ImagePixelReadAccessor<unsigned char> readAccess(m_SegmentationBinaryImage);
          mitk::Index3D idx;
          m_SegmentationBinaryImage->GetGeometry()->WorldToIndex(worldCoord, idx);
          val = readAccess.GetPixelByIndexSafe(idx);

          if (val == 1.0)
          {
              ++m_PositiveNormalCounter;
          }
          else
          {
              ++m_NegativeNormalCounter;
          }
        }

        vertexNormalTemp[0] = vertexNormal[0];
        vertexNormalTemp[1] = vertexNormal[1];
        vertexNormalTemp[2] = vertexNormal[2];

        vtkIdType id = cell[j+1];
        normals->SetTuple(id,finalNormal);
      }

      existingPoints->GetPoint(cell[0], p1);
      existingPoints->GetPoint(cell[1], p2);

      v1[0] = p2[0]-p1[0];
      v1[1] = p2[1]-p1[1];
      v1[2] = p2[2]-p1[2];

      vtkMath::Cross(v1,polygonNormal,vertexNormal);

      vtkMath::Normalize(vertexNormal);

      vertexNormal[0] = (vertexNormal[0] + vertexNormalTemp[0])*0.5;
      vertexNormal[1] = (vertexNormal[1] + vertexNormalTemp[1])*0.5;
      vertexNormal[2] = (vertexNormal[2] + vertexNormalTemp[2])*0.5;

      vtkIdType id = cell[0];
      normals->SetTuple(id,vertexNormal);
      id = cell[cellSize-1];
      normals->SetTuple(id,vertexNormal);

      int normalDirection(-1);

      if(m_NegativeNormalCounter < m_PositiveNormalCounter)
      {
          normalDirection = 1;
      }

      for(unsigned int n = 0; n < normals->GetNumberOfTuples(); n++)
      {
          double normal[3];
          normals->GetTuple(n,normal);
          normal[0] = normalDirection*normal[0];
          normal[1] = normalDirection*normal[1];
          normal[2] = normalDirection*normal[2];
      }


    }//end for all cells

    Surface::Pointer surface = this->GetOutput(i);
    surface->GetVtkPolyData()->GetCellData()->SetNormals(normals);
  }//end for all inputs

  //Setting progressbar
  if (this->m_UseProgressBar)
    mitk::ProgressBar::GetInstance()->Progress(this->m_ProgressStepSize);
}
Beispiel #10
0
/**
 * \brief Updates the ODF detail widgets for the voxel under the current crosshair.
 *
 * Reads the image attached to m_ImageNode at the crosshair position and, depending
 * on whether it is a mitk::QBallImage or a mitk::TensorImage, fills the per-direction
 * value list and a textual overview (coordinates, GFA/FA, min/max/mean/sum, main
 * diffusion direction), then renders the (normalized) ODF glyph. All involved
 * widgets are hidden when no valid node is selected, the crosshair lies outside the
 * image geometry, or pixel access is locked by another consumer.
 */
void QmitkODFDetailsView::UpdateOdf()
{

  try
  {
    m_Controls->m_OverviewBox->setVisible(true);
    // Nothing selected or no render window available: hide everything and bail out.
    if (m_ImageNode.IsNull() || !m_MultiWidget)
    {
      m_Controls->m_ODFRenderWidget->setVisible(false);
      m_Controls->m_OdfBox->setVisible(false);
      m_Controls->m_OverviewBox->setVisible(false);
      return;
    }

    // restore the input image label ( needed in case the last run resulted into an exception )
    m_Controls->m_InputImageLabel->setText(m_ImageNode->GetName().c_str());

    // ODF Normalization Property (falls back to the previous m_OdfNormalization if absent)
    mitk::OdfNormalizationMethodProperty* nmp = dynamic_cast<mitk::OdfNormalizationMethodProperty*>(m_ImageNode->GetProperty( "Normalization" ));
    if(nmp)
      m_OdfNormalization = nmp->GetNormalization();

    // (Re)create the rendering helpers for the ODF glyph.
    m_TemplateOdf = itk::OrientationDistributionFunction<float,QBALL_ODFSIZE>::GetBaseMesh();
    m_OdfTransform = vtkSmartPointer<vtkTransform>::New();
    m_OdfTransform->Identity();
    m_OdfVals = vtkSmartPointer<vtkDoubleArray>::New();
    m_OdfSource = vtkSmartPointer<vtkOdfSource>::New();
    itk::OrientationDistributionFunction<double, QBALL_ODFSIZE> odf;

    // Map the crosshair world position into continuous voxel index space.
    mitk::Point3D world = m_MultiWidget->GetCrossPosition();
    mitk::Point3D index;
    mitk::Image::Pointer img = dynamic_cast<mitk::Image*>(m_ImageNode->GetData());
    unsigned int *img_dimension = img->GetDimensions();
    img->GetGeometry()->WorldToIndex(world, index);

    // Accumulators for the overview statistics over all ODF components.
    float sum = 0;
    float max = itk::NumericTraits<float>::NonpositiveMin();
    float min = itk::NumericTraits<float>::max();
    QString values;
    QString overviewText;

    // check if dynamic_cast successful and if the crosshair position is inside of the geometry of the ODF data
    // otherwise possible crash for a scenario with multiple nodes
    if (dynamic_cast<mitk::QBallImage*>(m_ImageNode->GetData()) && ( m_ImageNode->GetData()->GetGeometry()->IsInside(world) ) )
    {
      m_Controls->m_ODFRenderWidget->setVisible(true);
      m_Controls->m_OdfBox->setVisible(true);

      try
      {
        const mitk::QBallImage* qball_image = dynamic_cast< mitk::QBallImage* >( m_ImageNode->GetData() );

        // get access to the qball image data with explicitly allowing exceptions if memory locked
        mitk::ImageReadAccessor readAccess( qball_image, qball_image->GetVolumeData(0), mitk::ImageAccessorBase::ExceptionIfLocked );
        const float* qball_cPtr = static_cast< const float*>(readAccess.GetData());

        // Round the continuous index to the nearest voxel.
        OdfVectorImgType::IndexType ind;
        ind[0] = (int)(index[0]+0.5);
        ind[1] = (int)(index[1]+0.5);
        ind[2] = (int)(index[2]+0.5);

        // pixel size = QBALL_ODFSIZE
        // position offset = standard offset
        unsigned int offset_to_data = QBALL_ODFSIZE * (ind[2] * img_dimension[1] * img_dimension[0] + ind[1] * img_dimension[0] + ind[0]);
        const float *pixel_data = qball_cPtr + offset_to_data;

        // Copy the ODF components and collect min/max/sum for the overview text.
        for (int i=0; i<QBALL_ODFSIZE; i++)
        {
          float val = pixel_data[i];
          odf.SetNthComponent(i, val);
          values += QString::number(i)+": "+QString::number(val)+"\n";
          sum += val;
          if (val>max)
            max = val;
          if (val<min)
            min = val;
        }
        float mean = sum/QBALL_ODFSIZE;

        QString pos = QString::number(ind[0])+", "+QString::number(ind[1])+", "+QString::number(ind[2]);
        overviewText += "Coordinates: "+pos+"\n";
        overviewText += "GFA: "+QString::number(odf.GetGeneralizedFractionalAnisotropy())+"\n";
        overviewText += "Sum: "+QString::number(sum)+"\n";
        overviewText += "Mean: "+QString::number(mean)+"\n";
        overviewText += "Min: "+QString::number(min)+"\n";
        overviewText += "Max: "+QString::number(max)+"\n";
        vnl_vector_fixed<double, 3> pd = odf.GetDirection(odf.GetPrincipleDiffusionDirection());
        overviewText += "Main Diffusion:\n     "+QString::number(pd[0])+"\n     "+QString::number(pd[1])+"\n     "+QString::number(pd[2])+"\n";

        m_Controls->m_OdfValuesTextEdit->setText(values);
        m_Controls->m_OverviewTextEdit->setVisible(true);
      }
      catch( mitk::Exception &e )
      {
        // Pixel data currently locked by another accessor: hide widgets instead of crashing.
        MITK_WARN << "LOCKED : " << e.what();
        m_Controls->m_ODFRenderWidget->setVisible(false);
        m_Controls->m_OdfBox->setVisible(false);
        m_Controls->m_OverviewTextEdit->setVisible(false);

        // reset the selection
        m_Controls->m_InputImageLabel->setText("<font color='green'>Click image to restore rendering!</font>");
      }
    }
    else if (dynamic_cast<mitk::TensorImage*>(m_ImageNode->GetData()))
    {
      // Tensor image: render the glyph but the per-direction value box stays hidden.
      m_Controls->m_ODFRenderWidget->setVisible(true);
      m_Controls->m_OdfBox->setVisible(false);


      const mitk::TensorImage* qball_image = dynamic_cast< mitk::TensorImage*>(m_ImageNode->GetData());

      // pixel access block
      try
      {
        // get access to the qball image data with explicitly allowing exceptions if memory locked
        mitk::ImageReadAccessor readAccess( qball_image, qball_image->GetVolumeData(0), mitk::ImageAccessorBase::ExceptionIfLocked );
        const float* qball_cPtr = static_cast< const float*>(readAccess.GetData());

        // Round the continuous index to the nearest voxel.
        TensorImageType::IndexType ind;
        ind[0] = (int)(index[0]+0.5);
        ind[1] = (int)(index[1]+0.5);
        ind[2] = (int)(index[2]+0.5);

        // 6 - tensorsize
        // remaining computation - standard offset
        unsigned int offset_to_data = 6 * (ind[2] * img_dimension[1] * img_dimension[0] + ind[1] * img_dimension[0] + ind[0]);
        const float *pixel_data = qball_cPtr + offset_to_data;

        // Six unique elements of the symmetric 3x3 diffusion tensor.
        float tensorelems[6] = {
          *(pixel_data    ),
          *(pixel_data + 1),
          *(pixel_data + 2),
          *(pixel_data + 3),
          *(pixel_data + 4),
          *(pixel_data + 5),
        };

        // Derive the ODF to render from the tensor.
        itk::DiffusionTensor3D<float> tensor(tensorelems);
        odf.InitFromTensor(tensor);

        /** Array of eigen-values. */
        typedef itk::FixedArray<float, 3> EigenValuesArrayType;
        /** Matrix of eigen-vectors. */
        typedef itk::Matrix<float, 3, 3> MatrixType;
        typedef itk::Matrix<float, 3, 3> EigenVectorsMatrixType;

        EigenValuesArrayType eigenValues;
        EigenVectorsMatrixType eigenvectors;

        QString pos = QString::number(ind[0])+", "+QString::number(ind[1])+", "+QString::number(ind[2]);
        overviewText += "Coordinates: "+pos+"\n";
        overviewText += "FA: "+QString::number(tensor.GetFractionalAnisotropy())+"\n";
        overviewText += "RA: "+QString::number(tensor.GetRelativeAnisotropy())+"\n";
        overviewText += "Trace: "+QString::number(tensor.GetTrace())+"\n";
        tensor.ComputeEigenAnalysis(eigenValues,eigenvectors);
        // Eigenvalues are reported largest-first (row 2 holds the principal direction).
        overviewText += "Eigenvalues:\n     "+QString::number(eigenValues[2])+"\n     "+QString::number(eigenValues[1])+"\n     "+QString::number(eigenValues[0])+"\n";
        overviewText += "Main Diffusion:\n     "+QString::number(eigenvectors(2, 0))+"\n     "+QString::number(eigenvectors(2, 1))+"\n     "+QString::number(eigenvectors(2, 2))+"\n";
        overviewText += "Values:\n     "+QString::number(tensorelems[0])+"\n     "+QString::number(tensorelems[1])+"\n     "+QString::number(tensorelems[2])+"\n     "+QString::number(tensorelems[3])+"\n     "+QString::number(tensorelems[4])+"\n     "+QString::number(tensorelems[5])+"\n     "+"\n";
        m_Controls->m_OverviewTextEdit->setVisible(true);
      }
      // end pixel access block
      catch(mitk::Exception &e )
      {
        // Pixel data currently locked by another accessor: hide widgets instead of crashing.
        MITK_WARN << "LOCKED : " << e.what();
        m_Controls->m_ODFRenderWidget->setVisible(false);
        m_Controls->m_OdfBox->setVisible(false);
        m_Controls->m_OverviewTextEdit->setVisible(false);

        // reset the selection
        m_Controls->m_InputImageLabel->setText("<font color='green'>Click image to restore rendering!</font>");
      }
    }
    else
    {
      // Unsupported image type or crosshair outside the geometry.
      m_Controls->m_ODFRenderWidget->setVisible(false);
      m_Controls->m_OdfBox->setVisible(false);
      overviewText += "Please reinit image geometry.\n";
    }

    // proceed only if the render widget is visible which indicates that the
    // preceding computations were successful
    if( m_Controls->m_ODFRenderWidget->isVisible() )
    {
      m_Controls->m_ODFDetailsWidget->SetParameters(odf);

      // Apply the normalization selected via the image's "Normalization" property.
      switch(m_OdfNormalization)
      {
      case 0:
        odf = odf.MinMaxNormalize();
        break;
      case 1:
        odf = odf.MaxNormalize();
        break;
      case 2:
        // NOTE(review): identical to case 1 — confirm whether case 2 is meant to
        // leave the ODF unnormalized instead of calling MaxNormalize() again.
        odf = odf.MaxNormalize();
        break;
      default:
        odf = odf.MinMaxNormalize();
      }

      m_Controls->m_ODFRenderWidget->GenerateODF(odf);
      m_Controls->m_OverviewTextEdit->setText(overviewText.toStdString().c_str());
    }
  }
  catch(...)
  {
    // Catch-all: corrupted data must not take down the whole view.
    QMessageBox::critical(0, "Error", "Data could not be analyzed. The image might be corrupted.");
  }
}
// Beispiel #11
// 0
/**
 * @brief Folder Registration mini app.
 *
 * Registers every image in the input folder that matches the "moving" suffix to a
 * fixed (reference) image, writes the registered images to the output folder, and
 * applies the same transformation to all "derived" resources (e.g. segmentations)
 * belonging to each moving image. Optionally resamples to a given spacing.
 *
 * @return EXIT_SUCCESS on success, EXIT_FAILURE on invalid arguments or when no
 *         unique reference image can be determined.
 */
int main( int argc, char* argv[] )
{
  mitkCommandLineParser parser;
  parser.setArgumentPrefix("--","-");

  parser.setTitle("Folder Registration");
  parser.setCategory("Preprocessing Tools");
  parser.setDescription("For detail description see http://docs.mitk.org/nightly/DiffusionMiniApps.html");
  parser.setContributor("MIC");

  // Add command line argument names
  parser.addArgument("help", "h",mitkCommandLineParser::Bool, "Help", "Show this help text");
  //parser.addArgument("usemask", "u", QVariant::Bool, "Use segmentations (derived resources) to exclude areas from registration metrics");
  parser.addArgument("input", "i", mitkCommandLineParser::InputDirectory, "Input:", "Input folder",us::Any(),false);
  parser.addArgument("output", "o", mitkCommandLineParser::OutputDirectory, "Output:", "Output folder (ending with /)",us::Any(),false);
  parser.addArgument("fixed", "f", mitkCommandLineParser::String, "Fixed images:", "Suffix for fixed image (if none is supplied first file matching moving pattern is chosen)",us::Any(),true);
  parser.addArgument("moving", "m", mitkCommandLineParser::String, "Moving images:", "Suffix for moving images",us::Any(),false);
  parser.addArgument("derived", "d", mitkCommandLineParser::String, "Derived resources:", "Derived resources suffixes (replaces suffix for moving images); comma separated",us::Any(),true);
  parser.addArgument("silent", "s", mitkCommandLineParser::Bool, "Silent:", "No xml progress output.");
  parser.addArgument("resample", "r", mitkCommandLineParser::String, "Resample (x,y,z)mm:", "Resample provide x,y,z spacing in mm (e.g. -r 1,1,3), is not applied to tensor data",us::Any());
  parser.addArgument("binary", "b", mitkCommandLineParser::Bool, "Binary:", "Speficies that derived resource are binary (interpolation using nearest neighbor)",us::Any());
  parser.addArgument("correct-origin", "c", mitkCommandLineParser::Bool, "Origin correction:", "Correct for large origin displacement. Use switch when you reveive:  Joint PDF summed to zero ",us::Any());
  // NOTE(review): short flag "s" collides with "silent" above — confirm the intended short name.
  parser.addArgument("sinc-int", "s", mitkCommandLineParser::Bool, "Windowed-sinc interpolation:", "Use windowed-sinc interpolation (3) instead of linear interpolation ",us::Any());


  std::map<std::string, us::Any> parsedArgs = parser.parseArguments(argc, argv);

  // Handle special arguments
  bool silent = false;
  bool isBinary = false;
  bool alignOrigin = false;

  {
    // No arguments at all (or parse failure): nothing to do.
    if (parsedArgs.size() == 0)
    {
      return EXIT_FAILURE;
    }

    if (parsedArgs.count("silent"))
      silent = true;

    if (parsedArgs.count("binary"))
      isBinary = true;

    if (parsedArgs.count("correct-origin"))
      alignOrigin = true;

    // Show a help message
    if ( parsedArgs.count("help") || parsedArgs.count("h"))
    {
      std::cout << parser.helpText();
      return EXIT_SUCCESS;
    }
  }
  std::string refPattern = "";
  bool useFirstMoving = false;
  std::string movingImgPattern = us::any_cast<std::string>(parsedArgs["moving"]);

  if (parsedArgs.count("fixed"))
  {
    refPattern = us::any_cast<std::string>(parsedArgs["fixed"]);
  }
  else
  {
    // No fixed pattern supplied: the first file matching the moving pattern
    // serves as the reference image.
    useFirstMoving = true;
    refPattern = movingImgPattern;
  }

  std::string outputPath = us::any_cast<std::string>(parsedArgs["output"]);

  std::string inputPath = us::any_cast<std::string>(parsedArgs["input"]);

  // if derived sources pattern is provided, populate list with possible filename postfixes
  std::vector<std::string> derPatterns;

  if (parsedArgs.count("derived") || parsedArgs.count("d") )
  {
    std::string arg =  us::any_cast<std::string>(parsedArgs["derived"]);
    derPatterns = split(arg ,',');
  }


  std::vector<std::string> spacings;
  float spacing[] = { 0.0f, 0.0f, 0.0f };
  bool doResampling = false;
  // BUGFIX: previously this condition also tested parsedArgs.count("d") — a
  // copy-paste from the "derived" block above. "d" is the short flag of
  // "derived", so supplying only -d triggered an any_cast on the missing
  // "resample" entry.
  if (parsedArgs.count("resample"))
  {
    std::string arg =  us::any_cast<std::string>(parsedArgs["resample"]);
    spacings = split(arg ,',');
    // Robustness: a malformed spacing string previously caused an uncaught
    // std::out_of_range in .at().
    if (spacings.size() < 3)
    {
      MITK_ERROR << "Resampling requires three comma-separated spacing values (x,y,z), got: " << arg;
      return EXIT_FAILURE;
    }
    // BUGFIX: use atof instead of atoi — spacings are floating point mm values
    // (atoi silently truncated e.g. "0.5" to 0).
    spacing[0] = atof(spacings.at(0).c_str());
    spacing[1] = atof(spacings.at(1).c_str());
    spacing[2] = atof(spacings.at(2).c_str());
    doResampling = true;
  }

  MITK_INFO << "Input Folder : " << inputPath;
  MITK_INFO << "Looking for reference image ...";
  FileListType referenceFileList = CreateFileList(inputPath,refPattern);

  // With an explicit fixed pattern exactly one match is required; with the
  // moving-pattern fallback at least one match is required.
  if ((!useFirstMoving && referenceFileList.size() != 1) || (useFirstMoving && referenceFileList.size() == 0))
  {
    MITK_ERROR << "None or more than one possible reference images (" << refPattern <<") found. Exiting." << referenceFileList.size();
    MITK_INFO  << "Choose a fixed arguement that is unique in the given folder!";
    return EXIT_FAILURE;
  }

  std::string referenceFileName = referenceFileList.at(0);

  MITK_INFO << "Loading Reference (fixed) image: " << referenceFileName;
  std::string fileType = itksys::SystemTools::GetFilenameExtension(referenceFileName);
  mitk::Image::Pointer refImage = ExtractFirstTS(mitk::IOUtil::Load<mitk::Image>(referenceFileName), fileType);
  mitk::Image::Pointer resampleReference = nullptr;
  if (doResampling)
  {
    refImage = ResampleBySpacing(refImage,spacing);
    resampleReference = refImage;
  }

  if (refImage.IsNull())
    MITK_ERROR << "Loaded fixed image is nullptr";

  // Copy reference image to destination
  std::string savePathAndFileName = GetSavePath(outputPath, referenceFileName);

  mitk::IOUtil::Save(refImage, savePathAndFileName);

  // Copy all derived resources also to output folder, adding _reg suffix
  referenceFileList = CreateDerivedFileList(referenceFileName, movingImgPattern,derPatterns);
  CopyResources(referenceFileList, outputPath);

  std::string derivedResourceFilename;
  mitk::Image::Pointer referenceMask = nullptr; // union of all segmentations

  if (!silent)
  {
    // XML Output to report progress
    std::cout << "<filter-start>";
    std::cout << "<filter-name>Batched Registration</filter-name>";
    std::cout << "<filter-comment>Starting registration ... </filter-comment>";
    std::cout << "</filter-start>";
  }

  // Now iterate over all files and register them to the reference image,
  // also register derived resources based on file patterns
  // ------------------------------------------------------------------------------

  // Create File list

  FileListType movingImagesList = CreateFileList(inputPath, movingImgPattern);

  for (unsigned int i =0; i < movingImagesList.size(); i++)
  {
    std::string fileMorphName = movingImagesList.at(i);
    if (fileMorphName == referenceFileName)
    {

      // do not process reference image again
      continue;
    }
    MITK_INFO << "Processing image " << fileMorphName;

    // 1 Register morphological file to reference image

    if (!itksys::SystemTools::FileExists(fileMorphName.c_str()))
    {
      // BUGFIX: message previously read "does not exit", which was misleading.
      MITK_WARN << "File does not exist. Skipping entry.";
      continue;
    }
    // Origin of images is cancelled
    // TODO make this optional!!
    double transf[6];
    double offset[3];
    {
      std::string fileType = itksys::SystemTools::GetFilenameExtension(fileMorphName);
      mitk::Image::Pointer movingImage = ExtractFirstTS(mitk::IOUtil::Load<mitk::Image>(fileMorphName), fileType);

      if (movingImage.IsNull())
        MITK_ERROR << "Loaded moving image is nullptr";

      // Store transformation,  apply it to morph file
      MITK_INFO << "----------Registering moving image to reference----------";

      mitk::RegistrationWrapper::GetTransformation(refImage, movingImage, transf, offset, alignOrigin, referenceMask);
      mitk::RegistrationWrapper::ApplyTransformationToImage(movingImage, transf,offset, resampleReference); // , resampleImage

      savePathAndFileName = GetSavePath(outputPath, fileMorphName);
      if (fileType == ".dwi")
        fileType = "dwi";

      {
        // Sanity check: verify the transformed image still carries pixel data.
        mitk::ImageReadAccessor readAccess(movingImage);

        if (readAccess.GetData() == nullptr)
          MITK_INFO <<"POST DATA is null";
      }

      mitk::IOUtil::Save(movingImage, savePathAndFileName);
    }

    if (!silent)
    {
      std::cout << "<filter-progress-text progress=\"" <<
                   (float)i / (float)movingImagesList.size()
                << "\" >.</filter-progress-text>";
    }

    // Now parse all derived resource and apply the above calculated transformation to them
    // ------------------------------------------------------------------------------------

    FileListType fList = CreateDerivedFileList(fileMorphName, movingImgPattern,derPatterns);
    if (fList.size() > 0)
      MITK_INFO << "----------DERIVED RESOURCES ---------";
    for (unsigned int j=0; j < fList.size(); j++)
    {
      derivedResourceFilename = fList.at(j);
      MITK_INFO << "----Processing derived resorce " << derivedResourceFilename << " ...";
      std::string fileType = itksys::SystemTools::GetFilenameExtension(derivedResourceFilename);
      mitk::Image::Pointer derivedMovingResource = ExtractFirstTS(mitk::IOUtil::Load<mitk::Image>(derivedResourceFilename), fileType);
      // Apply transformation to derived resource, treat derived resource as binary
      mitk::RegistrationWrapper::ApplyTransformationToImage(derivedMovingResource, transf,offset, resampleReference,isBinary);

      savePathAndFileName = GetSavePath(outputPath, derivedResourceFilename);
      mitk::IOUtil::Save(derivedMovingResource, savePathAndFileName);
    }
  }

  if (!silent)
    std::cout << "<filter-end/>";
  return EXIT_SUCCESS;
}