Example #1
    void IntensityForce::ComputeIntensityForce(ParticleSystem* system) {
        ParticleSubjectArray& shapes = system->GetSubjects();
        const ParticleSubject& meanSubject = system->GetMeanSubject();
        
        const int nSubj = shapes.size();
        const int nPoints = shapes[0].GetNumberOfPoints();
        const int nRadius = 2;
        const int nAttrsPerPoint = ::pow(2*nRadius+1, __Dim);
        const int nAttrs = nPoints * nAttrsPerPoint;

        DoubleImageVector warpedImages(nSubj);
        VectorImageVector gradImages(nSubj);

        double* attrs = new double[nSubj * nAttrs];
        double* gradAttrs = new double[nSubj * nAttrs * __Dim];
        double* force = new double[nSubj * nPoints * __Dim];

        // first create a warped image into the mean transform space
        // second create a gradient vector image per subject
        // third extract attributes (features)
        for (int i = 0; i < nSubj; i++) {
            ParticleSubject& subject = shapes[i];
            ParticleBSpline bspline;
            bspline.SetReferenceImage(m_ImageContext->GetLabel(i));
            bspline.EstimateTransform(meanSubject, subject);
            FieldTransformType::Pointer fieldTransform = bspline.GetTransform();
            CompositeTransformType::Pointer transform = CompositeTransformType::New();
            transform->AddTransform(fieldTransform);
            subject.m_InverseTransform = transform;
            warpedImages[i] = bspline.WarpImage(m_ImageContext->GetDoubleImage(i));
            GradientFilterType::Pointer grad = GradientFilterType::New();
            grad->SetInput(warpedImages[i]);
            grad->Update();
            gradImages[i] = grad->GetOutput();

            // extract attributes
            DoubleImage::SizeType radius;
            radius.Fill(nRadius);
            DoubleImageNeighborhoodIteratorType iiter(radius, warpedImages[i], warpedImages[i]->GetBufferedRegion());
            VectorImageNeighborhoodIteratorType giter(radius, gradImages[i], gradImages[i]->GetBufferedRegion());

            // each thread needs its own copies of the neighborhood iterators and index
            #pragma omp parallel for firstprivate(iiter, giter)
            for (int j = 0; j < nPoints; j++) {
                Particle& par = subject.m_Particles[j];
                DoubleImage::IndexType idx;
                fordim(k) {
                    idx[k] = round(par.y[k]);
                }
                iiter.SetLocation(idx);
                giter.SetLocation(idx);

                double* jAttrs = &attrs[i * nAttrs + j * nAttrsPerPoint];
                double* jAttrsGrad = &gradAttrs[(i * nAttrs + j * nAttrsPerPoint) * __Dim];
                const int numAttrs = iiter.Size();
                assert(numAttrs == nAttrsPerPoint);
                for (int k = 0; k < nAttrsPerPoint; k++) {
                    double pixel = iiter.GetPixel(k);
                    VectorType grad = giter.GetPixel(k);
                    jAttrs[k] = pixel;
                    for (int m = 0; m < __Dim; m++) {
                        // index per neighborhood offset so each sampled pixel keeps its own gradient,
                        // matching the layout read back in the force computation below
                        jAttrsGrad[k * __Dim + m] = grad[m];
                    }
                }
            }
        }


        // column sum
        double* sumAttrs = new double[nPoints * nAttrsPerPoint];
        #pragma omp parallel for
        for (int j = 0; j < nAttrs; j++) {
            sumAttrs[j] = 0;
            for (int i = 0; i < nSubj; i++) {
                sumAttrs[j] += attrs[i * nAttrs + j];
            }
        }

        // compute mean differences
        for (int i = 0; i < nSubj; i++) {
            #pragma omp parallel for
            for (int j = 0; j < nAttrs; j++) {
                attrs[i * nAttrs + j] -= (sumAttrs[j] / nSubj);
            }
        }

        // compute force direction
        memset(force, 0, nSubj * nPoints * __Dim * sizeof(double));
        for (int i = 0; i < nSubj; i++) {
            #pragma omp parallel for
            for (int j = 0; j < nPoints; j++) {
                double* forcePtr = &force[(i * nPoints + j) * __Dim];
                for (int k = 0; k < nAttrsPerPoint; k++) {
                    double attr = attrs[i * nAttrs + j * nAttrsPerPoint + k];
                    double* gradAttrPtr = &gradAttrs[(i * nAttrs + j * nAttrsPerPoint + k) * __Dim];
                    for (int m = 0; m < __Dim; m++) {
                        forcePtr[m] += attr * gradAttrPtr[m];
                    }
                }
            }
        }


        // compute force at subject space
        for (int i = 0; i < nSubj; i++) {
            CompositeTransformType::Pointer transform = shapes[i].m_InverseTransform;
            for (int j = 0; j < nPoints; j++) {
                double* forcePtr = &force[(i * nPoints + j) * __Dim];
                CompositeTransformType::InputPointType x;
                fordim(k) {
                    x[k] = shapes[i][j].x[k];
                }
                CompositeTransformType::JacobianType jac;
                jac.set_size(__Dim, __Dim);
                transform->ComputeInverseJacobianWithRespectToPosition(x, jac);
                // apply the transpose of the Jacobian to the force; accumulate into a
                // temporary so components are not overwritten while still being read
                double transformed[__Dim];
                fordim(k) {
                    double ff = 0;
                    fordim(m) {
                        ff += jac[m][k] * forcePtr[m];
                    }
                    transformed[k] = ff;
                }
                fordim(k) {
                    forcePtr[k] = transformed[k];
                }
            }
        }

        for (int i = 0; i < nSubj; i++) {
            ParticleSubject& subj = shapes[i];
            for (int j = 0; j < nPoints; j++) {
                Particle& par = subj[j];
                VNLVector ff(__Dim);
                fordim (k) {
                    const int forceIdx = (i * nPoints + j) * __Dim;
                    ff[k] = force[forceIdx + k];
                }
                ff.normalize();
                par.SubForce(ff.data_block(), m_Coeff);
            }
        }


        delete[] force;
        delete[] attrs;
        delete[] gradAttrs;
        delete[] sumAttrs;
    }
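
The three steps noted in the comments above are: warp each subject image into the mean space, compute its gradient, and sample patch intensities ("attributes") around every particle; the force is then the subject's mean-centered attribute difference weighted by the attribute gradients. Below is a minimal sketch of that core computation on plain arrays, with the ITK warping and transform machinery stripped out; the names are illustrative and not part of the class above.

#include <vector>

// Simplified illustration: nSubj subjects, nPoints particles per subject,
// nAttrsPerPoint sampled intensities per particle, Dim spatial dimensions.
void ComputeIntensityForceSketch(int nSubj, int nPoints, int nAttrsPerPoint, int Dim,
                                 const std::vector<double>& attrs,     // [nSubj][nPoints*nAttrsPerPoint]
                                 const std::vector<double>& gradAttrs, // [nSubj][nPoints*nAttrsPerPoint][Dim]
                                 std::vector<double>& force)           // [nSubj][nPoints][Dim]
{
    const int nAttrs = nPoints * nAttrsPerPoint;

    // column-wise mean of every attribute over all subjects
    std::vector<double> meanAttrs(nAttrs, 0.0);
    for (int j = 0; j < nAttrs; j++) {
        for (int i = 0; i < nSubj; i++) {
            meanAttrs[j] += attrs[i * nAttrs + j];
        }
        meanAttrs[j] /= nSubj;
    }

    // force = sum over sampled attributes of (attribute - mean) * gradient(attribute)
    force.assign(nSubj * nPoints * Dim, 0.0);
    for (int i = 0; i < nSubj; i++) {
        for (int j = 0; j < nPoints; j++) {
            for (int k = 0; k < nAttrsPerPoint; k++) {
                const int a = i * nAttrs + j * nAttrsPerPoint + k;
                const double diff = attrs[a] - meanAttrs[j * nAttrsPerPoint + k];
                for (int m = 0; m < Dim; m++) {
                    force[(i * nPoints + j) * Dim + m] += diff * gradAttrs[a * Dim + m];
                }
            }
        }
    }
}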
Example #2
int main(int argc, char *argv[])
{
	if(argc < 11)
	{
		std::cerr << "Usage: " << argv[0] << " inputFilenames keyPointThreshold splitsPerOctave startingScale eLocation eScale eOrientation gamma distanceMapFilenames distanceThreshold" << std::endl;
		return 1;
	}

	std::string inputFilenamesFilename = argv[1];
	double keyPointIntensityThreshold = atof(argv[2]);
	double dogSplitsPerOctave = atof(argv[3]);
	double startingScale = atof(argv[4]);
	double eLocation = atof(argv[5]);
	double eScale = std::log(atof(argv[6]));
	double eOrientation = atof(argv[7]);
	double gammaValue = atof(argv[8]);

	std::string distanceMapFilenamesFilename = argv[9];
	double extractionDistanceThreshold = atof(argv[10]);
	// load up the set of aligned images
	FilenamesReader::FilenamesType inputFilenames = FilenamesReader::Read(inputFilenamesFilename);
	FilenamesReader::FilenamesType distanceMapFilenames   = FilenamesReader::Read(distanceMapFilenamesFilename);
	ImageVolumeList images;
	RealVolumeList distanceMaps;
	for(unsigned int i = 0; i < inputFilenames.size(); i++)
	{
		ImageVolume::Pointer image = ImageVolumeIO::Read(inputFilenames[i]);
		images.push_back(image);
		RealVolume::Pointer distMap = RealVolumeIO::Read(distanceMapFilenames[i]);
		distanceMaps.push_back(distMap);
	}


	unsigned int sliceToTest = 7;

	// for each slice we want to learn the features
	const unsigned int sliceNum = images.front()->GetLargestPossibleRegion().GetSize()[2];
	for(unsigned int slice = sliceToTest; slice < sliceNum; slice++)
	{

		// get the set of slices that have some image data in them
		ImageSliceList validImages;
		RealSliceList validDistanceMaps;
		
		for(unsigned int im = 0; im < images.size(); im++)
		{
			ImageSlice::Pointer extractedSlice = ImageSlice::New();
			RealSlice::Pointer distanceSlice = RealSlice::New();
			ExtractSlice<ImageVolume, ImageSlice>(images[im], slice, extractedSlice);
			ExtractSlice<RealVolume, RealSlice>(distanceMaps[im], slice, distanceSlice);

			if(ImageContainsData(extractedSlice))
			{
				validDistanceMaps.push_back(distanceSlice);
				validImages.push_back(extractedSlice);
			}
		}

		/*
		if(validImages.size() < 3)
			continue;
		*/

		std::cout << "Slice Num: " << slice << " Image Number: " << validImages.size() << std::endl;



		typedef itk::Vector<double, 2> VectorType;
		typedef itk::Image<VectorType, 2> GradientType;
		typedef filter::HistogramOfGradeintsFeatureExtractor<GradientType> FeatureBuilderType;
		typedef FeatureBuilderType::FeatureType HoGFeatureType;
		std::vector<HoGFeatureType> allFeatures;

		

		unsigned int featureCount = 0;
		for(unsigned int im = 0; im < validImages.size(); im++)
		{
			ImageSlice::Pointer extractedSlice = validImages[im];

			// first we extract all of the keypoints points
			typedef filter::DoGKeyPointExtractor<utils::ImageSlice> ExtractorType;
			ExtractorType::Pointer extractor = ExtractorType::New();
			extractor->SetInput(extractedSlice);
			extractor->SetKeypointThreshold(keyPointIntensityThreshold);
			extractor->SetSplitsPerOctave(dogSplitsPerOctave);
			extractor->SetStartingSigma(startingScale);
			extractor->SetDistanceMap(validDistanceMaps[im]);
			extractor->SetDistanceThreshold(extractionDistanceThreshold);
			extractor->Update();

			// orientate the feature points
			typedef filter::KeyPointOrientator<utils::ImageSlice> Orientator;
			Orientator::Pointer orientator  = Orientator::New();
			orientator->SetInput(extractedSlice);
			orientator->SetKeyPoints(extractor->GetOutput());
			orientator->SetHistogramBins(32);
			orientator->SetSigmaScale(2);
			orientator->SetSampleRadius(5);
			orientator->Update();

			Orientator::OrientatedKeyPointMap orientateKeyPoints = orientator->GetOutput();


			

			// now we go through the features and compute the HOG descriptors
			Orientator::OrientatedKeyPointMap::iterator keyPointIt = orientateKeyPoints.begin();
			std::cout << orientateKeyPoints.size() << std::endl;
			while(keyPointIt != orientateKeyPoints.end())
			{
				double sigma = keyPointIt->first;
				Orientator::OrientatedKeyPointList keyPoints = keyPointIt->second;

				// smooth the image to the sigma level
				typedef itk::DiscreteGaussianImageFilter<utils::ImageSlice, utils::RealSlice> Smoother;
				Smoother::Pointer smoother = Smoother::New();
				smoother->SetInput(extractedSlice);
				smoother->SetVariance(sigma*sigma);
				smoother->UseImageSpacingOn();

				typedef itk::GradientRecursiveGaussianImageFilter<RealSlice, GradientType> GradientFilterType;
				GradientFilterType::Pointer gradientFilter = GradientFilterType::New();
				gradientFilter->SetInput(smoother->GetOutput());
				//gradientFilter->SetSigma(sigma);
				gradientFilter->Update();
				std::cout << "Doing Sigma " << sigma << " Key Point Number: " << keyPoints.size() << std::endl;



				for(unsigned int fnum = 0; fnum < keyPoints.size(); fnum++)
				{
					Orientator::OrientatedKeyPoint keyPoint = keyPoints[fnum];

					// build the tranform
					typedef itk::CenteredRigid2DTransform<double> TransformType;
					TransformType::Pointer transform = TransformType::New();
					transform->SetCenter(keyPoint.location);
					transform->SetAngleInDegrees(360-keyPoint.degrees);

					// extract the patch from the gradient image
					typedef filter::ImagePatchExtractor<GradientType> PatchExtractorType;
					PatchExtractorType::Pointer patchExtractor = PatchExtractorType::New();
					patchExtractor->SetInput(gradientFilter->GetOutput());
					patchExtractor->SetTransform(transform);
					patchExtractor->SetScale(keyPoint.scale*2);

					PatchExtractorType::SizeType patchSize;
					patchSize.Fill(10);
					patchExtractor->SetPatchSize(patchSize);
					patchExtractor->SetInputPoint(keyPoint.location);
					patchExtractor->Update();


					/*
					// validate the keypoint
					typedef filter::StructureTensorKeyPointValidator<utils::ImageSlice> ValidatorType;
					ValidatorType::Pointer validator = ValidatorType::New();
					validator->SetInput(extractedSlice);	
					validator->SetKeyPoint(keyPoint);
					validator->SetRatio(validatorBeta);
					validator->Update();


					bool valid = validator->IsValid();
					*/


					// create the descriptor
					FeatureBuilderType::Pointer builder = FeatureBuilderType::New();
					builder->SetInput(patchExtractor->GetOutput());
					builder->SetOrientationBins(8);
					builder->SetKeyPoint(keyPoint);
					builder->Update();


					// add the feature to the list
					FeatureBuilderType::FeatureType feature = builder->GetOutput();
					feature.featureId = featureCount;
					allFeatures.push_back(feature);

					featureCount++;

				}



				
				keyPointIt++;
			}
		}

		




		std::cout << "Computing Distance Matrix" << std::endl;
		// compute the distance matrix
		typedef utils::DoubleMatrixType MatrixType;
		MatrixType distanceMatrix = MatrixType::Zero(allFeatures.size(), allFeatures.size());
		ComputeDifferenceMatrix(allFeatures, distanceMatrix);


		std::cout << "Grouping Features" << std::endl;
		// now we group the features by their geometry
		typedef filter::FeaturePointGrouper<2> GrouperType;
		GrouperType::Pointer grouper = GrouperType::New();
		grouper->SetInput(allFeatures);
		grouper->SetAngleThreshold(eOrientation);
		grouper->SetLocationThreshold(eLocation);
		grouper->SetScaleThreshold(eScale);
		grouper->Update();





		std::cout << "Creating Clusters" << std::endl;
		GrouperType::FeatureGroupList clusters = grouper->GetOutput();
		std::sort(clusters.begin(), clusters.end());

		GrouperType::FeatureGroupList newClusters;

		for(unsigned int i = 0; i < clusters.size(); i++)
		{
			typedef filter::FeatureClusterLearner<2> ClusterLearnerType;
			ClusterLearnerType::Pointer learner = ClusterLearnerType::New();
			learner->SetInput(clusters[i]);
			learner->SetFeatures(allFeatures);
			learner->SetDistanceMatrix(distanceMatrix);
			learner->SetGamma(gammaValue);
			learner->Update();
			
			ClusterLearnerType::ClusterType newCluster = learner->GetOutput();
			newClusters.push_back(newCluster);



		}

		std::cout << "Culling Clusters" << std::endl;
		
		typedef filter::FeatureClusterCuller<2> Culler;
		Culler::Pointer culler = Culler::New();
		culler->SetInput(newClusters);
		culler->Update();


		Culler::ClusterList culledClusters = culler->GetOutput();
		std::sort(culledClusters.begin(), culledClusters.end());
		for(unsigned int i = 0; i < culledClusters.size(); i++)
		{
			typedef filter::ClusterDistributionGenerator<2> DistributionGenerator;
			DistributionGenerator::Pointer generator = DistributionGenerator::New();
			generator->SetInput(culledClusters[i]);
			generator->Update();

			exit(1);
		}



		/*


		ImageSlice::Pointer extractedSlice = ImageSlice::New();
		ExtractSlice<ImageVolume, ImageSlice>(images[0], sliceToTest, extractedSlice);

		std::vector<std::pair<int, ImageSlice::PointType> > testOut;
		for(unsigned int i = 0; i < culledClusters.size(); i++)
		{
			for(unsigned int j = 0; j < culledClusters[i].clusterMembers.size(); j++)
			{
			
				std::pair<int, ImageSlice::PointType> p(i, culledClusters[i].clusterMembers[j].keyPoint.location);
				testOut.push_back(p);				
			}			
		}




		utils::DoubleMatrixType pOut = utils::DoubleMatrixType::Zero(testOut.size(), 2);
		utils::IntMatrixType iOut = utils::IntMatrixType::Zero(testOut.size(),1);
		for(unsigned int i = 0; i < testOut.size(); i++)
		{
			itk::ContinuousIndex<double, 2> contIndex;
			extractedSlice->TransformPhysicalPointToContinuousIndex(testOut[i].second, contIndex);

			pOut(i,0) = contIndex[0];			
			pOut(i,1) = contIndex[1];

			iOut(i,0) = testOut[i].first;			
		}


	


		utils::MatrixDataSet::Pointer dout = utils::MatrixDataSet::New();
		dout->AddData("locations", pOut);
		dout->AddData("index", iOut);
		utils::MatrixWriter::Pointer writer = utils::MatrixWriter::New();
		writer->SetInput(dout);
		writer->SetFilename("data.hdf5");
		writer->Update();



		exit(1);
		*/

		


		/*
		// compute the affinity matrix between all of the features
		unsigned int numFeatures = allFeatures.size();
		double sum = 0.0;
		int count = 0;
		int max = 0;
		int maxId = 0;
		std::vector<int> counts;

		std::vector<Cluster> allGroups;

		// groupd all the features that have a similar location / scale / orientation
		for(unsigned int i = 0; i < numFeatures; i++)
		{
			Cluster cluster;
			cluster.featureIndex = i;
			cluster.feature = allFeatures[i];
			cluster.e = 0.0;
			cluster.members.push_back(allFeatures[i]);
			
			for(unsigned int j = 0; j < numFeatures; j++)
			{
				if(i == j) continue;
				if(AreSimilar(allFeatures[i], allFeatures[j],
							eLocation, eOrientation, eScale))
				{
					cluster.members.push_back(allFeatures[j]);
				}
			}
			
			counts.push_back(cluster.members.size());
			if(cluster.members.size() > max)
			{
				max = cluster.members.size();
				maxId = i;
			}

			allGroups.push_back(cluster);
			sum += cluster.members.size();
			std::sort(cluster.members.begin(), cluster.members.end(), member_sort);
			count++;
		}



		std::sort(counts.begin(), counts.end());
		for(unsigned int i = 0; i < counts.size(); i++)
		{
			std::cout << counts[i] << std::endl;
		}


		// compute the difference matrix
		utils::DoubleMatrixType diffMat;
		ComputeDifferenceMatrix(allFeatures, diffMat);

		// loop through the groups to form the clusters
		std::vector<Cluster> allClusters;
		for(unsigned int i = 0; i < allGroups.size(); i++)
		{
			Cluster cluster;
			CreateCluster(allGroups[i], allFeatures, diffMat, cluster);
			allClusters.push_back(cluster);
		}

		// remove duplicates
		std::vector<int> toRemove;
		for(unsigned int i = 0; i < allClusters.size(); i++)
		{
			bool duplicate = false;
			for(unsigned int j = i; j < allClusters.size(); j++)
			{
				if(i == j) continue;
				if(allClusters[i].members.size() != allClusters[j].members.size())
					continue;

				bool sameMembers = true;
				for(unsigned int k = 0; k < allClusters[i].members.size(); k++)
				{
					if(allClusters[i].members[k].index != allClusters[j].members[k].index)
					{
						sameMembers = false;				
						break;
					}
				}

				if(sameMembers)
				{
					duplicate = true;
				}

				if(duplicate) break;
			}
			if(duplicate) toRemove.push_back(i);

		}

		
		std::cout << allClusters.size() << std::endl;
		for(unsigned int i = 0; i < toRemove.size(); i++)
		{
	//		allClusters.erase(allGroups.begin()+(toRemove[i]-i));
		}
		std::cout << allClusters.size() << std::endl;

		// trim the clusters
		std::vector<Cluster> trimmedClusters;
		TrimClusters(allClusters, trimmedClusters);
		std::cout << trimmedClusters.size() << std::endl;

		std::vector<std::pair<int, ImageSlice::PointType> > testOut;


		for(unsigned int i = 0; i < trimmedClusters.size(); i++)
		{
			for(unsigned int j = 0; j < trimmedClusters[i].members.size(); j++)
			{
				std::pair<int, ImageSlice::PointType> p(i, trimmedClusters[i].members[j].location);
				testOut.push_back(p);				
			}			
			std::cout << trimmedClusters[i].members.size() << std::endl;
		}

		ImageSlice::Pointer extractedSlice = ImageSlice::New();
		ExtractSlice<ImageVolume, ImageSlice>(images[0], sliceToTest, extractedSlice);


		utils::DoubleMatrixType pOut = utils::DoubleMatrixType::Zero(testOut.size(), 2);
		utils::IntMatrixType iOut = utils::IntMatrixType::Zero(testOut.size(),1);
		for(unsigned int i = 0; i < testOut.size(); i++)
		{
			itk::ContinuousIndex<double, 2> contIndex;
			extractedSlice->TransformPhysicalPointToContinuousIndex(testOut[i].second, contIndex);

			pOut(i,0) = contIndex[0];			
			pOut(i,1) = contIndex[1];
			
			// compute the image indexes


			
	
			iOut(i,0) = testOut[i].first;			
		}


	


		utils::MatrixDataSet::Pointer dout = utils::MatrixDataSet::New();
		dout->AddData("locations", pOut);
		dout->AddData("index", iOut);
		utils::MatrixWriter::Pointer writer = utils::MatrixWriter::New();
		writer->SetInput(dout);
		writer->SetFilename("data.hdf5");
		writer->Update();
		exit(1);
		*/
	}

	return 0;
}
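
ComputeDifferenceMatrix and the descriptor layout of HoGFeatureType are not shown in this example. A minimal sketch of what such a helper might do, assuming each feature exposes its histogram as a flat std::vector<double> and the matrix stores pairwise Euclidean distances (the Feature struct below is hypothetical):

#include <cmath>
#include <cstddef>
#include <vector>

struct Feature
{
	std::vector<double> descriptor; // hypothetical stand-in for HoGFeatureType's histogram
};

void ComputeDifferenceMatrix(const std::vector<Feature>& features,
                             std::vector<std::vector<double> >& distanceMatrix)
{
	const std::size_t n = features.size();
	distanceMatrix.assign(n, std::vector<double>(n, 0.0));
	for(std::size_t i = 0; i < n; i++)
	{
		for(std::size_t j = i + 1; j < n; j++)
		{
			double sum = 0.0;
			for(std::size_t k = 0; k < features[i].descriptor.size(); k++)
			{
				const double d = features[i].descriptor[k] - features[j].descriptor[k];
				sum += d * d;
			}
			const double dist = std::sqrt(sum);
			distanceMatrix[i][j] = dist; // the distance matrix is symmetric
			distanceMatrix[j][i] = dist;
		}
	}
}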
void CriminisiInpainting::ComputeBoundaryNormals()
{
  try
  {
    // Blur the mask, compute the gradient, then keep the normals only at the original mask boundary
    
    if(this->DebugImages)
      {
      Helpers::WriteImage<UnsignedCharScalarImageType>(this->BoundaryImage, "Debug/ComputeBoundaryNormals.BoundaryImage.mha");
      Helpers::WriteImage<Mask>(this->CurrentMask, "Debug/ComputeBoundaryNormals.CurrentMask.mha");
      }
      
    // Blur the mask
    typedef itk::DiscreteGaussianImageFilter< Mask, FloatScalarImageType >  BlurFilterType;
    BlurFilterType::Pointer gaussianFilter = BlurFilterType::New();
    gaussianFilter->SetInput(this->CurrentMask);
    gaussianFilter->SetVariance(2);
    gaussianFilter->Update();

    if(this->DebugImages)
      {
      Helpers::WriteImage<FloatScalarImageType>(gaussianFilter->GetOutput(), "Debug/ComputeBoundaryNormals.BlurredMask.mha");
      }

    // Compute the gradient of the blurred mask
    typedef itk::GradientImageFilter< FloatScalarImageType, float, float>  GradientFilterType;
    GradientFilterType::Pointer gradientFilter = GradientFilterType::New();
    gradientFilter->SetInput(gaussianFilter->GetOutput());
    gradientFilter->Update();

    if(this->DebugImages)
      {
      Helpers::WriteImage<FloatVector2ImageType>(gradientFilter->GetOutput(), "Debug/ComputeBoundaryNormals.BlurredMaskGradient.mha");
      }

    // Only keep the normals at the boundary
    typedef itk::MaskImageFilter< FloatVector2ImageType, UnsignedCharScalarImageType, FloatVector2ImageType > MaskFilterType;
    MaskFilterType::Pointer maskFilter = MaskFilterType::New();
    //maskFilter->SetInput1(gradientFilter->GetOutput());
    //maskFilter->SetInput2(this->BoundaryImage);
    maskFilter->SetInput(gradientFilter->GetOutput());
    maskFilter->SetMaskImage(this->BoundaryImage);
    maskFilter->Update();

    if(this->DebugImages)
      {
      Helpers::WriteImage<FloatVector2ImageType>(maskFilter->GetOutput(), "Debug/ComputeBoundaryNormals.BoundaryNormalsUnnormalized.mha");
      }
      
    //this->BoundaryNormals = maskFilter->GetOutput();
    //this->BoundaryNormals->Graft(maskFilter->GetOutput());
    Helpers::DeepCopy<FloatVector2ImageType>(maskFilter->GetOutput(), this->BoundaryNormals);

    // Normalize the vectors because we just care about their direction (the Data term computation calls for the normalized boundary normal)
    itk::ImageRegionIterator<FloatVector2ImageType> boundaryNormalsIterator(this->BoundaryNormals, this->BoundaryNormals->GetLargestPossibleRegion());
    itk::ImageRegionConstIterator<UnsignedCharScalarImageType> boundaryIterator(this->BoundaryImage, this->BoundaryImage->GetLargestPossibleRegion());

    while(!boundaryNormalsIterator.IsAtEnd())
      {
      if(boundaryIterator.Get()) // The pixel is on the boundary
        {
        FloatVector2ImageType::PixelType p = boundaryNormalsIterator.Get();
        p.Normalize();
        boundaryNormalsIterator.Set(p);
        }
      ++boundaryNormalsIterator;
      ++boundaryIterator;
      }

    if(this->DebugImages)
      {
      Helpers::WriteImage<FloatVector2ImageType>(this->BoundaryNormals, "Debug/ComputeBoundaryNormals.BoundaryNormals.mha");
      }
  }
  catch( itk::ExceptionObject & err )
  {
    std::cerr << "ExceptionObject caught in ComputeBoundaryNormals!" << std::endl;
    std::cerr << err << std::endl;
    exit(-1);
  }
}
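
Helpers::DeepCopy is used above but not included in this excerpt. A minimal sketch of such a helper, assuming the usual ITK allocate-then-iterate pattern (the real helper may differ):

#include "itkImage.h"
#include "itkImageRegionConstIterator.h"
#include "itkImageRegionIterator.h"

// Hypothetical stand-in for Helpers::DeepCopy.
template <typename TImage>
void DeepCopy(const TImage* input, TImage* output)
{
  output->SetRegions(input->GetLargestPossibleRegion());
  output->CopyInformation(input); // copy spacing, origin and direction
  output->Allocate();

  itk::ImageRegionConstIterator<TImage> inputIterator(input, input->GetLargestPossibleRegion());
  itk::ImageRegionIterator<TImage> outputIterator(output, output->GetLargestPossibleRegion());
  while(!inputIterator.IsAtEnd())
    {
    outputIterator.Set(inputIterator.Get());
    ++inputIterator;
    ++outputIterator;
    }
}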
void CriminisiInpainting::ComputeIsophotes()
{
  
  try
  {
    Helpers::DebugWriteImageConditional<FloatVectorImageType>(this->CurrentImage, "Debug/ComputeIsophotes.input.mha", this->DebugImages);
    
    /*
    // This only works when the image is RGB
    typedef itk::VectorMagnitudeImageFilter<FloatVectorImageType, UnsignedCharScalarImageType>  VectorMagnitudeFilterType;
    VectorMagnitudeFilterType::Pointer magnitudeFilter = VectorMagnitudeFilterType::New();
    magnitudeFilter->SetInput(this->OriginalImage); // We use the original image here because the image that has been painted green inside the hole has a strong gradient around the hole.
    magnitudeFilter->Update();
    */
    RGBImageType::Pointer rgbImage = RGBImageType::New();
    Helpers::VectorImageToRGBImage(this->OriginalImage, rgbImage);
    
    Helpers::DebugWriteImageConditional<RGBImageType>(rgbImage, "Debug/ComputeIsophotes.rgb.mha", this->DebugImages);

    typedef itk::RGBToLuminanceImageFilter< RGBImageType, FloatScalarImageType > LuminanceFilterType;
    LuminanceFilterType::Pointer luminanceFilter = LuminanceFilterType::New();
    luminanceFilter->SetInput(rgbImage);
    luminanceFilter->Update();
  
    Helpers::DebugWriteImageConditional<FloatScalarImageType>(luminanceFilter->GetOutput(), "Debug/ComputeIsophotes.luminance.mha", this->DebugImages);
    
    // Blur the image to compute better gradient estimates
    typedef itk::DiscreteGaussianImageFilter<FloatScalarImageType, FloatScalarImageType >  BlurFilterType;
    BlurFilterType::Pointer blurFilter = BlurFilterType::New();
    blurFilter->SetInput(luminanceFilter->GetOutput());
    blurFilter->SetVariance(2);
    blurFilter->Update();

    Helpers::DebugWriteImageConditional<FloatScalarImageType>(blurFilter->GetOutput(), "Debug/ComputeIsophotes.blurred.mha", this->DebugImages);
    
    // Compute the gradient
    // Template parameters are <TInputImage, TOperatorValueType, TOutputValueType>
    typedef itk::GradientImageFilter<FloatScalarImageType, float, float>  GradientFilterType;
    GradientFilterType::Pointer gradientFilter = GradientFilterType::New();
    gradientFilter->SetInput(blurFilter->GetOutput());
    gradientFilter->Update();

    Helpers::DebugWriteImageConditional<FloatVector2ImageType>(gradientFilter->GetOutput(), "Debug/ComputeIsophotes.gradient.mha", this->DebugImages);
 
    // Rotate the gradient 90 degrees to obtain isophotes from gradient
    typedef itk::UnaryFunctorImageFilter<FloatVector2ImageType, FloatVector2ImageType,
    RotateVectors<
      FloatVector2ImageType::PixelType,
      FloatVector2ImageType::PixelType> > FilterType;

    FilterType::Pointer rotateFilter = FilterType::New();
    rotateFilter->SetInput(gradientFilter->GetOutput());
    rotateFilter->Update();

    Helpers::DebugWriteImageConditional<FloatVector2ImageType>(rotateFilter->GetOutput(), "Debug/ComputeIsophotes.rotatedGradient.mha", this->DebugImages);
      
    // Mask the isophote image with the expanded version of the inpainting mask.
    // That is, keep only the values outside of the expanded mask. To do this, we have to first invert the mask.

    // Invert the mask
    typedef itk::InvertIntensityImageFilter <Mask> InvertIntensityImageFilterType;
    InvertIntensityImageFilterType::Pointer invertMaskFilter = InvertIntensityImageFilterType::New();
    invertMaskFilter->SetInput(this->CurrentMask);
    invertMaskFilter->Update();

    if(this->DebugImages)
      {
      Helpers::WriteImage<Mask>(invertMaskFilter->GetOutput(), "Debug/ComputeIsophotes.invertedMask.mha");
      }

    //std::cout << "rotateFilter: " << rotateFilter->GetOutput()->GetLargestPossibleRegion() << std::endl;
    //std::cout << "invertMaskFilter: " << invertMaskFilter->GetOutput()->GetLargestPossibleRegion() << std::endl;
    
    // Keep only values outside the masked region
    typedef itk::MaskImageFilter< FloatVector2ImageType, Mask, FloatVector2ImageType > MaskFilterType;
    MaskFilterType::Pointer maskFilter = MaskFilterType::New();
    maskFilter->SetInput1(rotateFilter->GetOutput());
    maskFilter->SetInput2(invertMaskFilter->GetOutput());
    maskFilter->Update();

    if(this->DebugImages)
      {
      Helpers::WriteImage<FloatVector2ImageType>(maskFilter->GetOutput(), "Debug/ComputeIsophotes.maskedIsophotes.mha");
      }
      
    Helpers::DeepCopy<FloatVector2ImageType>(maskFilter->GetOutput(), this->IsophoteImage);
   
  }
  catch( itk::ExceptionObject & err )
  {
    std::cerr << "ExceptionObject caught in ComputeIsophotes!" << std::endl;
    std::cerr << err << std::endl;
    exit(-1);
  }
}
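
The RotateVectors functor plugged into the UnaryFunctorImageFilter above is not defined in this excerpt. A minimal sketch, assuming it rotates each 2D gradient vector by 90 degrees so that gradients become isophote directions:

// Hypothetical functor for itk::UnaryFunctorImageFilter; the actual definition may differ.
template <class TInput, class TOutput>
class RotateVectors
{
public:
  RotateVectors() {}
  ~RotateVectors() {}

  bool operator!=(const RotateVectors&) const { return false; }
  bool operator==(const RotateVectors& other) const { return !(*this != other); }

  inline TOutput operator()(const TInput& v) const
  {
    TOutput rotated;
    rotated[0] = -v[1]; // (x, y) -> (-y, x) is a 90 degree rotation
    rotated[1] =  v[0];
    return rotated;
  }
};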
void myVtkInteractorStyleImage3D::RemoveLeaks(){
	boost::mutex::scoped_lock scoped_lock(_canSegment_mutex);
	if (!_foundLeaks){
		cout << "Final segmentation had no leaks!" << endl;
		vtkSmartPointer<vtkNIFTIImageWriter> niw = vtkSmartPointer<vtkNIFTIImageWriter>::New();
		vtkSmartPointer<vtkStructuredPoints> saveMat = vtkSmartPointer<vtkStructuredPoints>::New();
		cout << "At filtering." << endl;
		saveMat->DeepCopy(_selection);
		vtkUnsignedShortArray* scalars = (vtkUnsignedShortArray*)(saveMat->GetPointData()->GetScalars());
		for (int i = 0; i < saveMat->GetNumberOfPoints(); i++){
			if (scalars->GetValue(i) == BACKGROUND){
				scalars->SetValue(i, NOT_ACTIVE);
			}
		}
		scalars->Modified();
		//dilate:
		cout << "at dilation" << endl;
		vtkSmartPointer<vtkImageDilateErode3D> dilateErode =
			vtkSmartPointer<vtkImageDilateErode3D>::New();
		dilateErode->SetInputData(saveMat);
		dilateErode->SetDilateValue(FOREGROUND);
		dilateErode->SetErodeValue(NOT_ACTIVE);
		dilateErode->SetKernelSize(DILATION_ST_SIZE, DILATION_ST_SIZE, 1);
		dilateErode->ReleaseDataFlagOff();
		dilateErode->Update();
		cout << "finished dilation" << endl;

		cout << "Saving..." << endl;
		niw->SetInputData(dilateErode->GetOutput());
		niw->SetFileName("correctedImage.nii.gz");
		niw->Write();
		cout << "Final segmentation was saved successfully!" << endl;
		return;
	}
	cout << "Started fixing mesh!" << endl;
	typedef double PixelType;
	typedef unsigned short SegPixelType;
	const unsigned char dim = 3;

	typedef itk::Image<PixelType, dim> ImageType;
	typedef itk::Image<SegPixelType, dim> SegImageType;

	MeshLeaksCorrector mlc;
	ImageType::Pointer inputImage = mlc.read3DImage<ImageType>(_inputName.c_str());
	cout << "Image reading done." << endl;
	typedef itk::GradientMagnitudeImageFilter<ImageType, ImageType> GradientFilterType;
	GradientFilterType::Pointer gradientFilter = GradientFilterType::New();
	gradientFilter->SetInput(inputImage);
	gradientFilter->Update();
	ImageType::Pointer gradientInputImage = gradientFilter->GetOutput();

	cout << "Produced gradient image" << endl;
	typedef itk::ImageRegionIterator<ImageType> IteratorType;
	typedef itk::ImageRegionIterator<SegImageType> SegIteratorType;

	IteratorType gradIt(gradientFilter->GetOutput(), gradientFilter->GetOutput()->GetLargestPossibleRegion());
	cout << "GradIt" << endl;
	vtkPolyDataMapper* mapper = (vtkPolyDataMapper*)(this->GetDefaultRenderer()->GetActors()->GetLastActor()->GetMapper());
	vtkSmartPointer<vtkPolyData> vtkSegImage = vtkSmartPointer<vtkPolyData>::New();
	vtkSegImage->DeepCopy(mapper->GetInput());
	vtkSegImage->GetPointData()->SetScalars(NULL);

	cout << "Extracted mesh from actor" << endl;

	vtkSmartPointer<vtkSmoothPolyDataFilter> smoother = vtkSmartPointer<vtkSmoothPolyDataFilter>::New();
	smoother->SetInputData(vtkSegImage);
	smoother->SetNumberOfIterations(MESH_SMOOTH_ITERATIONS);
	smoother->Update();
	vtkSmartPointer<vtkPolyData> mesh = smoother->GetOutput();
	cout << "Mesh smoothed" << endl;
	typedef itk::ImageToVTKImageFilter<ImageType> ConverterType;
	ConverterType::Pointer gradientConverter = ConverterType::New();
	gradientConverter->SetInput(gradientInputImage); // probe the mesh with the gradient magnitude image computed above
	gradientConverter->Update();
	vtkSmartPointer<vtkImageData> vtkGradientImage = gradientConverter->GetOutput();

	cout << "Read CT image" << endl;

	vtkSmartPointer<vtkProbeFilter> probeFilter = vtkSmartPointer<vtkProbeFilter>::New();
	probeFilter->SetSourceData(vtkGradientImage);
	probeFilter->SetInputData(mesh);
	probeFilter->Update();

	cout << "Probe finished" << endl;
	vtkSmartPointer<vtkGeometryFilter> geometryFilter = vtkSmartPointer<vtkGeometryFilter>::New();
	geometryFilter->SetInputData(probeFilter->GetOutput());
	geometryFilter->Update();
	vtkSmartPointer<vtkPolyData> gradientMesh = geometryFilter->GetOutput();

	cout << "Geometric filter finished" << endl;

	vtkSmartPointer<vtkPolyData> minCurvatureMesh = mlc.polyDataToMinCurvature(mapper->GetInput());
	cout << "mlc.minCurv finished" << endl;

	//just temporary - don't forget to delete
	vtkSmartPointer<vtkPolyData> maxCurvatureMesh = mlc.polyDataToMaxCurvature(mapper->GetInput());
	//  --- up to here
	cout << "mlc.maaxCurv finished" << endl;
	vtkSmartPointer<vtkPolyData> minCutMeshLeaks = mlc.minCut(minCurvatureMesh, mapper->GetInput(), gradientMesh, MIN_CURVATURE_TAG, 1.0f);
	cout << "minCut finished" << endl;
	vtkSmartPointer<vtkPolyData> minCutMeshInteriorLeaks = mlc.minCut(maxCurvatureMesh, mapper->GetInput(), gradientMesh, MAX_CURVATURE_TAG, 1.0f);
	cout << "minCut Interior finished" << endl;
	vtkSmartPointer<vtkPolyData> minCutMesh = mlc.minCutConjunction(minCutMeshLeaks, minCutMeshInteriorLeaks);
	cout << "Conjunction finished" << endl;
	mlc.attributeDilation(minCutMesh, ATTRIBUTE_DILATION_RADIUS);
	cout << "dilation finished" << endl;
	vtkSmartPointer<vtkPolyData> correctedMesh1 = mlc.laplaceInterpolation(minCutMesh);
	cout << "laplace finished" << endl;
	vtkSmartPointer<vtkPolyDataNormals> normals = vtkSmartPointer<vtkPolyDataNormals>::New();
	normals->SetInputData(correctedMesh1);
	normals->FlipNormalsOn();
	normals->Update();
	vtkSmartPointer<vtkPolyData> correctedMesh = normals->GetOutput();
	cout << "Finished fixing mesh! Wow...." << endl;
	cout << "Writing mesh to file! laplaceMesh.vtk" << endl;
	mlc.writePolyData(correctedMesh, "laplaceMesh.vtk");
	vtkSmartPointer<vtkDecimatePro> decimateFilter = vtkSmartPointer<vtkDecimatePro>::New();
	decimateFilter->SetInputData(correctedMesh);
	decimateFilter->SetTargetReduction(DECIMATION_FACTOR);
	decimateFilter->PreserveTopologyOn();
	decimateFilter->Update();
	vtkSmartPointer<vtkPolyData> decimatedMesh = decimateFilter->GetOutput();
	cout << "Writing mesh to file! decimatedMesh.vtk" << endl;
	mlc.writePolyData(decimatedMesh, "decimatedMesh.vtk");
	//2.5D NEW code
	//Seg input image is the selection_structured_points. (upcast to vtkImageData)
	typedef itk::VTKImageToImageFilter<SegImageType> SegConverterType;
	SegConverterType::Pointer converter = SegConverterType::New();
	cout << "bedug 1" << endl;
	// selection is not NULL (checked...)
	cout << "this->_selection->GetScalarType(): " << this->_selection->GetScalarType() << endl;
	if (this->_selection->GetScalarType() == VTK_INT){
		cout << "I have INTs instead of shorts for some reason!" << endl;
	}
	converter->SetInput(this->_selection);
	cout << "deb 5" << endl;
	converter->Update(); // 
	cout << "bedug 2" << endl;
	SegImageType::Pointer segInputImage = converter->GetOutput();
	cout << "bedug 3" << endl;
	SegImageType::Pointer outputContourImage = mlc.sampleMeshOnImage<SegImageType>(decimatedMesh, segInputImage);
	cout << "bedug 4" << endl;
	vtkSmartPointer<vtkDecimatePro> decimateFilter2 = vtkSmartPointer<vtkDecimatePro>::New();
	decimateFilter2->SetInputData(mapper->GetInput());
	decimateFilter2->SetTargetReduction(DECIMATION_FACTOR);
	decimateFilter2->PreserveTopologyOn();
	decimateFilter2->Update();
	SegImageType::Pointer seedInputImage = mlc.sampleMeshOnImage<SegImageType>(decimateFilter2->GetOutput(), segInputImage);
	cout << "bedug 5" << endl;
	SegImageType::Pointer outputImage = mlc.correctImage<SegImageType>(segInputImage, seedInputImage, outputContourImage, _numTumors);

	//Here we should dilate:
	cout << "at dilation" << endl;
	typedef itk::BinaryBallStructuringElement<SegPixelType,3> StructuringElementType;
	StructuringElementType structuringElement;
	structuringElement.SetRadius(DILATION_ST_SIZE);
	structuringElement.CreateStructuringElement();

	typedef itk::BinaryDilateImageFilter<SegImageType, SegImageType, StructuringElementType>
		BinaryDilateImageFilterType;

	BinaryDilateImageFilterType::Pointer dilateFilter
		= BinaryDilateImageFilterType::New();
	dilateFilter->SetInput(outputImage);
	dilateFilter->SetKernel(structuringElement);
	dilateFilter->Update();
	outputImage = dilateFilter->GetOutput();
	cout << "finished dilation" << endl;

	//dilation end.

	typedef itk::ImageFileWriter<SegImageType> WriterType;
	WriterType::Pointer writer = WriterType::New();
	cout << "bedug 6" << endl;
	writer->SetInput(outputImage);
	cout << "Writing mesh to file! correctedImage.nii.gz" << endl;
	writer->SetFileName("correctedImage.nii.gz");
	try{
		writer->Update();
	}
	catch (itk::ExceptionObject & excp)
	{
		std::cerr << "writing input image exception thrown" << std::endl;
		std::cerr << excp << std::endl;
		exit(1);
	}

	typedef itk::ImageToVTKImageFilter<SegImageType> SegInvConverterType;
	SegInvConverterType::Pointer correctionConverter = SegInvConverterType::New();
	cout << "bedug 7" << endl;
	correctionConverter->SetInput(outputImage);
	correctionConverter->Update();
	vtkSmartPointer<vtkImageData> vtkCorrectedImage = correctionConverter->GetOutput();
	vtkUnsignedShortArray* scalars = (vtkUnsignedShortArray*)(vtkCorrectedImage->GetPointData()->GetScalars());
	vtkUnsignedShortArray* selection_scalars = (vtkUnsignedShortArray*)(_selection->GetPointData()->GetScalars());
	for (int i = 0; i < _selection->GetNumberOfPoints(); i++){	
		if (scalars->GetValue(i) != NOT_ACTIVE){
			//cout << scalars->GetValue(i) << endl;
			selection_scalars->SetValue(i, FOREGROUND);
		}
		else{
			selection_scalars->SetValue(i, NOT_ACTIVE);
		}
	}
	selection_scalars->Modified();
	this->_selection->Modified();

	cout << "bedug 8" << endl;
	vtkSmartPointer<vtkPolyData> outputMesh = mlc.createAndSmoothSurface(vtkCorrectedImage, 50);

	vtkSmartPointer<vtkUnsignedShortArray> meshScalars = vtkSmartPointer<vtkUnsignedShortArray>::New();
	meshScalars->SetNumberOfComponents(1);
	meshScalars->SetNumberOfTuples(outputMesh->GetNumberOfPoints());
	cout <<"outputMesh->GetNumberOfPoints()"<< outputMesh->GetNumberOfPoints() << endl;
	for (int i = 0; i < outputMesh->GetNumberOfPoints(); i++){
		meshScalars->SetValue(i, NOT_ACTIVE);
	}
	meshScalars->SetName("mesh_colors");
	meshScalars->Modified(); 
	outputMesh->GetPointData()->RemoveArray("mesh_colors");
	outputMesh->GetPointData()->SetScalars(meshScalars);
	outputMesh->GetPointData()->Modified();
	cout << "output values: " << meshScalars->GetValue(0) << endl;
	mapper->GetInput()->DeepCopy(outputMesh);
	mapper->Modified();
	cout << "We won!" << endl;
}
void BoundaryNormals::ComputeBoundaryNormals(TNormalsImage* const boundaryNormalsImage, const float maskBlurVariance)
{
  // Blur the mask, compute the gradient, then keep the normals only at the original mask boundary

  // Compute the boundary of the mask
  typedef itk::Image<unsigned char, 2> BoundaryImageType;
  BoundaryImageType::Pointer boundaryImage = BoundaryImageType::New();
  this->MaskImage->CreateBoundaryImage(boundaryImage, Mask::VALID, 255);

  if(this->IsDebugOn())
  {
    ITKHelpers::WriteImage(boundaryImage.GetPointer(),
                           "ComputeBoundaryNormals.BoundaryImage.mha");
  }

  // Blur the mask so that the normals are not quantized so much. Also, pixels with only diagonal
  // valid neighbors have undefined gradients without this blurring.
  typedef itk::DiscreteGaussianImageFilter<Mask, ITKHelpers::FloatScalarImageType>  BlurFilterType;
  BlurFilterType::Pointer gaussianFilter = BlurFilterType::New();
  gaussianFilter->SetInput(this->MaskImage);
  gaussianFilter->SetVariance(maskBlurVariance);
  gaussianFilter->Update();

  if(this->IsDebugOn())
  {
     ITKHelpers::WriteImage(gaussianFilter->GetOutput(),
                            "ComputeBoundaryNormals.BlurredMask.mha");
  }

  // Compute the gradient of the blurred mask
  typedef itk::GradientImageFilter<ITKHelpers::FloatScalarImageType, float, float>  GradientFilterType;
  GradientFilterType::Pointer gradientFilter = GradientFilterType::New();
  gradientFilter->SetInput(gaussianFilter->GetOutput());
  gradientFilter->Update();

  if(this->IsDebugOn())
  {
    ITKHelpers::WriteImage(gradientFilter->GetOutput(),
                          "ComputeBoundaryNormals.BlurredMaskGradient.mha");
  }

  // Only keep the normals at the boundary
  typedef itk::MaskImageFilter<TNormalsImage, ITKHelpers::UnsignedCharScalarImageType, TNormalsImage> MaskFilterType;
  typename MaskFilterType::Pointer maskFilter = MaskFilterType::New();
  maskFilter->SetInput(gradientFilter->GetOutput());
  maskFilter->SetMaskImage(boundaryImage);
  maskFilter->Update();

  if(this->IsDebugOn())
  {
    ITKHelpers::WriteImage(maskFilter->GetOutput(),
                           "ComputeBoundaryNormals.BoundaryNormalsUnnormalized.mha");
  }

  // Allocate the image to return
//  ITKHelpers::DeepCopy(maskFilter->GetOutput(), boundaryNormalsImage);
  ITKHelpers::InitializeImage(boundaryNormalsImage, this->MaskImage->GetLargestPossibleRegion());

  // Normalize the vectors because we just care about their direction
  std::vector<itk::Index<2> > boundaryPixels = ITKHelpers::GetNonZeroPixels(boundaryImage.GetPointer());

  for(std::vector<itk::Index<2> >::const_iterator boundaryPixelIterator = boundaryPixels.begin();
      boundaryPixelIterator != boundaryPixels.end(); ++boundaryPixelIterator)
  {
    typename TNormalsImage::PixelType p = maskFilter->GetOutput()->GetPixel(*boundaryPixelIterator);
    p.Normalize();
    boundaryNormalsImage->SetPixel(*boundaryPixelIterator, p);
  }

}
void gradient_calc(int S_i, float* S, int i, int j, int k, float* data, int r, int s,  int t, unsigned short* output_label){
	
	//cout<<"Starting Definitions..."<<endl;	
	//define the pixel and image types	
	typedef float PixelType;
	typedef itk::Image<PixelType, 3> ImageType;
	typedef itk::CovariantVector< double, 3 > GradientPixelType;
	typedef itk::Image< GradientPixelType, 3 > GradientImageType;
	typedef itk::GradientRecursiveGaussianImageFilter<ImageType, GradientImageType> GradientFilterType;
		
	//Initialize a new image which will read the input label	
	ImageType::Pointer image = ImageType::New();
	ImageType::IndexType start;
	start[0] = 0;
	start[1] = 0; 
	start[2] = 0;
	ImageType::SizeType size;
	size[0] = k;
	size[1] = j;
	size[2] = i;
	ImageType::SpacingType spacing;
	spacing[0] = S[0]; 
	spacing[1] = S[1]; 
	spacing[2] = S[2];
	//cout<<"X_space="<<spacing[0]<<", Y_space="<<spacing[1]<<", Z_space="<<spacing[2]<<endl;
	image->SetSpacing( spacing );
	ImageType::RegionType region;
	region.SetSize( size );
	region.SetIndex( start );
	image->SetRegions( region );
	image->Allocate();

	//Create an image iterator to copy the input label into the image file
	typedef itk::ImageRegionIterator< ImageType > IteratorType;
	IteratorType it( image, image->GetRequestedRegion() );

	int count = 0;	
	while(!it.IsAtEnd()){	
		it.Set( data[ count ] );
		++it;
		count++;
	}
	
	//Calculate the vector gradient of the image
	GradientFilterType::Pointer gradientMapFilter = GradientFilterType::New();
	gradientMapFilter->SetInput( image );
	gradientMapFilter->SetSigma( 1.0 );
	gradientMapFilter->Update();

	//Creates a new image and iterator of the gradient
	GradientImageType::Pointer image2 = gradientMapFilter->GetOutput();
	typedef itk::ImageRegionConstIterator< GradientImageType > IteratorType2;
	IteratorType2 it2( image2, image2->GetRequestedRegion() );
	//ImageType::IndexType idx = it2.GetIndex();


	long int count2 = 0;	
	while(!it2.IsAtEnd()){
	//while(count2 < (r * s * t - 1)){		
		float magnitude = sqrt(it2.Get()[0] * it2.Get()[0] + it2.Get()[1] * it2.Get()[1] + it2.Get()[2] * it2.Get()[2]);
		output_label[ count2 ] = magnitude ;
		count2++;
		++it2;

	}
	
}
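
A hypothetical driver for gradient_calc, assuming it is declared as above and linked together with it (and with ITK); it builds a tiny synthetic volume and receives the gradient magnitude, truncated to unsigned short, in the output buffer:

#include <vector>

void gradient_calc(int S_i, float* S, int i, int j, int k, float* data,
                   int r, int s, int t, unsigned short* output_label);

int main()
{
	const int nx = 4, ny = 4, nz = 4; // size[0] = k = x, size[1] = j = y, size[2] = i = z
	float spacing[3] = { 1.0f, 1.0f, 1.0f };

	std::vector<float> volume(nx * ny * nz, 0.0f);
	volume[nx * ny * 2 + nx * 2 + 2] = 100.0f; // a single bright voxel

	std::vector<unsigned short> gradientMagnitude(nx * ny * nz, 0);
	// S_i is assumed to be the length of the spacing array; r, s, t mirror the sizes
	gradient_calc(3, spacing, nz, ny, nx, &volume[0], nz, ny, nx, &gradientMagnitude[0]);
	return 0;
}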