Example #1
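Reads an intensity and a label image stack, optionally extracts labels from connected components, converts both stacks to volumes, and stores them in an HDF5 project file.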
int main(int argc, char** argv) {

	try {

		util::ProgramOptions::init(argc, argv);
		logger::LogManager::init();

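		// read the intensity and label image stacks given as program options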
		pipeline::Process<ImageStackDirectoryReader> intensityReader(optionIntensities.as<std::string>());
		pipeline::Process<ImageStackDirectoryReader> labelReader(optionLabels.as<std::string>());

		pipeline::Value<ImageStack> intensityStack = intensityReader->getOutput();
		pipeline::Value<ImageStack> labelStack = labelReader->getOutput();

		if (optionExtractLabels) {

			LOG_DEBUG(logger::out) << "[main] extracting labels from connected components" << std::endl;

			pipeline::Process<ExtractLabels> extractLabels;
			extractLabels->setInput(labelReader->getOutput());
			labelStack = extractLabels->getOutput();
		}

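		// make sure the intensity and label stacks have the same dimensions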
		unsigned int width  = labelStack->width();
		unsigned int height = labelStack->height();
		unsigned int depth  = labelStack->size();

		if (width  != intensityStack->width() ||
			height != intensityStack->height() ||
			depth  != intensityStack->size())
			UTIL_THROW_EXCEPTION(
					UsageError,
					"intensity and label stacks have different sizes");

		// create volumes from stacks

		ExplicitVolume<float> intensities(*intensityStack);
		ExplicitVolume<int>   labels(*labelStack);

		intensities.normalize();

		// store them in the project file

		boost::filesystem::remove(optionProjectFile.as<std::string>());
		Hdf5VolumeStore volumeStore(optionProjectFile.as<std::string>());

		volumeStore.saveIntensities(intensities);
		volumeStore.saveLabels(labels);

	} catch (boost::exception& e) {

		handleException(e, std::cerr);
	}
}
Example #2
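Learns feature weights for CRAG node and edge features with a bundle optimizer, using a best-effort solution and a configurable loss (Hamming or overlap).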
int main(int argc, char** argv) {

	try {

		util::ProgramOptions::init(argc, argv);
		logger::LogManager::init();

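		// open the CRAG and volume stores of the project file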
		Hdf5CragStore   cragStore(optionProjectFile.as<std::string>());
		Hdf5VolumeStore volumeStore(optionProjectFile.as<std::string>());

		Crag crag;
		cragStore.retrieveCrag(crag);

		NodeFeatures nodeFeatures(crag);
		EdgeFeatures edgeFeatures(crag);

		LOG_USER(logger::out) << "reading features" << std::endl;
		cragStore.retrieveNodeFeatures(crag, nodeFeatures);
		cragStore.retrieveEdgeFeatures(crag, edgeFeatures);

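		// configure the bundle optimizer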
		BundleOptimizer::Parameters parameters;
		parameters.lambda      = optionRegularizerWeight;
		parameters.epsStrategy = BundleOptimizer::EpsFromGap;
		BundleOptimizer optimizer(parameters);

		BestEffort*  bestEffort  = 0;
		OverlapLoss* overlapLoss = 0;

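		// obtain the best-effort solution: read it from the project file or derive it from the ground truth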
		if (optionBestEffortFromProjectFile) {

			LOG_USER(logger::out) << "reading best-effort" << std::endl;

			bestEffort = new BestEffort(crag);

			vigra::HDF5File project(
					optionProjectFile.as<std::string>(),
					vigra::HDF5File::OpenMode::ReadWrite);
			project.cd("best_effort");
			vigra::ArrayVector<int> beNodes;
			vigra::MultiArray<2, int> beEdges;
			project.readAndResize("nodes", beNodes);
			project.readAndResize("edges", beEdges);

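			// collect best-effort node ids and edges (as ordered id pairs) for fast lookup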
			std::set<int> nodes;
			for (int n : beNodes)
				nodes.insert(n);

			std::set<std::pair<int, int>> edges;
			for (int i = 0; i < beEdges.shape(1); i++)
				edges.insert(
						std::make_pair(
							std::min(beEdges(i, 0), beEdges(i, 1)),
							std::max(beEdges(i, 0), beEdges(i, 1))));

			for (Crag::NodeIt n(crag); n != lemon::INVALID; ++n)
				bestEffort->node[n] = nodes.count(crag.id(n));

			for (Crag::EdgeIt e(crag); e != lemon::INVALID; ++e)
				bestEffort->edge[e] = edges.count(
						std::make_pair(
								std::min(crag.id(crag.u(e)), crag.id(crag.v(e))),
								std::max(crag.id(crag.u(e)), crag.id(crag.v(e)))));

		} else {

			LOG_USER(logger::out) << "reading ground-truth" << std::endl;
			ExplicitVolume<int> groundTruth;
			volumeStore.retrieveGroundTruth(groundTruth);

			LOG_USER(logger::out) << "finding best-effort solution" << std::endl;
			overlapLoss = new OverlapLoss(crag, groundTruth);
			bestEffort = new BestEffort(crag, *overlapLoss);
		}

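		// select the loss to train with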
		Loss* loss = 0;
		bool  destructLoss = false;

		if (optionLoss.as<std::string>() == "hamming") {

			LOG_USER(logger::out) << "using Hamming loss" << std::endl;

			loss = new HammingLoss(crag, *bestEffort);
			destructLoss = true;

		} else if (optionLoss.as<std::string>() == "overlap") {

			LOG_USER(logger::out) << "using overlap loss" << std::endl;

			if (!overlapLoss) {

				LOG_USER(logger::out) << "reading ground-truth" << std::endl;
				ExplicitVolume<int> groundTruth;
				volumeStore.retrieveGroundTruth(groundTruth);

				LOG_USER(logger::out) << "finding best-effort solution" << std::endl;
				overlapLoss = new OverlapLoss(crag, groundTruth);
			}

			loss = overlapLoss;

		} else {

			LOG_ERROR(logger::out)
					<< "unknown loss: "
					<< optionLoss.as<std::string>()
					<< std::endl;
			return 1;
		}

		if (optionNormalizeLoss) {

			LOG_USER(logger::out) << "normalizing loss..." << std::endl;
			loss->normalize(crag, MultiCut::Parameters());
		}

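		// learn the feature weights by passing the oracle to the bundle optimizer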
		Oracle oracle(
				crag,
				nodeFeatures,
				edgeFeatures,
				*loss,
				*bestEffort);

		std::vector<double> weights(nodeFeatures.dims() + edgeFeatures.dims(), 0);
		optimizer.optimize(oracle, weights);

		storeVector(weights, optionFeatureWeights);

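		// clean up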
		if (destructLoss && loss != 0)
			delete loss;

		if (overlapLoss)
			delete overlapLoss;

		if (bestEffort)
			delete bestEffort;

	} catch (boost::exception& e) {

		handleException(e, std::cerr);
	}
}
Example #3
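Creates a CRAG from a merge tree, a merge history, or a candidate segmentation, optionally downsamples it, annotates adjacencies, prints statistics, and stores the CRAG together with the image volumes in an HDF5 project file.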
int main(int argc, char** argv) {

	UTIL_TIME_SCOPE("main");

	try {

		util::ProgramOptions::init(argc, argv);
		logger::LogManager::init();

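		// voxel resolution and offset of the volumes, taken from the program options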
		util::point<float, 3> resolution(
				optionResX,
				optionResY,
				optionResZ);
		util::point<float, 3> offset(
				optionOffsetX,
				optionOffsetY,
				optionOffsetZ);

		Crag* crag = new Crag();
		CragVolumes* volumes = new CragVolumes(*crag);
		Costs* mergeCosts = 0;

		CragImport import;

		bool alreadyDownsampled = false;

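		// create the CRAG from a merge tree, a merge history, or a candidate segmentation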
		if (optionMergeTree) {

			UTIL_TIME_SCOPE("read CRAG from mergetree");

			// get information about the image to read
			std::string mergeTreePath = optionMergeTree;

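			// a directory of merge-tree images: read one CRAG per image and combine them into a stack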
			if (boost::filesystem::is_directory(boost::filesystem::path(mergeTreePath))) {

				std::vector<std::string> files = getImageFiles(mergeTreePath);

				// process one image after another
				std::vector<std::unique_ptr<Crag>> crags(files.size());
				std::vector<std::unique_ptr<CragVolumes>> cragsVolumes;
				for (auto& c : crags) {
					c = std::unique_ptr<Crag>(new Crag);
					cragsVolumes.push_back(std::unique_ptr<CragVolumes>(new CragVolumes(*c)));
				}

				int i = 0;
				for (std::string file : files) {
					
					LOG_USER(logger::out) << "reading crag from " << file << std::endl;

					import.readCrag(file, *crags[i], *cragsVolumes[i], resolution, offset + util::point<float, 3>(0, 0, resolution.z()*i));
					i++;
				}

				if (optionDownsampleCrag) {

					UTIL_TIME_SCOPE("downsample CRAG");

					DownSampler downSampler(optionMinCandidateSize.as<int>());

					std::vector<std::unique_ptr<Crag>> downSampledCrags(crags.size());
					std::vector<std::unique_ptr<CragVolumes>> downSampledVolumes(crags.size());

					for (int i = 0; i < crags.size(); i++) {

						downSampledCrags[i]   = std::unique_ptr<Crag>(new Crag());
						downSampledVolumes[i] = std::unique_ptr<CragVolumes>(new CragVolumes(*downSampledCrags[i]));

						downSampler.process(*crags[i], *cragsVolumes[i], *downSampledCrags[i], *downSampledVolumes[i]);
					}

					std::swap(cragsVolumes, downSampledVolumes);
					std::swap(crags, downSampledCrags);

					// prevent another downsampling on the candidates added by 
					// the combiner
					alreadyDownsampled = true;
				}

				// combine crags
				CragStackCombiner combiner;
				combiner.combine(crags, cragsVolumes, *crag, *volumes);

			} else {

				import.readCrag(mergeTreePath, *crag, *volumes, resolution, offset);
			}

		} else if (optionSupervoxels.as<bool>() && (optionMergeHistory.as<bool>() || optionCandidateSegmentation.as<bool>())) {

			UTIL_TIME_SCOPE("read CRAG from merge history");

			if (optionMergeHistory) {

				std::string mergeHistoryPath = optionMergeHistory;

				if (boost::filesystem::is_directory(boost::filesystem::path(mergeHistoryPath))) {

					// get all merge-history files
					std::vector<std::string> mhFiles;
					for (boost::filesystem::directory_iterator i(mergeHistoryPath); i != boost::filesystem::directory_iterator(); i++)
						if (!boost::filesystem::is_directory(*i) && (
							i->path().extension() == ".txt" ||
							i->path().extension() == ".dat"
						))
							mhFiles.push_back(i->path().native());
					std::sort(mhFiles.begin(), mhFiles.end());

					// get all supervoxel files
					std::vector<std::string> svFiles = getImageFiles(optionSupervoxels);

					// process one image after another
					std::vector<std::unique_ptr<Crag>> crags(mhFiles.size());
					std::vector<std::unique_ptr<CragVolumes>> cragsVolumes;
					for (auto& c : crags) {
						c = std::unique_ptr<Crag>(new Crag);
						cragsVolumes.push_back(std::unique_ptr<CragVolumes>(new CragVolumes(*c)));
					}

					for (int i = 0; i < mhFiles.size(); i++) {
						
						LOG_USER(logger::out) << "reading crag from supervoxel file " << svFiles[i] << " and merge history " << mhFiles[i] << std::endl;

						Costs mergeCosts(*crags[i]);
						import.readCragFromMergeHistory(svFiles[i], mhFiles[i], *crags[i], *cragsVolumes[i], resolution, offset + util::point<float, 3>(0, 0, resolution.z()*i), mergeCosts);
					}

					if (optionDownsampleCrag) {

						UTIL_TIME_SCOPE("downsample CRAG");

						DownSampler downSampler(optionMinCandidateSize.as<int>());

						std::vector<std::unique_ptr<Crag>> downSampledCrags(crags.size());
						std::vector<std::unique_ptr<CragVolumes>> downSampledVolumes(crags.size());

						for (int i = 0; i < crags.size(); i++) {

							downSampledCrags[i]   = std::unique_ptr<Crag>(new Crag());
							downSampledVolumes[i] = std::unique_ptr<CragVolumes>(new CragVolumes(*downSampledCrags[i]));

							downSampler.process(*crags[i], *cragsVolumes[i], *downSampledCrags[i], *downSampledVolumes[i]);
						}

						std::swap(cragsVolumes, downSampledVolumes);
						std::swap(crags, downSampledCrags);

						// prevent another downsampling on the candidates added by 
						// the combiner
						alreadyDownsampled = true;
					}

					// combine crags
					CragStackCombiner combiner;
					combiner.combine(crags, cragsVolumes, *crag, *volumes);

				} else {

					mergeCosts = new Costs(*crag);
					import.readCragFromMergeHistory(optionSupervoxels, optionMergeHistory, *crag, *volumes, resolution, offset, *mergeCosts);

				}

			} else
				import.readCragFromCandidateSegmentation(optionSupervoxels, optionCandidateSegmentation, *crag, *volumes, resolution, offset);

		} else {

			LOG_ERROR(logger::out)
					<< "at least one of mergetree or (supervoxels && mergeHistory) "
					<< "have to be given to create a CRAG" << std::endl;

			return 1;
		}

		if (optionDownsampleCrag && !alreadyDownsampled) {

			UTIL_TIME_SCOPE("downsample CRAG");

			Crag* downSampled = new Crag();
			CragVolumes* downSampledVolumes = new CragVolumes(*downSampled);

			if (optionMinCandidateSize) {

				DownSampler downSampler(optionMinCandidateSize.as<int>());
				downSampler.process(*crag, *volumes, *downSampled, *downSampledVolumes);
			} else {

				DownSampler downSampler;
				downSampler.process(*crag, *volumes, *downSampled, *downSampledVolumes);
			}

			delete crag;
			delete volumes;
			if (mergeCosts) {
				delete mergeCosts;
				mergeCosts = 0;
			}
			crag = downSampled;
			volumes = downSampledVolumes;
		}

		{
			UTIL_TIME_SCOPE("find CRAG adjacencies");

			PlanarAdjacencyAnnotator annotator(PlanarAdjacencyAnnotator::Direct);
			annotator.annotate(*crag, *volumes);
		}

		// Statistics

		int numNodes = 0;
		int numRootNodes = 0;
		double sumSubsetDepth = 0;
		int maxSubsetDepth = 0;
		int minSubsetDepth = 1e6;

		for (Crag::NodeIt n(*crag); n != lemon::INVALID; ++n) {

			if (crag->isRootNode(n)) {

				int depth = crag->getLevel(n);

				sumSubsetDepth += depth;
				minSubsetDepth = std::min(minSubsetDepth, depth);
				maxSubsetDepth = std::max(maxSubsetDepth, depth);
				numRootNodes++;
			}

			numNodes++;
		}

		int numAdjEdges = 0;
		for (Crag::EdgeIt e(*crag); e != lemon::INVALID; ++e)
			numAdjEdges++;
		int numSubEdges = 0;
		for (Crag::SubsetArcIt e(*crag); e != lemon::INVALID; ++e)
			numSubEdges++;

		LOG_USER(logger::out) << "created CRAG" << std::endl;
		LOG_USER(logger::out) << "\t# nodes          : " << numNodes << std::endl;
		LOG_USER(logger::out) << "\t# root nodes     : " << numRootNodes << std::endl;
		LOG_USER(logger::out) << "\t# adjacencies    : " << numAdjEdges << std::endl;
		LOG_USER(logger::out) << "\t# subset edges   : " << numSubEdges << std::endl;
		LOG_USER(logger::out) << "\tmax subset depth : " << maxSubsetDepth << std::endl;
		LOG_USER(logger::out) << "\tmin subset depth : " << minSubsetDepth << std::endl;
		LOG_USER(logger::out) << "\tmean subset depth: " << sumSubsetDepth/numRootNodes << std::endl;

		// Store CRAG and volumes

		boost::filesystem::remove(optionProjectFile.as<std::string>());
		Hdf5CragStore store(optionProjectFile.as<std::string>());

		{
			UTIL_TIME_SCOPE("saving CRAG");

			store.saveCrag(*crag);
			store.saveVolumes(*volumes);
			if (mergeCosts)
				store.saveCosts(*crag, *mergeCosts, "merge-scores");
		}

		{
			UTIL_TIME_SCOPE("saving volumes");

			Hdf5VolumeStore volumeStore(optionProjectFile.as<std::string>());

			ExplicitVolume<float> intensities = readVolume<float>(getImageFiles(optionIntensities));
			intensities.setResolution(resolution);
			intensities.setOffset(offset);
			intensities.normalize();
			volumeStore.saveIntensities(intensities);

			if (optionGroundTruth) {

				ExplicitVolume<int> groundTruth = readVolume<int>(getImageFiles(optionGroundTruth));

				if (optionExtractGroundTruthLabels) {

					vigra::MultiArray<3, int> tmp(groundTruth.data().shape());
					vigra::labelMultiArrayWithBackground(
							groundTruth.data(),
							tmp);
					groundTruth.data() = tmp;
				}

				groundTruth.setResolution(resolution);
				groundTruth.setOffset(offset);
				volumeStore.saveGroundTruth(groundTruth);
			}

			if (optionBoundaries) {

				ExplicitVolume<float> boundaries = readVolume<float>(getImageFiles(optionBoundaries));
				boundaries.setResolution(resolution);
				boundaries.setOffset(offset);
				boundaries.normalize();
				volumeStore.saveBoundaries(boundaries);
			}

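			// affinity volumes are stored only if all three (x, y, z) are given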
			bool atLeastOneAffinity = optionXAffinities || optionYAffinities || optionZAffinities;
			bool allAffinities = optionXAffinities && optionYAffinities && optionZAffinities;

			if (atLeastOneAffinity) {

				if (!allAffinities) {

					LOG_ERROR(logger::out)
							<< "One of the affinities was not provided. "
							<< "Affinities will be ignored." << std::endl;

				} else {

					ExplicitVolume<float> xAffinities = readVolume<float>(getImageFiles(optionXAffinities));
					ExplicitVolume<float> yAffinities = readVolume<float>(getImageFiles(optionYAffinities));
					ExplicitVolume<float> zAffinities = readVolume<float>(getImageFiles(optionZAffinities));

					volumeStore.saveAffinities(xAffinities, yAffinities, zAffinities);
				}
			}
		}

		delete crag;
		delete volumes;
		delete mergeCosts;

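		// optionally copy feature weights and feature normalization from a training project file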
		if (optionImportTrainingResult) {

			LOG_USER(logger::out)
							<< "importing training results from "
							<< optionImportTrainingResult.as<std::string>()
							<< std::endl;

			Hdf5CragStore trainingStore(optionImportTrainingResult.as<std::string>());

			FeatureWeights weights;
			FeatureWeights min;
			FeatureWeights max;

			trainingStore.retrieveFeatureWeights(weights);
			trainingStore.retrieveFeaturesMin(min);
			trainingStore.retrieveFeaturesMax(max);

			store.saveFeatureWeights(weights);
			store.saveFeaturesMin(min);
			store.saveFeaturesMax(max);
		}

	} catch (Exception& e) {

		handleException(e, std::cerr);
	}
}