bool
AdjacencyAnnotator::isSiblingEdge(Crag& crag, Crag::CragEdge e) {

	// root nodes have no parent and can therefore not be siblings
	if (crag.isRootNode(e.u()) || crag.isRootNode(e.v()))
		return false;

	// two candidates are siblings if they share the same parent in the
	// subset hierarchy
	Crag::CragNode parentU = (*crag.outArcs(e.u()).begin()).target();
	Crag::CragNode parentV = (*crag.outArcs(e.v()).begin()).target();

	return (parentU == parentV);
}
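
// Minimal usage sketch (not part of the original source): counting how many
// adjacency edges connect siblings. It assumes that Crag exposes an edges()
// range of Crag::CragEdge analogous to the nodes() range used elsewhere in
// these examples, and that isSiblingEdge is accessible on the annotator; both
// are assumptions made for illustration only.
int countSiblingEdges(Crag& crag, AdjacencyAnnotator& annotator) {

	int numSiblingEdges = 0;
	for (Crag::CragEdge e : crag.edges())
		if (annotator.isSiblingEdge(crag, e))
			numSiblingEdges++;

	return numSiblingEdges;
}
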
void create_crag() {

	// useful for later:
	//
	//boost::filesystem::path dataDir = dir_of(__FILE__);
	//boost::filesystem::path graphfile = dataDir/"star.dat";

	Crag crag;

	// add 100 nodes
	for (int i = 0; i < 100; i++)
		crag.addNode();

	// create 10 chains of 10 nodes each by adding subset arcs (child -> parent)
	for (int i = 0; i < 100; i += 10) {

		for (int j = i+1; j < i + 10; j++) {

			Crag::Node u = crag.nodeFromId(j-1);
			Crag::Node v = crag.nodeFromId(j);
			crag.addSubsetArc(u, v);
		}
	}

	// check level, leaf, and root status of each node
	for (int i = 0; i < 100; i++) {

		Crag::Node n = crag.nodeFromId(i);
		BOOST_CHECK_EQUAL(crag.getLevel(n), i%10);

		if (i%10 == 0)
			BOOST_CHECK(crag.isLeafNode(n));
		else
			BOOST_CHECK(!crag.isLeafNode(n));

		if (i%10 == 9)
			BOOST_CHECK(crag.isRootNode(n));
		else
			BOOST_CHECK(!crag.isRootNode(n));
	}
}
void
AdjacencyAnnotator::propagateLeafAdjacencies(Crag& crag) {

	_numAdded = 0;

	// starting from each root node, recurse through the subset hierarchy and
	// propagate the leaf adjacencies to the super nodes
	for (Crag::CragNode n : crag.nodes())
		if (crag.isRootNode(n))
			recurseAdjacencies(crag, n);

	if (optionPruneChildEdges)
		pruneChildEdges(crag);

	LOG_USER(adjacencyannotatorlog)
			<< "added " << _numAdded << " super node adjacency edges"
			<< std::endl;
}
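
// Usage sketch (illustrative only; it assumes propagateLeafAdjacencies is
// callable from outside and that PlanarAdjacencyAnnotator, as used in main()
// further below, derives from AdjacencyAnnotator):
//
//   PlanarAdjacencyAnnotator annotator(PlanarAdjacencyAnnotator::Direct);
//   annotator.annotate(crag, volumes);          // find leaf-level adjacencies
//   annotator.propagateLeafAdjacencies(crag);   // lift them to super nodes
//
// Afterwards, the log above reports how many super node adjacency edges were
// added.
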
std::vector<Crag::CragNode>
HammingLoss::getPath(Crag::CragNode n, const Crag& crag) {

	std::vector<Crag::CragNode> path;

	// walk from n up to the root, collecting every candidate on the way
	while (true) {

		path.push_back(n);

		if (crag.isRootNode(n))
			break;

		n = (*crag.outArcs(n).begin()).target();
	}

	return path;
}
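
// Usage sketch (hypothetical helper, not part of the original source; it
// assumes getPath is accessible): the returned path always ends at the root
// ancestor of n, so the root can be read off its back.
Crag::CragNode
rootOf(Crag::CragNode n, const Crag& crag, HammingLoss& loss) {

	return loss.getPath(n, crag).back();
}
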
bool
HammingLoss::isBestEffort(Crag::CragNode n, const Crag& crag, const BestEffort& bestEffort) {

	if (!_balance)
		return bestEffort.selected(n);

	// if balanced, non-leaf nodes are not considered part of best-effort
	if (!crag.isLeafNode(n))
		return false;

	// is this node or any of its ancestors best-effort?
	while (true) {

		if (bestEffort.selected(n))
			return true;

		if (crag.isRootNode(n))
			return false;

		n = (*crag.outArcs(n).begin()).target();
	}
}
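
// Worked example of the balanced case (illustrative summary, not original
// code): with _balance set, only leaf nodes can be reported as best-effort,
// and a leaf counts as best-effort exactly if it or one of its ancestors up
// to the root is selected in bestEffort. Without _balance, the selection flag
// of the node itself is returned directly.
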
int main(int argc, char** argv) {

	UTIL_TIME_SCOPE("main");

	try {

		util::ProgramOptions::init(argc, argv);
		logger::LogManager::init();

		util::point<float, 3> resolution(
				optionResX,
				optionResY,
				optionResZ);
		util::point<float, 3> offset(
				optionOffsetX,
				optionOffsetY,
				optionOffsetZ);

		Crag* crag = new Crag();
		CragVolumes* volumes = new CragVolumes(*crag);
		Costs* mergeCosts = nullptr;

		CragImport import;

		bool alreadyDownsampled = false;

		if (optionMergeTree) {

			UTIL_TIME_SCOPE("read CRAG from mergetree");

			// get information about the image to read
			std::string mergeTreePath = optionMergeTree;

			if (boost::filesystem::is_directory(boost::filesystem::path(mergeTreePath))) {

				std::vector<std::string> files = getImageFiles(mergeTreePath);

				// process one image after another
				std::vector<std::unique_ptr<Crag>> crags(files.size());
				std::vector<std::unique_ptr<CragVolumes>> cragsVolumes;
				for (auto& c : crags) {
					c = std::unique_ptr<Crag>(new Crag);
					cragsVolumes.push_back(std::unique_ptr<CragVolumes>(new CragVolumes(*c)));
				}

				int i = 0;
				for (std::string file : files) {
					
					LOG_USER(logger::out) << "reading crag from " << file << std::endl;

					import.readCrag(file, *crags[i], *cragsVolumes[i], resolution, offset + util::point<float, 3>(0, 0, resolution.z()*i));
					i++;
				}

				if (optionDownsampleCrag) {

					UTIL_TIME_SCOPE("downsample CRAG");

					DownSampler downSampler(optionMinCandidateSize.as<int>());

					std::vector<std::unique_ptr<Crag>> downSampledCrags(crags.size());
					std::vector<std::unique_ptr<CragVolumes>> downSampledVolumes(crags.size());

					for (size_t i = 0; i < crags.size(); i++) {

						downSampledCrags[i]   = std::unique_ptr<Crag>(new Crag());
						downSampledVolumes[i] = std::unique_ptr<CragVolumes>(new CragVolumes(*downSampledCrags[i]));

						downSampler.process(*crags[i], *cragsVolumes[i], *downSampledCrags[i], *downSampledVolumes[i]);
					}

					std::swap(cragsVolumes, downSampledVolumes);
					std::swap(crags, downSampledCrags);

					// prevent another downsampling on the candidates added by 
					// the combiner
					alreadyDownsampled = true;
				}

				// combine crags
				CragStackCombiner combiner;
				combiner.combine(crags, cragsVolumes, *crag, *volumes);

			} else {

				import.readCrag(mergeTreePath, *crag, *volumes, resolution, offset);
			}

		} else if (optionSupervoxels.as<bool>() && (optionMergeHistory.as<bool>() || optionCandidateSegmentation.as<bool>())) {

			UTIL_TIME_SCOPE("read CRAG from merge history");

			if (optionMergeHistory) {

				std::string mergeHistoryPath = optionMergeHistory;

				if (boost::filesystem::is_directory(boost::filesystem::path(mergeHistoryPath))) {

					// get all merge-history files
					std::vector<std::string> mhFiles;
					for (boost::filesystem::directory_iterator i(mergeHistoryPath); i != boost::filesystem::directory_iterator(); i++)
						if (!boost::filesystem::is_directory(*i) && (
							i->path().extension() == ".txt" ||
							i->path().extension() == ".dat"
						))
							mhFiles.push_back(i->path().native());
					std::sort(mhFiles.begin(), mhFiles.end());

					// get all supervoxel files
					std::vector<std::string> svFiles = getImageFiles(optionSupervoxels);

					// process one image after another
					std::vector<std::unique_ptr<Crag>> crags(mhFiles.size());
					std::vector<std::unique_ptr<CragVolumes>> cragsVolumes;
					for (auto& c : crags) {
						c = std::unique_ptr<Crag>(new Crag);
						cragsVolumes.push_back(std::unique_ptr<CragVolumes>(new CragVolumes(*c)));
					}

					for (size_t i = 0; i < mhFiles.size(); i++) {

						LOG_USER(logger::out) << "reading crag from supervoxel file " << svFiles[i] << " and merge history " << mhFiles[i] << std::endl;

						// the per-section merge costs are read into a local
						// object here and are not stored in the stacked case
						Costs mergeCosts(*crags[i]);
						import.readCragFromMergeHistory(svFiles[i], mhFiles[i], *crags[i], *cragsVolumes[i], resolution, offset + util::point<float, 3>(0, 0, resolution.z()*i), mergeCosts);
					}

					if (optionDownsampleCrag) {

						UTIL_TIME_SCOPE("downsample CRAG");

						DownSampler downSampler(optionMinCandidateSize.as<int>());

						std::vector<std::unique_ptr<Crag>> downSampledCrags(crags.size());
						std::vector<std::unique_ptr<CragVolumes>> downSampledVolumes(crags.size());

						for (size_t i = 0; i < crags.size(); i++) {

							downSampledCrags[i]   = std::unique_ptr<Crag>(new Crag());
							downSampledVolumes[i] = std::unique_ptr<CragVolumes>(new CragVolumes(*downSampledCrags[i]));

							downSampler.process(*crags[i], *cragsVolumes[i], *downSampledCrags[i], *downSampledVolumes[i]);
						}

						std::swap(cragsVolumes, downSampledVolumes);
						std::swap(crags, downSampledCrags);

						// prevent another downsampling on the candidates added by 
						// the combiner
						alreadyDownsampled = true;
					}

					// combine crags
					CragStackCombiner combiner;
					combiner.combine(crags, cragsVolumes, *crag, *volumes);

				} else {

					mergeCosts = new Costs(*crag);
					import.readCragFromMergeHistory(optionSupervoxels, optionMergeHistory, *crag, *volumes, resolution, offset, *mergeCosts);

				}

			} else
				import.readCragFromCandidateSegmentation(optionSupervoxels, optionCandidateSegmentation, *crag, *volumes, resolution, offset);

		} else {

			LOG_ERROR(logger::out)
					<< "either mergetree or (supervoxels and one of mergeHistory "
					<< "or candidateSegmentation) has to be given to create a CRAG"
					<< std::endl;

			return 1;
		}

		if (optionDownsampleCrag && !alreadyDownsampled) {

			UTIL_TIME_SCOPE("downsample CRAG");

			Crag* downSampled = new Crag();
			CragVolumes* downSampledVolumes = new CragVolumes(*downSampled);

			if (optionMinCandidateSize) {

				DownSampler downSampler(optionMinCandidateSize.as<int>());
				downSampler.process(*crag, *volumes, *downSampled, *downSampledVolumes);
			} else {

				DownSampler downSampler;
				downSampler.process(*crag, *volumes, *downSampled, *downSampledVolumes);
			}

			delete crag;
			delete volumes;
			if (mergeCosts) {
				// the merge costs were read for the original CRAG and are not
				// valid for the downsampled candidates, so drop them
				delete mergeCosts;
				mergeCosts = nullptr;
			}
			crag = downSampled;
			volumes = downSampledVolumes;
		}

		{
			UTIL_TIME_SCOPE("find CRAG adjacencies");

			PlanarAdjacencyAnnotator annotator(PlanarAdjacencyAnnotator::Direct);
			annotator.annotate(*crag, *volumes);
		}

		// Statistics

		int numNodes = 0;
		int numRootNodes = 0;
		double sumSubsetDepth = 0;
		int maxSubsetDepth = 0;
		int minSubsetDepth = 1e6;

		for (Crag::NodeIt n(*crag); n != lemon::INVALID; ++n) {

			if (crag->isRootNode(n)) {

				int depth = crag->getLevel(n);

				sumSubsetDepth += depth;
				minSubsetDepth = std::min(minSubsetDepth, depth);
				maxSubsetDepth = std::max(maxSubsetDepth, depth);
				numRootNodes++;
			}

			numNodes++;
		}

		int numAdjEdges = 0;
		for (Crag::EdgeIt e(*crag); e != lemon::INVALID; ++e)
			numAdjEdges++;
		int numSubsetArcs = 0;
		for (Crag::SubsetArcIt e(*crag); e != lemon::INVALID; ++e)
			numSubsetArcs++;

		LOG_USER(logger::out) << "created CRAG" << std::endl;
		LOG_USER(logger::out) << "\t# nodes          : " << numNodes << std::endl;
		LOG_USER(logger::out) << "\t# root nodes     : " << numRootNodes << std::endl;
		LOG_USER(logger::out) << "\t# adjacencies    : " << numAdjEdges << std::endl;
		LOG_USER(logger::out) << "\t# subset edges   : " << numSubEdges << std::endl;
		LOG_USER(logger::out) << "\tmax subset depth : " << maxSubsetDepth << std::endl;
		LOG_USER(logger::out) << "\tmin subset depth : " << minSubsetDepth << std::endl;
		LOG_USER(logger::out) << "\tmean subset depth: " << sumSubsetDepth/numRootNodes << std::endl;

		// Store CRAG and volumes

		boost::filesystem::remove(optionProjectFile.as<std::string>());
		Hdf5CragStore store(optionProjectFile.as<std::string>());

		{
			UTIL_TIME_SCOPE("saving CRAG");

			store.saveCrag(*crag);
			store.saveVolumes(*volumes);
			if (mergeCosts)
				store.saveCosts(*crag, *mergeCosts, "merge-scores");
		}

		{
			UTIL_TIME_SCOPE("saving volumes");

			Hdf5VolumeStore volumeStore(optionProjectFile.as<std::string>());

			ExplicitVolume<float> intensities = readVolume<float>(getImageFiles(optionIntensities));
			intensities.setResolution(resolution);
			intensities.setOffset(offset);
			intensities.normalize();
			volumeStore.saveIntensities(intensities);

			if (optionGroundTruth) {

				ExplicitVolume<int> groundTruth = readVolume<int>(getImageFiles(optionGroundTruth));

				if (optionExtractGroundTruthLabels) {

					vigra::MultiArray<3, int> tmp(groundTruth.data().shape());
					vigra::labelMultiArrayWithBackground(
							groundTruth.data(),
							tmp);
					groundTruth.data() = tmp;
				}

				groundTruth.setResolution(resolution);
				groundTruth.setOffset(offset);
				volumeStore.saveGroundTruth(groundTruth);
			}

			if (optionBoundaries) {

				ExplicitVolume<float> boundaries = readVolume<float>(getImageFiles(optionBoundaries));
				boundaries.setResolution(resolution);
				boundaries.setOffset(offset);
				boundaries.normalize();
				volumeStore.saveBoundaries(boundaries);
			}

			bool atLeastOneAffinity = optionXAffinities || optionYAffinities || optionZAffinities;
			bool allAffinities = optionXAffinities && optionYAffinities && optionZAffinities;

			if (atLeastOneAffinity) {

				if (!allAffinities) {

					LOG_ERROR(logger::out)
							<< "Not all affinity volumes (x, y, and z) were provided. "
							<< "Affinities will be ignored." << std::endl;

				} else {

					ExplicitVolume<float> xAffinities = readVolume<float>(getImageFiles(optionXAffinities));
					ExplicitVolume<float> yAffinities = readVolume<float>(getImageFiles(optionYAffinities));
					ExplicitVolume<float> zAffinities = readVolume<float>(getImageFiles(optionZAffinities));

					volumeStore.saveAffinities(xAffinities, yAffinities, zAffinities);
				}
			}
		}

		delete crag;
		delete volumes;
		delete mergeCosts;

		if (optionImportTrainingResult) {

			LOG_USER(logger::out)
					<< "importing training results from "
					<< optionImportTrainingResult.as<std::string>()
					<< std::endl;

			Hdf5CragStore trainingStore(optionImportTrainingResult.as<std::string>());

			FeatureWeights weights;
			FeatureWeights min;
			FeatureWeights max;

			trainingStore.retrieveFeatureWeights(weights);
			trainingStore.retrieveFeaturesMin(min);
			trainingStore.retrieveFeaturesMax(max);

			store.saveFeatureWeights(weights);
			store.saveFeaturesMin(min);
			store.saveFeaturesMax(max);
		}

	} catch (Exception& e) {

		handleException(e, std::cerr);
	}

	return 0;
}