// Propagate per-superpixel saliency values to a full-resolution per-pixel map
// via high-dimensional filtering over position (+ optional color) features.
//   im   : input color image; per-pixel colors are the target color features
//   seg  : superpixel label map (same grid as the source features); seg(j,i)
//          indexes into stat/sal
//   stat : per-superpixel statistics; mean_rgb_ supplies source color features
//   sal  : one saliency value per superpixel id
// Returns a Mat_<float> of im's size holding the filtered, normalized saliency.
Mat_< float > Saliency::assignFilter( const Mat_< Vec3b >& im, const Mat_< int >& seg, const vector< SuperpixelStatistic >& stat, const std::vector< float >& sal ) const {
	// 5 floats per point: (x, y) scaled by alpha and (r, g, b) scaled by beta
	std::vector< float > source_features( seg.size().area()*5 ), target_features( im.size().area()*5 );
	// Homogeneous value per pixel: (saliency, 1). Filtering both channels and
	// dividing at the end (see below) normalizes by the accumulated weight.
	Mat_< Vec2f > data( seg.size() );
	// There is a typo in the paper: alpha and beta are actually squared, or directly applied to the values
	const float a = settings_.alpha_, b = settings_.beta_;
	
	const int D = 5;
	// Create the source features
	for( int j=0,k=0; j<seg.rows; j++ )
		for( int i=0; i<seg.cols; i++, k++ ) {
			int id = seg(j,i);
			data(j,i) = Vec2f( sal[id], 1 );
			
			source_features[D*k+0] = a * i;
			source_features[D*k+1] = a * j;
			if (D == 5) {
				// Source color comes from the superpixel's mean color
				source_features[D*k+2] = b * stat[id].mean_rgb_[0];
				source_features[D*k+3] = b * stat[id].mean_rgb_[1];
				source_features[D*k+4] = b * stat[id].mean_rgb_[2];
			}
		}
	// Create the target features
	for( int j=0,k=0; j<im.rows; j++ )
		for( int i=0; i<im.cols; i++, k++ ) {
			target_features[D*k+0] = a * i;
			target_features[D*k+1] = a * j;
			if (D == 5) {
				// Target color comes straight from the image pixel
				target_features[D*k+2] = b * im(j,i)[0];
				target_features[D*k+3] = b * im(j,i)[1];
				target_features[D*k+4] = b * im(j,i)[2];
			}
		}
	
	// Do the filtering [Filtering using the target features twice works slightly better, as the method described in our paper]
	// NOTE(review): Filter presumably splats on the source features and slices
	// at the target features (2 channels: value + weight) — confirm against the
	// Filter class implementation.
	if (settings_.use_spix_color_) {
		Filter filter( source_features.data(), seg.cols*seg.rows, target_features.data(), im.cols*im.rows, D );
		filter.filter( data.ptr<float>(), data.ptr<float>(), 2 );
	}
	else {
		// Same feature set on both sides; splat and slice at the target pixels
		Filter filter( target_features.data(), im.cols*im.rows, D );
		filter.filter( data.ptr<float>(), data.ptr<float>(), 2 );
	}
	
	// Normalize: filtered value divided by filtered weight (epsilon avoids 0/0)
	Mat_<float> r( im.size() );
	for( int j=0; j<im.rows; j++ )
		for( int i=0; i<im.cols; i++ )
			r(j,i) = data(j,i)[0] / (data(j,i)[1] + 1e-10);
	return r;
}
		//
		// Feature
		//
		// Given the source and target, find and place feature patches on the given output
		void featurePatchMatching(const Heightmap *source, const Heightmap *target, Heightmap *output, int patch_size) {
			
			// perform ppa on target
			ppa::RidgeGraph target_ridges = ppa::ppa_Tasse(target);
			FeatureGraph target_features(&target_ridges);

			// create a list of all candidates
			std::vector<Patch> candidates = candidatePatches(source, patch_size);
	
			// flip
			size_t patch_count = candidates.size();
			for (size_t i = 0; i < patch_count; i++) {
				candidates.push_back(candidates[i].flipHorz());
			}


			// for every tree in the graph
			for (IsolatedFeature *f : target_features.roots()) {


				// bredth-first traversal
				unordered_set<IsolatedFeature *> visited;
				queue<IsolatedFeature *> to_visit;

				to_visit.push(f);

				while (!to_visit.empty()) {



					//static int l=0;
					//if (l++ > 1) return;

					// debug only place one node

					IsolatedFeature *start = to_visit.front();
					to_visit.pop();

					if (visited.find(start) != visited.end()) continue;


					// process start node
					vector<vec2> iso_feature_out = outpaths(start, patch_size / 4, output);
					for (vec2 &v : iso_feature_out) v = patch_size/2 + v * patch_size / 4;
					placeFeature(candidates, start->position, iso_feature_out, patch_size, output);
					visited.insert(start);


					for (PathFeature *pf : start->edges) {
						
						IsolatedFeature *end = pf->otherFeature(start);
						if (visited.find(end) != visited.end()) continue;
						
						// process pf edge
						for (vec2 c : spacedPoints(pf->raw_path, patch_size/2)) {
							vector<vec2> path_feature_out = outpaths(c, patch_size / 4, pf, output);
							for (vec2 &v : path_feature_out) v = patch_size / 2  + v*patch_size / 4;
							placeFeature(candidates, c, path_feature_out, patch_size, output);
						}

						to_visit.push(end);
					}
				}
			}
		}