bool Worker::Thresholds::operator() (const Tractography::Streamline<>& in) const
{
  return ((in.size() <= max_num_points) &&
          (in.size() >= min_num_points) &&
          (in.weight <= max_weight) &&
          (in.weight >= min_weight));
}
bool WriteKernelDynamic::operator() (const Tracking::GeneratedTrack& in, Tractography::Streamline<>& out)
{
  out.index = writer.count;
  out.weight = 1.0;
  if (!WriteKernel::operator() (in)) {
    out.clear();      // Flag to indicate that tracking has completed, and threads should therefore terminate
    out.weight = 0.0; // Actually need to pass this down the queue so that the seeder thread receives it and knows to terminate
    return true;
  }
  out = in;
  return true;
}
bool Receiver::operator() (const Tractography::Streamline<>& in)
{
  auto display_func = [&](){ return printf ("%8" PRIu64 " read, %8" PRIu64 " written", total_count, count); };

  if (number && (count == number))
    return false;

  ++total_count;

  if (in.empty()) {
    writer (in);
    progress.update (display_func);
    return true;
  }

  if (in[0].valid()) {

    if (skip) {
      --skip;
      progress.update (display_func);
      return true;
    }
    writer (in);

  } else {

    // Explicitly handle case where the streamline has been cropped into multiple components
    // Worker class separates track segments using invalid points as delimiters
    Tractography::Streamline<> temp;
    temp.index = in.index;
    temp.weight = in.weight;
    for (Tractography::Streamline<>::const_iterator p = in.begin(); p != in.end(); ++p) {
      if (p->valid()) {
        temp.push_back (*p);
      } else if (temp.size()) {
        writer (temp);
        temp.clear();
      }
    }

  }

  ++count;
  progress.update (display_func);

  return (!(number && (count == number)));
}
void run ()
{
  const bool weights_provided = get_options ("tck_weights_in").size();

  float step_size = NAN;
  size_t count = 0, header_count = 0;
  float min_length = std::numeric_limits<float>::infinity();
  float max_length = 0.0f;
  double sum_lengths = 0.0, sum_weights = 0.0;
  std::vector<double> histogram;
  std::vector<LW> all_lengths;

  {
    Tractography::Properties properties;
    Tractography::Reader<float> reader (argument[0], properties);

    if (properties.find ("count") != properties.end())
      header_count = to<size_t> (properties["count"]);
    all_lengths.reserve (header_count);

    if (properties.find ("output_step_size") != properties.end())
      step_size = to<float> (properties["output_step_size"]);
    else
      step_size = to<float> (properties["step_size"]);
    if (!std::isfinite (step_size) || !step_size) {
      WARN ("Streamline step size undefined in header");
      if (get_options ("histogram").size())
        WARN ("Histogram will be generated using a 1mm interval");
    }

    std::unique_ptr<File::OFStream> dump;
    Options opt = get_options ("dump");
    if (opt.size())
      dump.reset (new File::OFStream (std::string(opt[0][0]), std::ios_base::out | std::ios_base::trunc));

    ProgressBar progress ("Reading track file... ", header_count);
    Tractography::Streamline<> tck;
    while (reader (tck)) {
      ++count;
      const float length = std::isfinite (step_size) ? tck.calc_length (step_size) : tck.calc_length();
      min_length = std::min (min_length, length);
      max_length = std::max (max_length, length);
      sum_lengths += tck.weight * length;
      sum_weights += tck.weight;
      all_lengths.push_back (LW (length, tck.weight));
      const size_t index = std::isfinite (step_size) ? std::round (length / step_size) : std::round (length);
      while (histogram.size() <= index)
        histogram.push_back (0.0);
      histogram[index] += tck.weight;
      if (dump)
        (*dump) << length << "\n";
      ++progress;
    }
  }

  if (histogram.size() && histogram.front())
    WARN ("read " + str(histogram.front()) + " zero-length tracks");
  if (count != header_count)
    WARN ("expected " + str(header_count) + " tracks according to header; read " + str(count));

  const float mean_length = sum_lengths / sum_weights;

  float median_length = 0.0f;
  if (weights_provided) {
    // Perform a weighted median calculation:
    // walk up the sorted lengths until the cumulative weight reaches half of the total
    std::sort (all_lengths.begin(), all_lengths.end());
    size_t median_index = 0;
    double sum = sum_weights - all_lengths[0].get_weight();
    while (sum > 0.5 * sum_weights)
      sum -= all_lengths[++median_index].get_weight();
    median_length = all_lengths[median_index].get_length();
  } else {
    median_length = Math::median (all_lengths).get_length();
  }

  double stdev = 0.0;
  for (std::vector<LW>::const_iterator i = all_lengths.begin(); i != all_lengths.end(); ++i)
    stdev += i->get_weight() * Math::pow2 (i->get_length() - mean_length);
  stdev = std::sqrt (stdev / (((count - 1) / float(count)) * sum_weights));

  const size_t width = 12;

  std::cout << " " << std::setw(width) << std::right << "mean"
            << " " << std::setw(width) << std::right << "median"
            << " " << std::setw(width) << std::right << "std. dev."
            << " " << std::setw(width) << std::right << "min"
            << " " << std::setw(width) << std::right << "max"
            << " " << std::setw(width) << std::right << "count" << "\n";

  std::cout << " " << std::setw(width) << std::right << (mean_length)
            << " " << std::setw(width) << std::right << (median_length)
            << " " << std::setw(width) << std::right << (stdev)
            << " " << std::setw(width) << std::right << (min_length)
            << " " << std::setw(width) << std::right << (max_length)
            << " " << std::setw(width) << std::right << (count) << "\n";

  Options opt = get_options ("histogram");
  if (opt.size()) {
    File::OFStream out (opt[0][0], std::ios_base::out | std::ios_base::trunc);
    if (!std::isfinite (step_size))
      step_size = 1.0f;
    if (weights_provided) {
      out << "Length,Sum_weights\n";
      for (size_t i = 0; i != histogram.size(); ++i)
        out << str(i * step_size) << "," << str(histogram[i]) << "\n";
    } else {
      out << "Length,Count\n";
      for (size_t i = 0; i != histogram.size(); ++i)
        out << str(i * step_size) << "," << str<size_t>(histogram[i]) << "\n";
    }
    out << "\n";
    out.close();
  }
}
bool Worker::operator() (const Tractography::Streamline<>& in, Tractography::Streamline<>& out) const
{
  out.clear();
  out.index = in.index;
  out.weight = in.weight;

  if (!thresholds (in)) {
    // Want to test thresholds before wasting time on upsampling; but if -inverse is set,
    //   still need to apply both the upsampler and downsampler before writing to output
    if (inverse) {
      std::vector< Point<float> > tck (in);
      upsampler (tck);
      downsampler (tck);
      tck.swap (out);
    }
    return true;
  }

  // Upsample track before mapping to ROIs
  std::vector< Point<float> > tck (in);
  upsampler (tck);

  // Assign to ROIs
  if (properties.include.size() || properties.exclude.size()) {

    include_visited.assign (properties.include.size(), false);

    for (std::vector< Point<float> >::const_iterator p = tck.begin(); p != tck.end(); ++p) {
      properties.include.contains (*p, include_visited);
      if (properties.exclude.contains (*p)) {
        if (inverse) {
          downsampler (tck);
          tck.swap (out);
        }
        return true;
      }
    }

    // Make sure all of the include regions were visited
    for (std::vector<bool>::const_iterator i = include_visited.begin(); i != include_visited.end(); ++i) {
      if (!*i) {
        if (inverse) {
          downsampler (tck);
          tck.swap (out);
        }
        return true;
      }
    }

  }

  if (properties.mask.size()) {

    // Split tck into separate tracks based on the mask
    std::vector< std::vector< Point<float> > > cropped_tracks;
    std::vector< Point<float> > temp;

    for (std::vector< Point<float> >::const_iterator p = tck.begin(); p != tck.end(); ++p) {
      const bool contains = properties.mask.contains (*p);
      if (contains == inverse) {
        if (temp.size() >= 2)
          cropped_tracks.push_back (temp);
        temp.clear();
      } else {
        temp.push_back (*p);
      }
    }
    if (temp.size() >= 2)
      cropped_tracks.push_back (temp);

    if (cropped_tracks.empty())
      return true;

    // Apply downsampler independently to each
    for (std::vector< std::vector< Point<float> > >::iterator i = cropped_tracks.begin(); i != cropped_tracks.end(); ++i)
      downsampler (*i);

    if (cropped_tracks.size() == 1) {
      cropped_tracks[0].swap (out);
      return true;
    }

    // Stitch back together in preparation for sending down queue as a single track
    out.push_back (Point<float>());
    for (std::vector< std::vector< Point<float> > >::const_iterator i = cropped_tracks.begin(); i != cropped_tracks.end(); ++i) {
      for (std::vector< Point<float> >::const_iterator p = i->begin(); p != i->end(); ++p)
        out.push_back (*p);
      out.push_back (Point<float>());
    }
    out.push_back (Point<float>());
    return true;

  } else {

    if (!inverse) {
      downsampler (tck);
      tck.swap (out);
    }
    return true;

  }
}