#include <OpenMS/KERNEL/Feature.h>
#include <OpenMS/KERNEL/FeatureMap.h>

#include <iostream>

using namespace OpenMS;
using namespace std;

int main()
{
  // Insert two features into a map and iterate over them.
  FeatureMap map;

  Feature feature;
  feature.setRT(15.0);
  feature.setMZ(571.3);
  map.push_back(feature); //append feature 1
  feature.setRT(23.3);
  feature.setMZ(1311.3);
  map.push_back(feature); //append feature 2

  // Iteration over FeatureMap
  for (auto it = map.begin(); it != map.end(); ++it)
  {
    cout << it->getRT() << " - " << it->getMZ() << endl;
  }

  // Calculate and output the ranges
  map.updateRanges();
  cout << "Int: " << map.getMinInt() << " - " << map.getMaxInt() << endl;
  cout << "RT:  " << map.getMin()[0] << " - " << map.getMax()[0] << endl;
  cout << "m/z: " << map.getMin()[1] << " - " << map.getMax()[1] << endl;

  // ... and many more
  return 0;
} //end of main
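Since FeatureMap exposes standard begin()/end() iterators over Feature objects, the iterator loop above can also be written as a range-based for. A minimal sketch reusing the map and the getters from the example above:

// Range-based for over the same FeatureMap as in the example above.
for (const Feature& f : map)
{
  cout << f.getRT() << " - " << f.getMZ() << endl;
}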
 ExitCodes main_(int, const char **)
 {
   String in = getStringOption_("in"), out = getStringOption_("out");
   FileTypes::Type in_type = FileHandler::getType(in);
   if (in_type == FileTypes::FEATUREXML)
   {
     FeatureMap<> features;
     FeatureXMLFile().load(in, features);
     for (FeatureMap<>::Iterator feat_it = features.begin();
          feat_it != features.end(); ++feat_it)
     {
       resolveConflict_(feat_it->getPeptideIdentifications());
     }
     addDataProcessing_(features,
                        getProcessingInfo_(DataProcessing::FILTERING));
     FeatureXMLFile().store(out, features);
   }
   else     // consensusXML
   {
     ConsensusMap consensus;
     ConsensusXMLFile().load(in, consensus);
     for (ConsensusMap::Iterator cons_it = consensus.begin();
          cons_it != consensus.end(); ++cons_it)
     {
       resolveConflict_(cons_it->getPeptideIdentifications());
     }
     addDataProcessing_(consensus,
                        getProcessingInfo_(DataProcessing::FILTERING));
     ConsensusXMLFile().store(out, consensus);
   }
   return EXECUTION_OK;
 }
Example #3
CAddonButtonMap::DriverMap CAddonButtonMap::CreateLookupTable(const FeatureMap& features)
{
  using namespace JOYSTICK;

  DriverMap driverMap;

  for (FeatureMap::const_iterator it = features.begin(); it != features.end(); ++it)
  {
    const ADDON::JoystickFeature& feature = it->second;

    switch (feature.Type())
    {
      case JOYSTICK_FEATURE_TYPE_SCALAR:
      {
        driverMap[CPeripheralAddonTranslator::TranslatePrimitive(feature.Primitive(JOYSTICK_SCALAR_PRIMITIVE))] = it->first;
        break;
      }

      case JOYSTICK_FEATURE_TYPE_ANALOG_STICK:
      {
        std::vector<JOYSTICK_FEATURE_PRIMITIVE> primitives = {
          JOYSTICK_ANALOG_STICK_UP,
          JOYSTICK_ANALOG_STICK_DOWN,
          JOYSTICK_ANALOG_STICK_RIGHT,
          JOYSTICK_ANALOG_STICK_LEFT,
        };

        for (auto primitive : primitives)
          driverMap[CPeripheralAddonTranslator::TranslatePrimitive(feature.Primitive(primitive))] = it->first;
        break;
      }

      case JOYSTICK_FEATURE_TYPE_ACCELEROMETER:
      {
        std::vector<JOYSTICK_FEATURE_PRIMITIVE> primitives = {
          JOYSTICK_ACCELEROMETER_POSITIVE_X,
          JOYSTICK_ACCELEROMETER_POSITIVE_Y,
          JOYSTICK_ACCELEROMETER_POSITIVE_Z,
        };

        for (auto primitive : primitives)
        {
          CDriverPrimitive translatedPrimitive = CPeripheralAddonTranslator::TranslatePrimitive(feature.Primitive(primitive));
          driverMap[translatedPrimitive] = it->first;

          // Map opposite semiaxis
          CDriverPrimitive oppositePrimitive = CDriverPrimitive(translatedPrimitive.Index(), 0, translatedPrimitive.SemiAxisDirection() * -1, 1);
          driverMap[oppositePrimitive] = it->first;
        }
        break;
      }
        
      default:
        break;
    }
  }
  
  return driverMap;
}
Example #4
 double length_sq() {
     double l = 0.0;
     FeatureIterator fit;
     for (fit = feat_map.begin(); fit != feat_map.end(); fit++) {
         Feature &f = fit->second;
         l += f.value * f.value;
     }
     return l;
 }
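A small companion that the squared-length accessor above suggests; length() is a hypothetical addition (a sketch, assuming the same class context with feat_map and length_sq() as defined in the snippet, plus <cmath> for std::sqrt):

 // Hypothetical companion to length_sq() above: the Euclidean norm
 // of the feature vector.
 double length() {
     return std::sqrt(length_sq());
 }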
 void SeedListGenerator::convertSeedList(const FeatureMap& features,
                                         SeedList& seeds)
 {
   seeds.clear();
   for (FeatureMap::ConstIterator feat_it = features.begin();
        feat_it != features.end(); ++feat_it)
   {
     DPosition<2> point(feat_it->getRT(), feat_it->getMZ());
     seeds.push_back(point);
   }
 }
Example #6
CAddonButtonMap::DriverMap CAddonButtonMap::CreateLookupTable(const FeatureMap& features)
{
  DriverMap driverMap;

  for (FeatureMap::const_iterator it = features.begin(); it != features.end(); ++it)
  {
    const ADDON::JoystickFeature& feature = it->second;

    switch (feature.Type())
    {
      case JOYSTICK_FEATURE_TYPE_SCALAR:
      {
        driverMap[CPeripheralAddonTranslator::TranslatePrimitive(feature.Primitive())] = it->first;
        break;
      }

      case JOYSTICK_FEATURE_TYPE_ANALOG_STICK:
      {
        driverMap[CPeripheralAddonTranslator::TranslatePrimitive(feature.Up())] = it->first;
        driverMap[CPeripheralAddonTranslator::TranslatePrimitive(feature.Down())] = it->first;
        driverMap[CPeripheralAddonTranslator::TranslatePrimitive(feature.Right())] = it->first;
        driverMap[CPeripheralAddonTranslator::TranslatePrimitive(feature.Left())] = it->first;
        break;
      }

      case JOYSTICK_FEATURE_TYPE_ACCELEROMETER:
      {
        CDriverPrimitive x_axis(CPeripheralAddonTranslator::TranslatePrimitive(feature.PositiveX()));
        CDriverPrimitive y_axis(CPeripheralAddonTranslator::TranslatePrimitive(feature.PositiveY()));
        CDriverPrimitive z_axis(CPeripheralAddonTranslator::TranslatePrimitive(feature.PositiveZ()));

        driverMap[x_axis] = it->first;
        driverMap[y_axis] = it->first;
        driverMap[z_axis] = it->first;

        CDriverPrimitive x_axis_opposite(x_axis.Index(), x_axis.SemiAxisDirection() * -1);
        CDriverPrimitive y_axis_opposite(y_axis.Index(), y_axis.SemiAxisDirection() * -1);
        CDriverPrimitive z_axis_opposite(z_axis.Index(), z_axis.SemiAxisDirection() * -1);

        driverMap[x_axis_opposite] = it->first;
        driverMap[y_axis_opposite] = it->first;
        driverMap[z_axis_opposite] = it->first;
        break;
      }
        
      default:
        break;
    }
  }
  
  return driverMap;
}
  void markFeatureLocations_(FeatureMap & feature_map, MSExperiment<> & exp, QImage & image, bool transpose, QColor color)
  {
    double xcoef = image.width(), ycoef = image.height();
    if (transpose)
    {
      xcoef /= exp.getMaxRT() - exp.getMinRT();
      ycoef /= exp.getMaxMZ() - exp.getMinMZ();
    }
    else
    {
      xcoef /= exp.getMaxMZ() - exp.getMinMZ();
      ycoef /= exp.getMaxRT() - exp.getMinRT();
    }

    for (FeatureMap::Iterator feat_iter = feature_map.begin();
         feat_iter != feature_map.end(); ++feat_iter)
    {
      const ConvexHull2D convex_hull = feat_iter->getConvexHull();
      DBoundingBox<2> box = convex_hull.getBoundingBox();
      double rt = feat_iter->getRT();
      double mz = feat_iter->getMZ();
      double lower_mz = box.minY();
      double lower_rt = box.minX();
      double upper_mz = box.maxY();
      double upper_rt = box.maxX();

      int lx, ly, ux, uy, cx, cy;
      if (transpose)
      {
        lx = int(xcoef * (lower_rt - exp.getMinRT()));
        ly = int(ycoef * (exp.getMaxMZ() - lower_mz));
        ux = int(xcoef * (upper_rt - exp.getMinRT()));
        uy = int(ycoef * (exp.getMaxMZ() - upper_mz));
        cx = int(xcoef * (rt - exp.getMinRT()));
        cy = int(ycoef * (mz - lower_mz));
      }
      else
      {
        lx = int(xcoef * (lower_mz - exp.getMinMZ()));
        ly = int(ycoef * (exp.getMaxRT() - lower_rt));
        ux = int(xcoef * (upper_mz - exp.getMinMZ()));
        uy = int(ycoef * (exp.getMaxRT() - upper_rt));
        cx = int(xcoef * (mz - exp.getMinMZ()));
        cy = int(ycoef * (exp.getMaxRT() - rt));
      }

      addFeatureBox_(ly, lx, uy, ux, image, color);
      addPoint_(cx, cy, image, Qt::black); // mark center
    }
  }
Example #8
  void MapAlignmentTransformer::transformSingleFeatureMap(FeatureMap<> & fmap,
                                                          const TransformationDescription & trafo)
  {
    for (vector<Feature>::iterator fmit = fmap.begin(); fmit != fmap.end(); ++fmit)
    {
      applyToFeature_(*fmit, trafo);
    }

    // adapt RT values of unassigned peptides:
    if (!fmap.getUnassignedPeptideIdentifications().empty())
    {
      transformSinglePeptideIdentification(
        fmap.getUnassignedPeptideIdentifications(), trafo);
    }
  }
void QuantitativeExperimentalDesign::mergeFeatureMaps_(FeatureMap<> & out, const String & experiment, StringList & file_paths)
{
    FeatureMap<> map;

    LOG_INFO << "Merge feature maps: " << endl;
    UInt counter = 1;
    for (StringList::Iterator file_it = file_paths.begin(); file_it != file_paths.end(); ++file_it, ++counter)
    {
        //load should clear the map
        FeatureXMLFile().load(*file_it, map);
        for (FeatureMap<>::iterator it = map.begin(); it != map.end(); ++it)
        {
            it->setMetaValue("experiment", DataValue(experiment));
        }
        out += map;
    }
}
  void MapAlignmentTransformer::transformRetentionTimes(
    FeatureMap& fmap, const TransformationDescription& trafo,
    bool store_original_rt)
  {
    for (vector<Feature>::iterator fmit = fmap.begin(); fmit != fmap.end();
         ++fmit)
    {
      applyToFeature_(*fmit, trafo, store_original_rt);
    }

    // adapt RT values of unassigned peptides:
    if (!fmap.getUnassignedPeptideIdentifications().empty())
    {
      transformRetentionTimes(fmap.getUnassignedPeptideIdentifications(), trafo,
                              store_original_rt);
    }
  }
#include <OpenMS/KERNEL/Feature.h>
#include <OpenMS/KERNEL/FeatureMap.h>

#include <iostream>

using namespace OpenMS;
using namespace std;

int main()
{
  FeatureMap map;

  Feature feature;
  feature.setRT(15.0);
  feature.setMZ(571.3);
  map.push_back(feature); //append feature 1
  feature.setRT(23.3);
  feature.setMZ(1311.3);
  map.push_back(feature); //append feature 2


  for (FeatureMap::Iterator it = map.begin(); it != map.end(); ++it)
  {
    cout << it->getRT() << " - " << it->getMZ() << endl;
  }

  return 0;
} //end of main
  ExitCodes main_(int, const char**)
  {
    //input file names
    String in = getStringOption_("in");
    String out = getStringOption_("out");
    String out_mzq = getStringOption_("out_mzq");

    //prevent loading of fragment spectra
    PeakFileOptions options;
    options.setMSLevels(vector<Int>(1, 1));

    //reading input data
    MzMLFile f;
    f.getOptions() = options;
    f.setLogType(log_type_);

    PeakMap exp;
    f.load(in, exp);
    exp.updateRanges();

    if (exp.getSpectra().empty())
    {
      throw OpenMS::Exception::FileEmpty(__FILE__, __LINE__, __FUNCTION__, "Error: No MS1 spectra in input file.");
    }

    // determine type of spectral data (profile or centroided)
    SpectrumSettings::SpectrumType  spectrum_type = exp[0].getType();

    if (spectrum_type == SpectrumSettings::RAWDATA)
    {
      if (!getFlag_("force"))
      {
        throw OpenMS::Exception::IllegalArgument(__FILE__, __LINE__, __FUNCTION__, "Error: Profile data provided but centroided spectra expected. To enforce processing of the data set the -force flag.");
      }
    }

    //load seeds
    FeatureMap seeds;
    if (getStringOption_("seeds") != "")
    {
      FeatureXMLFile().load(getStringOption_("seeds"), seeds);
    }

    //setup of FeatureFinder
    FeatureFinder ff;
    ff.setLogType(log_type_);

    // A map for the resulting features
    FeatureMap features;

    // get parameters specific for the feature finder
    Param feafi_param = getParam_().copy("algorithm:", true);
    writeDebug_("Parameters passed to FeatureFinder", feafi_param, 3);

    // Apply the feature finder
    ff.run(FeatureFinderAlgorithmPicked::getProductName(), exp, features, feafi_param, seeds);
    features.applyMemberFunction(&UniqueIdInterface::setUniqueId);

    // DEBUG
    if (debug_level_ > 10)
    {
      FeatureMap::Iterator it;
      for (it = features.begin(); it != features.end(); ++it)
      {
        if (!it->isMetaEmpty())
        {
          vector<String> keys;
          it->getKeys(keys);
          LOG_INFO << "Feature " << it->getUniqueId() << endl;
          for (Size i = 0; i < keys.size(); i++)
          {
            LOG_INFO << "  " << keys[i] << " = " << it->getMetaValue(keys[i]) << endl;
          }
        }
      }
    }

    //-------------------------------------------------------------
    // writing files
    //-------------------------------------------------------------

    //annotate output with data processing info
    addDataProcessing_(features, getProcessingInfo_(DataProcessing::QUANTITATION));

    // write features to user specified output file
    FeatureXMLFile map_file;

    // Remove detailed convex hull information and subordinate features
    // (unless requested otherwise) to reduce file size of feature files
    // unless debugging is turned on.
    if (debug_level_ < 5)
    {
      FeatureMap::Iterator it;
      for (it = features.begin(); it != features.end(); ++it)
      {
        it->getConvexHull().expandToBoundingBox();
        for (Size i = 0; i < it->getConvexHulls().size(); ++i)
        {
          it->getConvexHulls()[i].expandToBoundingBox();
        }
        it->getSubordinates().clear();
      }
    }

    map_file.store(out, features);

    if (!out_mzq.trim().empty())
    {
      MSQuantifications msq(features, exp.getExperimentalSettings(), exp[0].getDataProcessing());
      msq.assignUIDs();
      MzQuantMLFile file;
      file.store(out_mzq, msq);
    }

    return EXECUTION_OK;
  }
Example #13
  ExitCodes main_(int, const char**)
  {

    //-------------------------------------------------------------
    // parameter handling
    //-------------------------------------------------------------
    // file list
    StringList file_list = getStringList_("in");

    // file type
    FileHandler file_handler;
    FileTypes::Type force_type;
    if (getStringOption_("in_type").size() > 0)
    {
      force_type = FileTypes::nameToType(getStringOption_("in_type"));
    }
    else
    {
      force_type = file_handler.getType(file_list[0]);
    }

    // output file names and types
    String out_file = getStringOption_("out");

    bool annotate_file_origin =  getFlag_("annotate_file_origin");
    rt_gap_ = getDoubleOption_("rt_concat:gap");
    vector<String> trafo_out = getStringList_("rt_concat:trafo_out");
    if (trafo_out.empty())
    {
      // resize now so we don't have to worry about indexing out of bounds:
      trafo_out.resize(file_list.size());
    }
    else if (trafo_out.size() != file_list.size())
    {
      writeLog_("Error: Number of transformation output files must equal the number of input files (parameters 'rt_concat:trafo_out'/'in')!");
      return ILLEGAL_PARAMETERS;
    }

    //-------------------------------------------------------------
    // calculations
    //-------------------------------------------------------------

    if (force_type == FileTypes::FEATUREXML)
    {
      FeatureMap out;
      FeatureXMLFile fh;
      for (Size i = 0; i < file_list.size(); ++i)
      {
        FeatureMap map;
        fh.load(file_list[i], map);

        if (annotate_file_origin)
        {
          for (FeatureMap::iterator it = map.begin(); it != map.end(); ++it)
          {
            it->setMetaValue("file_origin", DataValue(file_list[i]));
          }
        }

        if (rt_gap_ > 0.0) // concatenate in RT
        {
          adjustRetentionTimes_(map, trafo_out[i], i == 0);
        }

        out += map;
      }

      //-------------------------------------------------------------
      // writing output
      //-------------------------------------------------------------

      // annotate output with data processing info
      addDataProcessing_(out, getProcessingInfo_(DataProcessing::FORMAT_CONVERSION));

      fh.store(out_file, out);
    }

    else if (force_type == FileTypes::CONSENSUSXML)
    {
      ConsensusMap out;
      ConsensusXMLFile fh;
      fh.load(file_list[0], out);
      // skip first file
      for (Size i = 1; i < file_list.size(); ++i)
      {
        ConsensusMap map;
        fh.load(file_list[i], map);

        if (annotate_file_origin)
        {
          for (ConsensusMap::iterator it = map.begin(); it != map.end(); ++it)
          {
            it->setMetaValue("file_origin", DataValue(file_list[i]));
          }
        }

        if (rt_gap_ > 0.0) // concatenate in RT
        {
          adjustRetentionTimes_(map, trafo_out[i], i == 0);
        }

        out += map;
      }

      //-------------------------------------------------------------
      // writing output
      //-------------------------------------------------------------

      // annotate output with data processing info
      addDataProcessing_(out, getProcessingInfo_(DataProcessing::FORMAT_CONVERSION));

      fh.store(out_file, out);
    }

    else if (force_type == FileTypes::TRAML)
    {
      TargetedExperiment out;
      TraMLFile fh;
      for (Size i = 0; i < file_list.size(); ++i)
      {
        TargetedExperiment map;
        fh.load(file_list[i], map);
        out += map;
      }

      //-------------------------------------------------------------
      // writing output
      //-------------------------------------------------------------

      // annotate output with data processing info
      Software software;
      software.setName("FileMerger");
      software.setVersion(VersionInfo::getVersion());
      out.addSoftware(software);

      fh.store(out_file, out);
    }
    else // raw data input (e.g. mzML)
    {
      // RT
      bool rt_auto_number = getFlag_("raw:rt_auto");
      bool rt_filename = getFlag_("raw:rt_filename");
      bool rt_custom = false;
      DoubleList custom_rts = getDoubleList_("raw:rt_custom");
      if (!custom_rts.empty())
      {
        rt_custom = true;
        if (custom_rts.size() != file_list.size())
        {
          writeLog_("Custom retention time list (parameter 'raw:rt_custom') must have as many elements as there are input files (parameter 'in')!");
          return ILLEGAL_PARAMETERS;
        }
      }

      // MS level
      Int ms_level = getIntOption_("raw:ms_level");

      MSExperiment<> out;
      UInt rt_auto = 0;
      UInt native_id = 0;
      for (Size i = 0; i < file_list.size(); ++i)
      {
        String filename = file_list[i];

        // load file
        force_type = file_handler.getType(file_list[i]);
        MSExperiment<> in;
        file_handler.loadExperiment(filename, in, force_type, log_type_);

        if (in.empty() && in.getChromatograms().empty())
        {
          writeLog_(String("Warning: Empty file '") + filename + "'!");
          continue;
        }
        out.reserve(out.size() + in.size());

        // warn if custom RT and more than one scan in input file
        if (rt_custom && in.size() > 1)
        {
          writeLog_(String("Warning: More than one scan in file '") + filename + "'! All scans will have the same retention time!");
        }

        // handle special raw data options:
        for (MSExperiment<>::iterator spec_it = in.begin();
             spec_it != in.end(); ++spec_it)
        {
          float rt_final = spec_it->getRT();
          if (rt_auto_number)
          {
            rt_final = ++rt_auto;
          }
          else if (rt_custom)
          {
            rt_final = custom_rts[i];
          }
          else if (rt_filename)
          {
            static const boost::regex re("rt(\\d+(\\.\\d+)?)");
            boost::smatch match;
            bool found = boost::regex_search(filename, match, re);
            if (found)
            {
              rt_final = String(match[1]).toFloat();
            }
            else
            {
              writeLog_("Warning: could not extract retention time from filename '" + filename + "'");
            }
          }

          // none of the rt methods were successful
          if (rt_final < 0)
          {
            writeLog_(String("Warning: No valid retention time for output scan '") + rt_auto + "' from file '" + filename + "'");
          }

          spec_it->setRT(rt_final);
          spec_it->setNativeID("spectrum=" + String(native_id));
          if (ms_level > 0)
          {
            spec_it->setMSLevel(ms_level);
          }
          ++native_id;
        }

        // If there is only one spectrum, annotate it directly; for more spectra, just record the source file and leave the spectra unannotated (to avoid a long, redundant list of sourceFiles)
        if (in.size() == 1)
        {
          in[0].setSourceFile(in.getSourceFiles()[0]);
          in.getSourceFiles().clear(); // remove the source file annotation from the experiment (it's stored in the spectrum anyway)
        }

        if (rt_gap_ > 0.0) // concatenate in RT
        {
          adjustRetentionTimes_(in, trafo_out[i], i == 0);
        }

        // add spectra to output
        for (MSExperiment<>::const_iterator spec_it = in.begin();
             spec_it != in.end(); ++spec_it)
        {
          out.addSpectrum(*spec_it);
        }
        // also add the chromatograms
        for (vector<MSChromatogram<ChromatogramPeak> >::const_iterator
               chrom_it = in.getChromatograms().begin(); chrom_it != 
               in.getChromatograms().end(); ++chrom_it)
        {
          out.addChromatogram(*chrom_it);
        }

        // copy experimental settings from first file
        if (i == 0)
        {
          out.ExperimentalSettings::operator=(in);
        }
        else // otherwise append
        {
          out.getSourceFiles().insert(out.getSourceFiles().end(), in.getSourceFiles().begin(), in.getSourceFiles().end()); // could be empty if the spectrum was annotated above, but that's fine then
        }
      }

      //-------------------------------------------------------------
      // writing output
      //-------------------------------------------------------------

      // annotate output with data processing info
      addDataProcessing_(out, getProcessingInfo_(DataProcessing::FORMAT_CONVERSION));

      MzMLFile f;
      f.setLogType(log_type_);
      f.store(out_file, out);
    }

    return EXECUTION_OK;
  }
Example #14
  ExitCodes main_(int, const char **)
  {
    String in = getStringOption_("in"), out = getStringOption_("out"),
           id_out = getStringOption_("id_out");

    if (out.empty() && id_out.empty())
    {
      throw Exception::RequiredParameterNotGiven(__FILE__, __LINE__,
                                                 __PRETTY_FUNCTION__,
                                                 "out/id_out");
    }

    vector<ProteinIdentification> proteins;
    vector<PeptideIdentification> peptides;

    FileTypes::Type in_type = FileHandler::getType(in);

    if (in_type == FileTypes::MZML)
    {
      MSExperiment<> experiment;
      MzMLFile().load(in, experiment);
      // what about unassigned peptide IDs?
      for (MSExperiment<>::Iterator exp_it = experiment.begin();
           exp_it != experiment.end(); ++exp_it)
      {
        peptides.insert(peptides.end(),
                        exp_it->getPeptideIdentifications().begin(),
                        exp_it->getPeptideIdentifications().end());
        exp_it->getPeptideIdentifications().clear();
      }
      experiment.getProteinIdentifications().swap(proteins);
      if (!out.empty())
      {
        addDataProcessing_(experiment,
                           getProcessingInfo_(DataProcessing::FILTERING));
        MzMLFile().store(out, experiment);
      }
    }
    else if (in_type == FileTypes::FEATUREXML)
    {
      FeatureMap features;
      FeatureXMLFile().load(in, features);
      features.getUnassignedPeptideIdentifications().swap(peptides);
      for (FeatureMap::Iterator feat_it = features.begin();
           feat_it != features.end(); ++feat_it)
      {
        peptides.insert(peptides.end(),
                        feat_it->getPeptideIdentifications().begin(),
                        feat_it->getPeptideIdentifications().end());
        feat_it->getPeptideIdentifications().clear();
      }
      features.getProteinIdentifications().swap(proteins);
      if (!out.empty())
      {
        addDataProcessing_(features,
                           getProcessingInfo_(DataProcessing::FILTERING));
        FeatureXMLFile().store(out, features);
      }
    }
    else         // consensusXML
    {
      ConsensusMap consensus;
      ConsensusXMLFile().load(in, consensus);
      consensus.getUnassignedPeptideIdentifications().swap(peptides);
      for (ConsensusMap::Iterator cons_it = consensus.begin();
           cons_it != consensus.end(); ++cons_it)
      {
        peptides.insert(peptides.end(),
                        cons_it->getPeptideIdentifications().begin(),
                        cons_it->getPeptideIdentifications().end());
        cons_it->getPeptideIdentifications().clear();
      }
      consensus.getProteinIdentifications().swap(proteins);
      if (!out.empty())
      {
        addDataProcessing_(consensus,
                           getProcessingInfo_(DataProcessing::FILTERING));
        ConsensusXMLFile().store(out, consensus);
      }
    }

    if (!id_out.empty())
    {
      // IDMapper can match a peptide ID to several overlapping features,
      // resulting in duplicates; this shouldn't be the case for peak data
      if (in_type != FileTypes::MZML) removeDuplicates_(peptides);
      IdXMLFile().store(id_out, proteins, peptides);
    }

    return EXECUTION_OK;
  }
Example #15
  ExitCodes common_main_(FeatureGroupingAlgorithm * algorithm,
                         bool labeled = false)
  {
    //-------------------------------------------------------------
    // parameter handling
    //-------------------------------------------------------------
    StringList ins;
    if (labeled) ins.push_back(getStringOption_("in"));
    else ins = getStringList_("in");
    String out = getStringOption_("out");

    //-------------------------------------------------------------
    // check for valid input
    //-------------------------------------------------------------
    // check if all input files have the correct type
    FileTypes::Type file_type = FileHandler::getType(ins[0]);
    for (Size i = 0; i < ins.size(); ++i)
    {
      if (FileHandler::getType(ins[i]) != file_type)
      {
        writeLog_("Error: All input files must be of the same type!");
        return ILLEGAL_PARAMETERS;
      }
    }

    //-------------------------------------------------------------
    // set up algorithm
    //-------------------------------------------------------------
    Param algorithm_param = getParam_().copy("algorithm:", true);
    writeDebug_("Used algorithm parameters", algorithm_param, 3);
    algorithm->setParameters(algorithm_param);

    //-------------------------------------------------------------
    // perform grouping
    //-------------------------------------------------------------
    // load input
    ConsensusMap out_map;
    StringList ms_run_locations;
    if (file_type == FileTypes::FEATUREXML)
    {
      vector<ConsensusMap > maps(ins.size());
      FeatureXMLFile f;
      FeatureFileOptions param = f.getOptions();
      // to save memory don't load convex hulls and subordinates
      param.setLoadSubordinates(false);
      param.setLoadConvexHull(false);
      f.setOptions(param);

      Size progress = 0;
      setLogType(ProgressLogger::CMD);
      startProgress(0, ins.size(), "reading input");
      for (Size i = 0; i < ins.size(); ++i)
      {
        FeatureMap tmp;
        f.load(ins[i], tmp);
        out_map.getFileDescriptions()[i].filename = ins[i];
        out_map.getFileDescriptions()[i].size = tmp.size();
        out_map.getFileDescriptions()[i].unique_id = tmp.getUniqueId();

        // copy over information on the primary MS run
        const StringList& ms_runs = tmp.getPrimaryMSRunPath();
        ms_run_locations.insert(ms_run_locations.end(), ms_runs.begin(), ms_runs.end());

        // to save memory, remove convex hulls, subordinates:
        for (FeatureMap::Iterator it = tmp.begin(); it != tmp.end();
             ++it)
        {
          it->getSubordinates().clear();
          it->getConvexHulls().clear();
          it->clearMetaInfo();
        }

        MapConversion::convert(i, tmp, maps[i]);

        maps[i].updateRanges();

        setProgress(progress++);
      }
      endProgress();

      // exception for "labeled" algorithms: copy file descriptions
      if (labeled)
      {
        out_map.getFileDescriptions()[1] = out_map.getFileDescriptions()[0];
        out_map.getFileDescriptions()[0].label = "light";
        out_map.getFileDescriptions()[1].label = "heavy";
      }

      // group
      algorithm->group(maps, out_map);
    }
    else
    {
      vector<ConsensusMap> maps(ins.size());
      ConsensusXMLFile f;
      for (Size i = 0; i < ins.size(); ++i)
      {
        f.load(ins[i], maps[i]);
        maps[i].updateRanges();
        // copy over information on the primary MS run
        const StringList& ms_runs = maps[i].getPrimaryMSRunPath();
        ms_run_locations.insert(ms_run_locations.end(), ms_runs.begin(), ms_runs.end());
      }
      // group
      algorithm->group(maps, out_map);

      // set file descriptions:
      bool keep_subelements = getFlag_("keep_subelements");
      if (!keep_subelements)
      {
        for (Size i = 0; i < ins.size(); ++i)
        {
          out_map.getFileDescriptions()[i].filename = ins[i];
          out_map.getFileDescriptions()[i].size = maps[i].size();
          out_map.getFileDescriptions()[i].unique_id = maps[i].getUniqueId();
        }
      }
      else
      {
        // components of the output map are not the input maps themselves, but
        // the components of the input maps:
        algorithm->transferSubelements(maps, out_map);
      }
    }

    // assign unique ids
    out_map.applyMemberFunction(&UniqueIdInterface::setUniqueId);

    // annotate output with data processing info
    addDataProcessing_(out_map,
                       getProcessingInfo_(DataProcessing::FEATURE_GROUPING));

    // set primary MS runs
    out_map.setPrimaryMSRunPath(ms_run_locations);

    // write output
    ConsensusXMLFile().store(out, out_map);

    // some statistics
    map<Size, UInt> num_consfeat_of_size;
    for (ConsensusMap::const_iterator cmit = out_map.begin();
         cmit != out_map.end(); ++cmit)
    {
      ++num_consfeat_of_size[cmit->size()];
    }

    LOG_INFO << "Number of consensus features:" << endl;
    for (map<Size, UInt>::reverse_iterator i = num_consfeat_of_size.rbegin();
         i != num_consfeat_of_size.rend(); ++i)
    {
      LOG_INFO << "  of size " << setw(2) << i->first << ": " << setw(6) 
               << i->second << endl;
    }
    LOG_INFO << "  total:      " << setw(6) << out_map.size() << endl;

    return EXECUTION_OK;
  }
  ExitCodes main_(int, const char **)
  {
    //input and output file names ..
    String in = getStringOption_("in");
    String out = getStringOption_("out");

    //prevent loading of fragment spectra
    PeakFileOptions options;
    options.setMSLevels(vector<Int>(1, 1));

    //reading input data
    MzMLFile f;
    f.getOptions() = options;
    f.setLogType(log_type_);

    PeakMap exp;
    f.load(in, exp);
    exp.updateRanges();

    //no seeds supported
    FeatureMap<> seeds;

    //setup of FeatureFinder
    FeatureFinder ff;
    ff.setLogType(log_type_);

    // A map for the resulting features
    FeatureMap<> features;

    // get parameters specific for the feature finder
    Param feafi_param = getParam_().copy("algorithm:", true);
    writeDebug_("Parameters passed to FeatureFinder", feafi_param, 3);

    // Apply the feature finder
    ff.run(FeatureFinderAlgorithmIsotopeWavelet<Peak1D, Feature>::getProductName(), exp, features, feafi_param, seeds);
    features.applyMemberFunction(&UniqueIdInterface::setUniqueId);

    // DEBUG
    if (debug_level_ > 10)
    {
      FeatureMap<>::Iterator it;
      for (it = features.begin(); it != features.end(); ++it)
      {
        if (!it->isMetaEmpty())
        {
          vector<String> keys;
          it->getKeys(keys);
          LOG_INFO << "Feature " << it->getUniqueId() << endl;
          for (Size i = 0; i < keys.size(); i++)
          {
            LOG_INFO << "  " << keys[i] << " = " << it->getMetaValue(keys[i]) << endl;
          }
        }
      }
    }

    //-------------------------------------------------------------
    // writing files
    //-------------------------------------------------------------

    //annotate output with data processing info
    addDataProcessing_(features, getProcessingInfo_(DataProcessing::QUANTITATION));

    // write features to user specified output file
    FeatureXMLFile map_file;
    map_file.store(out, features);

    return EXECUTION_OK;
  }
Example #17
 FeatureIterator begin() {
     return feat_map.begin();
 }
Example #18
  ExitCodes main_(int, const char**)
  {

    //-------------------------------------------------------------
    // parameter handling
    //-------------------------------------------------------------
    //file list
    StringList file_list = getStringList_("in");

    //file type
    FileHandler fh;
    FileTypes::Type force_type;
    if (getStringOption_("in_type").size() > 0)
    {
      force_type = FileTypes::nameToType(getStringOption_("in_type"));
    }
    else
    {
      force_type = fh.getType(file_list[0]);
    }

    //output file names and types
    String out_file = getStringOption_("out");

    //-------------------------------------------------------------
    // calculations
    //-------------------------------------------------------------

    bool annotate_file_origin =  getFlag_("annotate_file_origin");

    if (force_type == FileTypes::FEATUREXML)
    {
      FeatureMap<> out;
      for (Size i = 0; i < file_list.size(); ++i)
      {
        FeatureMap<> map;
        FeatureXMLFile fh;
        fh.load(file_list[i], map);

        if (annotate_file_origin)
        {
          for (FeatureMap<>::iterator it = map.begin(); it != map.end(); ++it)
          {
            it->setMetaValue("file_origin", DataValue(file_list[i]));
          }
        }
        out += map;
      }

      //-------------------------------------------------------------
      // writing output
      //-------------------------------------------------------------

      //annotate output with data processing info
      addDataProcessing_(out, getProcessingInfo_(DataProcessing::FORMAT_CONVERSION));

      FeatureXMLFile f;
      f.store(out_file, out);

    }
    else if (force_type == FileTypes::CONSENSUSXML)
    {
      ConsensusMap out;
      ConsensusXMLFile fh;
      fh.load(file_list[0], out);
      //skip first file
      for (Size i = 1; i < file_list.size(); ++i)
      {
        ConsensusMap map;
        ConsensusXMLFile fh;
        fh.load(file_list[i], map);

        if (annotate_file_origin)
        {
          for (ConsensusMap::iterator it = map.begin(); it != map.end(); ++it)
          {
            it->setMetaValue("file_origin", DataValue(file_list[i]));
          }
        }
        out += map;
      }

      //-------------------------------------------------------------
      // writing output
      //-------------------------------------------------------------

      //annotate output with data processing info
      addDataProcessing_(out, getProcessingInfo_(DataProcessing::FORMAT_CONVERSION));

      ConsensusXMLFile f;
      f.store(out_file, out);
    }
    else if (force_type == FileTypes::TRAML)
    {
      TargetedExperiment out;
      for (Size i = 0; i < file_list.size(); ++i)
      {
        TargetedExperiment map;
        TraMLFile fh;
        fh.load(file_list[i], map);
        out += map;
      }

      //-------------------------------------------------------------
      // writing output
      //-------------------------------------------------------------

      //annotate output with data processing info
      Software software;
      software.setName("FileMerger");
      software.setVersion(VersionInfo::getVersion());
      out.addSoftware(software);

      TraMLFile f;
      f.store(out_file, out);
    }
    else
    {
      // we might want to combine different types, thus we only
      // query in_type (which applies to all files)
      // and not the suffix or content of a single file
      force_type = FileTypes::nameToType(getStringOption_("in_type"));

      //rt
      bool rt_auto_number = getFlag_("raw:rt_auto");
      bool rt_filename = getFlag_("raw:rt_filename");
      bool rt_custom = false;
      DoubleList custom_rts = getDoubleList_("raw:rt_custom");
      if (custom_rts.size() != 0)
      {
        rt_custom = true;
        if (custom_rts.size() != file_list.size())
        {
          writeLog_("Custom retention time list must have as many elements as there are input files!");
          printUsage_();
          return ILLEGAL_PARAMETERS;
        }
      }

      //ms level
      bool user_ms_level = getFlag_("raw:user_ms_level");

      MSExperiment<> out;
      out.reserve(file_list.size());
      UInt rt_auto = 0;
      UInt native_id = 0;
      std::vector<MSChromatogram<ChromatogramPeak> > all_chromatograms;
      for (Size i = 0; i < file_list.size(); ++i)
      {
        String filename = file_list[i];

        //load file
        MSExperiment<> in;
        fh.loadExperiment(filename, in, force_type, log_type_);
        if (in.empty() && in.getChromatograms().empty())
        {
          writeLog_(String("Warning: Empty file '") + filename + "'!");
          continue;
        }
        out.reserve(out.size() + in.size());

        //warn if custom RT and more than one scan in input file
        if (rt_custom && in.size() > 1)
        {
          writeLog_(String("Warning: More than one scan in file '") + filename + "'! All scans will have the same retention time!");
        }

        for (MSExperiment<>::const_iterator it2 = in.begin(); it2 != in.end(); ++it2)
        {
          //handle rt
          Real rt_final = it2->getRT();
          if (rt_auto_number)
          {
            rt_final = ++rt_auto;
          }
          else if (rt_custom)
          {
            rt_final = custom_rts[i];
          }
          else if (rt_filename)
          {
            if (!filename.hasSubstring("rt"))
            {
              writeLog_(String("Warning: cannot guess retention time from filename as it does not contain 'rt'"));
            }
            for (Size i = 0; i < filename.size(); ++i)
            {
              if (filename[i] == 'r' && ++i != filename.size() && filename[i] == 't' && ++i != filename.size() && isdigit(filename[i]))
              {
                String rt;
                while (i != filename.size() && (filename[i] == '.' || isdigit(filename[i])))
                {
                  rt += filename[i++];
                }
                if (rt.size() > 0)
                {
                  // remove dot from rt3892.98.dta
                  //                          ^
                  if (rt[rt.size() - 1] == '.')
                  {
                    // remove last character
                    rt.erase(rt.end() - 1);
                  }
                }
                try
                {
                  float tmp = rt.toFloat();
                  rt_final = tmp;
                }
                catch (Exception::ConversionError)
                {
                  writeLog_(String("Warning: cannot convert the found retention time in a value '" + rt + "'."));
                }
              }
            }
          }

          // none of the rt methods were successful
          if (rt_final == -1)
          {
            writeLog_(String("Warning: No valid retention time for output scan '") + rt_auto + "' from file '" + filename + "'");
          }

          out.addSpectrum(*it2);
          out.getSpectra().back().setRT(rt_final);
          out.getSpectra().back().setNativeID(native_id);

          if (user_ms_level)
          {
            out.getSpectra().back().setMSLevel((int)getIntOption_("raw:ms_level"));
          }
          ++native_id;
        }

        // If there was only one spectrum, annotate it directly; for more spectra, just record the source file and leave the spectra unannotated (to avoid a long, redundant list of sourceFiles)
        if (in.size() == 1)
        {
          out.getSpectra().back().setSourceFile(in.getSourceFiles()[0]);
          in.getSourceFiles().clear();   // remove the source file annotation from the experiment (it's stored in the spectrum anyway)
        }
        // copy experimental settings from first file
        if (i == 0)
        {
          out.ExperimentalSettings::operator=(in);
        }
        else // otherwise append
        {
          out.getSourceFiles().insert(out.getSourceFiles().end(), in.getSourceFiles().begin(), in.getSourceFiles().end()); // could be empty if the spectrum was annotated above, but that's fine then
        }

        // also add the chromatograms
        for (std::vector<MSChromatogram<ChromatogramPeak> >::const_iterator it2 = in.getChromatograms().begin(); it2 != in.getChromatograms().end(); ++it2)
        {
          all_chromatograms.push_back(*it2);
        }

      }
      // set the chromatograms
      out.setChromatograms(all_chromatograms);

      //-------------------------------------------------------------
      // writing output
      //-------------------------------------------------------------

      //annotate output with data processing info
      addDataProcessing_(out, getProcessingInfo_(DataProcessing::FORMAT_CONVERSION));

      MzMLFile f;
      f.setLogType(log_type_);
      f.store(out_file, out);

    }

    return EXECUTION_OK;
  }
  // Searches for features with coordinates within the given tolerances in this map.
  // NOTE: Several features may lie at similar coordinates. In that case we cannot
  // be sure which one is correct, so we use the one with the strongest intensity.
  // (A compact helper sketch for the window test follows after this function.)
  bool readMapFile_(String filename, vector<double> & intensities,
                    CoordinateType tol_mz, CoordinateType tol_rt,
                    DPosition<2> fpos1, DPosition<2> fpos2)
  {

    if (!File::exists(filename))
    {
      cout << "File " << filename << " not found. " << endl;
      return false;
    }

    cout << "Reading from " << filename << endl;

    FeatureXMLFile map_file;
    FeatureMap map;
    map_file.load(filename, map);

    Feature * feat1 = 0;
    Feature * feat2 = 0;

    FeatureMap::iterator iter = map.begin();
    while (iter != map.end())
    {

//          cout << "Read: " << *iter << endl;

      if ((iter->getRT() <  fpos1[Feature::RT] + tol_rt) &&
          (iter->getRT() >  fpos1[Feature::RT] - tol_rt) &&
          (iter->getMZ() <  fpos1[Feature::MZ] + tol_mz) &&
          (iter->getMZ() >  fpos1[Feature::MZ] - tol_mz))
      {
//              cout << "Found feature1 at " << endl;
//              cout << iter->getRT() << " " << iter->getMZ()  << " " << iter->getIntensity() <<  endl;
        // feature at the expected position found; keep the more intense one
        if (!feat1)
        {
          feat1 = &(*iter);
        }
        else if (feat1->getIntensity() <  iter->getIntensity())
        {
          feat1 = &(*iter);
        }
        //              f1_sum += iter->getIntensity();

      }

      if ((iter->getRT() <  fpos2[Feature::RT] + tol_rt) &&
          (iter->getRT() >  fpos2[Feature::RT] - tol_rt) &&
          (iter->getMZ() <  fpos2[Feature::MZ] + tol_mz) &&
          (iter->getMZ() >  fpos2[Feature::MZ] - tol_mz))
      {
//              cout << "Found feature2 at " << endl;
//              cout << iter->getRT() << " " << iter->getMZ() << " " << iter->getIntensity() <<  endl;
        // same as above
        if (!feat2)
        {
          feat2 = &(*iter);
        }
        else if (feat2->getIntensity() <  iter->getIntensity())
        {
          feat2 = &(*iter);
        }

        //              f2_sum += iter->getIntensity();
      }

      ++iter;
    }       // end of while

    if (feat1 != 0 && feat2 != 0)      //(f1_sum != 0 && f2_sum != 0)
    {
      cout << "Feature 1: " << *feat1 << endl;
      cout << "Feature 2: " << *feat2 << endl;
      cout << "Intensity ratio : " << (feat1->getIntensity() / feat2->getIntensity()) << endl;
      intensities.push_back(feat1->getIntensity() / feat2->getIntensity());

      return true;
    }
    if (!feat1)
      writeDebug_(String("Feature 1 was not found. "), 1);

    if (!feat2)
      writeDebug_(String("Feature 2 was not found. "), 1);

    return false;
  }
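The four-way window test that appears twice in readMapFile_ above could be factored into a small predicate. The following is only a sketch: withinTolerance_ is a hypothetical helper name, and Feature and DPosition are the same OpenMS types already used in the function above (the tolerances are plain doubles, matching the CoordinateType used there).

  // Hypothetical helper (sketch): true if feature f lies within the given
  // RT/m/z window around pos. Feature::RT and Feature::MZ are the dimension
  // indices already used in readMapFile_ above.
  static bool withinTolerance_(const Feature& f, const DPosition<2>& pos,
                               double tol_rt, double tol_mz)
  {
    return (f.getRT() < pos[Feature::RT] + tol_rt) &&
           (f.getRT() > pos[Feature::RT] - tol_rt) &&
           (f.getMZ() < pos[Feature::MZ] + tol_mz) &&
           (f.getMZ() > pos[Feature::MZ] - tol_mz);
  }

With such a helper, each of the two if-blocks in the loop reduces to a single call, e.g. if (withinTolerance_(*iter, fpos1, tol_rt, tol_mz)).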