// Shared driver for the feature-grouping tools: reads the input maps, runs the
// given grouping algorithm, and writes the resulting consensus map.
//
// @param algorithm  grouping algorithm to apply (owned by the caller)
// @param labeled    true for "labeled" algorithms, which take a single input
//                   file and produce light/heavy channel descriptions
// @return EXECUTION_OK on success, ILLEGAL_PARAMETERS if the inputs mix file types
ExitCodes common_main_(FeatureGroupingAlgorithm * algorithm, bool labeled = false)
{
  //-------------------------------------------------------------
  // parameter handling
  //-------------------------------------------------------------
  StringList input_files;
  if (labeled)
  {
    input_files.push_back(getStringOption_("in"));
  }
  else
  {
    input_files = getStringList_("in");
  }
  String output_file = getStringOption_("out");

  //-------------------------------------------------------------
  // check for valid input
  //-------------------------------------------------------------
  // every input file must have the same type as the first one
  FileTypes::Type common_type = FileHandler::getType(input_files[0]);
  for (Size i = 0; i < input_files.size(); ++i)
  {
    if (FileHandler::getType(input_files[i]) != common_type)
    {
      writeLog_("Error: All input files must be of the same type!");
      return ILLEGAL_PARAMETERS;
    }
  }

  //-------------------------------------------------------------
  // set up algorithm
  //-------------------------------------------------------------
  Param algo_params = getParam_().copy("algorithm:", true);
  writeDebug_("Used algorithm parameters", algo_params, 3);
  algorithm->setParameters(algo_params);

  //-------------------------------------------------------------
  // perform grouping
  //-------------------------------------------------------------
  ConsensusMap result_map;
  StringList ms_run_paths; // primary MS run locations collected from all inputs
  if (common_type == FileTypes::FEATUREXML)
  {
    vector<ConsensusMap> input_maps(input_files.size());
    FeatureXMLFile feature_file;
    FeatureFileOptions load_opts = feature_file.getOptions();
    // to save memory, don't load convex hulls and subordinates
    load_opts.setLoadSubordinates(false);
    load_opts.setLoadConvexHull(false);
    feature_file.setOptions(load_opts);

    Size progress_count = 0;
    setLogType(ProgressLogger::CMD);
    startProgress(0, input_files.size(), "reading input");
    for (Size i = 0; i < input_files.size(); ++i)
    {
      FeatureMap current;
      feature_file.load(input_files[i], current);
      result_map.getFileDescriptions()[i].filename = input_files[i];
      result_map.getFileDescriptions()[i].size = current.size();
      result_map.getFileDescriptions()[i].unique_id = current.getUniqueId();
      // copy over information on the primary MS run
      const StringList& run_paths = current.getPrimaryMSRunPath();
      ms_run_paths.insert(ms_run_paths.end(), run_paths.begin(), run_paths.end());
      // to save memory, drop convex hulls, subordinates and meta info
      for (FeatureMap::Iterator feat_it = current.begin(); feat_it != current.end(); ++feat_it)
      {
        feat_it->getSubordinates().clear();
        feat_it->getConvexHulls().clear();
        feat_it->clearMetaInfo();
      }
      MapConversion::convert(i, current, input_maps[i]);
      input_maps[i].updateRanges();
      setProgress(progress_count++);
    }
    endProgress();

    // exception for "labeled" algorithms: one input file, two channels
    if (labeled)
    {
      result_map.getFileDescriptions()[1] = result_map.getFileDescriptions()[0];
      result_map.getFileDescriptions()[0].label = "light";
      result_map.getFileDescriptions()[1].label = "heavy";
    }

    // group
    algorithm->group(input_maps, result_map);
  }
  else
  {
    vector<ConsensusMap> input_maps(input_files.size());
    ConsensusXMLFile consensus_file;
    for (Size i = 0; i < input_files.size(); ++i)
    {
      consensus_file.load(input_files[i], input_maps[i]);
      input_maps[i].updateRanges();
      // copy over information on the primary MS run
      const StringList& run_paths = input_maps[i].getPrimaryMSRunPath();
      ms_run_paths.insert(ms_run_paths.end(), run_paths.begin(), run_paths.end());
    }

    // group
    algorithm->group(input_maps, result_map);

    // set file descriptions:
    bool keep_subelements = getFlag_("keep_subelements");
    if (!keep_subelements)
    {
      for (Size i = 0; i < input_files.size(); ++i)
      {
        result_map.getFileDescriptions()[i].filename = input_files[i];
        result_map.getFileDescriptions()[i].size = input_maps[i].size();
        result_map.getFileDescriptions()[i].unique_id = input_maps[i].getUniqueId();
      }
    }
    else
    {
      // components of the output map are not the input maps themselves, but
      // the components of the input maps:
      algorithm->transferSubelements(input_maps, result_map);
    }
  }

  // assign unique ids
  result_map.applyMemberFunction(&UniqueIdInterface::setUniqueId);

  // annotate output with data processing info
  addDataProcessing_(result_map, getProcessingInfo_(DataProcessing::FEATURE_GROUPING));

  // set primary MS runs
  result_map.setPrimaryMSRunPath(ms_run_paths);

  // write output
  ConsensusXMLFile().store(output_file, result_map);

  // some statistics: histogram of consensus feature sizes, largest first
  map<Size, UInt> size_histogram;
  for (ConsensusMap::const_iterator cf_it = result_map.begin(); cf_it != result_map.end(); ++cf_it)
  {
    ++size_histogram[cf_it->size()];
  }
  LOG_INFO << "Number of consensus features:" << endl;
  for (map<Size, UInt>::reverse_iterator hist_it = size_histogram.rbegin(); hist_it != size_histogram.rend(); ++hist_it)
  {
    LOG_INFO << " of size " << setw(2) << hist_it->first << ": " << setw(6) << hist_it->second << endl;
  }
  LOG_INFO << " total: " << setw(6) << result_map.size() << endl;

  return EXECUTION_OK;
}
m1.getDataProcessing().resize(1); m1.getProteinIdentifications().resize(1); m1.getUnassignedPeptideIdentifications().resize(1); m1.ensureUniqueId(); m2.setIdentifier ("321"); m2.getDataProcessing().resize(2); m2.getProteinIdentifications().resize(2); m2.getUnassignedPeptideIdentifications().resize(2); m2.push_back(Feature()); m2.push_back(Feature()); m1+=m2; TEST_EQUAL(m1.getIdentifier(), ""); TEST_EQUAL(UniqueIdInterface::isValid(m1.getUniqueId()), false); TEST_EQUAL(m1.getDataProcessing().size(), 3); TEST_EQUAL(m1.getProteinIdentifications().size(),3); TEST_EQUAL(m1.getUnassignedPeptideIdentifications().size(),3); TEST_EQUAL(m1.size(),3); END_SECTION START_SECTION((void sortByIntensity(bool reverse=false))) FeatureMap<> to_be_sorted; Feature f1; f1.setIntensity(10.0f); to_be_sorted.push_back(f1);