ExitCodes main_(int, const char**)
{
  //-------------------------------------------------------------
  // parsing parameters
  //-------------------------------------------------------------
  String in(getStringOption_("in"));
  String out(getStringOption_("out"));
  String pair_in(getStringOption_("pair_in"));
  String feature_out(getStringOption_("feature_out"));
  double precursor_mass_tolerance(getDoubleOption_("precursor_mass_tolerance"));
  double RT_tolerance(getDoubleOption_("RT_tolerance"));
  double expansion_range(getDoubleOption_("expansion_range"));
  Size max_isotope(getIntOption_("max_isotope"));
  Int debug(getIntOption_("debug"));

  //-------------------------------------------------------------
  // reading input
  //-------------------------------------------------------------
  PeakMap exp;
  MzMLFile().load(in, exp);
  exp.sortSpectra();
  exp.updateRanges();

  // read pair file
  ifstream is(pair_in.c_str());
  String line;
  vector<SILAC_pair> pairs;
  while (getline(is, line))
  {
    line.trim();
    if (line.empty() || line[0] == '#')
    {
      continue;
    }
    vector<String> split;
    line.split(' ', split);
    if (split.size() != 4)
    {
      cerr << "misformatted line ('" << line << "') should be (space separated) 'm/z-light m/z-heavy charge rt'" << endl;
      continue; // skip lines that do not have exactly four fields
    }
    SILAC_pair p;
    p.mz_light = split[0].toDouble();
    p.mz_heavy = split[1].toDouble();
    p.charge = split[2].toInt();
    p.rt = split[3].toDouble();
    pairs.push_back(p);
  }
  is.close();

  //-------------------------------------------------------------
  // calculations
  //-------------------------------------------------------------
  ConsensusMap results_map;
  results_map.getFileDescriptions()[0].label = "light";
  results_map.getFileDescriptions()[0].filename = in;
  results_map.getFileDescriptions()[1].label = "heavy";
  results_map.getFileDescriptions()[1].filename = in;

  FeatureFinderAlgorithmIsotopeWavelet iso_ff;
  Param ff_param(iso_ff.getParameters());
  ff_param.setValue("max_charge", 3);
  ff_param.setValue("intensity_threshold", -1.0);
  iso_ff.setParameters(ff_param);

  FeatureFinder ff;
  ff.setLogType(ProgressLogger::NONE);

  vector<SILACQuantitation> quantlets;
  FeatureMap all_features;
  for (PeakMap::ConstIterator it = exp.begin(); it != exp.end(); ++it)
  {
    if (it->size() == 0 || it->getMSLevel() != 1 || !it->getInstrumentSettings().getZoomScan())
    {
      continue;
    }

    PeakSpectrum new_spec = *it;

    // get spacing from data
    double min_spacing(numeric_limits<double>::max());
    double last_mz(0);
    for (PeakSpectrum::ConstIterator pit = new_spec.begin(); pit != new_spec.end(); ++pit)
    {
      if (pit->getMZ() - last_mz < min_spacing)
      {
        min_spacing = pit->getMZ() - last_mz;
      }
      last_mz = pit->getMZ();
    }
    writeDebug_("Min-spacing=" + String(min_spacing), 1);

    // split the spectrum into two subspectra, by using different hypothesis of
    // the SILAC pairs
    Size idx = 0;
    for (vector<SILAC_pair>::const_iterator pit = pairs.begin(); pit != pairs.end(); ++pit, ++idx)
    {
      // in RT window?
      if (fabs(it->getRT() - pit->rt) >= RT_tolerance)
      {
        continue;
      }

      // now excise the two ranges for the pair, complete isotope distributions of both, light and heavy
      PeakSpectrum light_spec, heavy_spec;
      light_spec.setRT(it->getRT());
      heavy_spec.setRT(it->getRT());
      for (PeakSpectrum::ConstIterator sit = it->begin(); sit != it->end(); ++sit)
      {
        double mz(sit->getMZ());
        if (mz - (pit->mz_light - precursor_mass_tolerance) > 0 &&
            (pit->mz_light + (double)max_isotope * Constants::NEUTRON_MASS_U / (double)pit->charge + precursor_mass_tolerance) - mz > 0)
        {
          light_spec.push_back(*sit);
        }
        if (mz - (pit->mz_heavy - precursor_mass_tolerance) > 0 &&
            (pit->mz_heavy + (double)max_isotope * Constants::NEUTRON_MASS_U / (double)pit->charge + precursor_mass_tolerance) - mz > 0)
        {
          heavy_spec.push_back(*sit);
        }
      }

      // expand light spectrum
      Peak1D p;
      p.setIntensity(0);
      if (light_spec.size() > 0)
      {
        double lower_border = light_spec.begin()->getMZ() - expansion_range;
        for (double pos = light_spec.begin()->getMZ(); pos > lower_border; pos -= min_spacing)
        {
          p.setMZ(pos);
          light_spec.insert(light_spec.begin(), p);
        }
        // upper border must lie above the last peak, otherwise the expansion loop never runs
        double upper_border = light_spec.rbegin()->getMZ() + expansion_range;
        for (double pos = light_spec.rbegin()->getMZ(); pos < upper_border; pos += min_spacing)
        {
          p.setMZ(pos);
          light_spec.push_back(p);
        }
      }
      if (heavy_spec.size() > 0)
      {
        // expand heavy spectrum
        double lower_border = heavy_spec.begin()->getMZ() - expansion_range;
        for (double pos = heavy_spec.begin()->getMZ(); pos > lower_border; pos -= min_spacing)
        {
          p.setMZ(pos);
          heavy_spec.insert(heavy_spec.begin(), p);
        }
        double upper_border = heavy_spec.rbegin()->getMZ() + expansion_range;
        for (double pos = heavy_spec.rbegin()->getMZ(); pos < upper_border; pos += min_spacing)
        {
          p.setMZ(pos);
          heavy_spec.push_back(p);
        }
      }

      // create experiments for feature finding
      PeakMap new_exp_light, new_exp_heavy;
      new_exp_light.addSpectrum(light_spec);
      new_exp_heavy.addSpectrum(heavy_spec);

      if (debug > 9)
      {
        MzMLFile().store(String(it->getRT()) + "_debugging_light.mzML", new_exp_light);
        MzMLFile().store(String(it->getRT()) + "_debugging_heavy.mzML", new_exp_heavy);
      }

      writeDebug_("Spectrum-id: " + it->getNativeID() + " @ " + String(it->getRT()) + "s", 1);

      new_exp_light.updateRanges();
      new_exp_heavy.updateRanges();

      FeatureMap feature_map_light, feature_map_heavy, seeds;
      if (light_spec.size() > 0)
      {
        ff.run("isotope_wavelet", new_exp_light, feature_map_light, ff_param, seeds);
      }
      writeDebug_("#light_features=" + String(feature_map_light.size()), 1);
      if (heavy_spec.size() > 0)
      {
        ff.run("isotope_wavelet", new_exp_heavy, feature_map_heavy, ff_param, seeds);
      }
      writeDebug_("#heavy_features=" + String(feature_map_heavy.size()), 1);

      // search if feature maps to m/z value of pair
      vector<MatchedFeature> light, heavy;
      for (FeatureMap::const_iterator fit = feature_map_light.begin(); fit != feature_map_light.end(); ++fit)
      {
        all_features.push_back(*fit);
        light.push_back(MatchedFeature(*fit, idx));
      }
      for (FeatureMap::const_iterator fit = feature_map_heavy.begin(); fit != feature_map_heavy.end(); ++fit)
      {
        all_features.push_back(*fit);
        heavy.push_back(MatchedFeature(*fit, idx));
      }

      if (!heavy.empty() && !light.empty())
      {
        writeDebug_("Finding best feature pair out of " + String(light.size()) + " light and " + String(heavy.size()) + " heavy matching features.", 1);
        // now find "good" matches, i.e. the pair with the smallest m/z deviation
        Feature best_light, best_heavy;
        double best_deviation(numeric_limits<double>::max());
        Size best_idx(pairs.size());
        for (vector<MatchedFeature>::const_iterator fit1 = light.begin(); fit1 != light.end(); ++fit1)
        {
          for (vector<MatchedFeature>::const_iterator fit2 = heavy.begin(); fit2 != heavy.end(); ++fit2)
          {
            if (fit1->idx != fit2->idx || fit1->f.getCharge() != fit2->f.getCharge() ||
                fabs(fit1->f.getMZ() - pairs[fit1->idx].mz_light) > precursor_mass_tolerance ||
                fabs(fit2->f.getMZ() - pairs[fit2->idx].mz_heavy) > precursor_mass_tolerance)
            {
              continue;
            }
            double deviation = fabs((fit1->f.getMZ() - pairs[fit1->idx].mz_light) - (fit2->f.getMZ() - pairs[fit2->idx].mz_heavy));
            if (deviation < best_deviation && deviation < precursor_mass_tolerance)
            {
              best_light = fit1->f;
              best_heavy = fit2->f;
              best_idx = fit1->idx;
              best_deviation = deviation; // remember the smallest deviation seen so far
            }
          }
        }
        if (best_idx == pairs.size())
        {
          continue;
        }
        writeDebug_("Ratio: " + String(best_heavy.getIntensity() / best_light.getIntensity()), 1);
        ConsensusFeature SILAC_feature;
        SILAC_feature.setMZ((best_light.getMZ() + best_heavy.getMZ()) / 2.0);
        SILAC_feature.setRT((best_light.getRT() + best_heavy.getRT()) / 2.0);
        SILAC_feature.insert(0, best_light);
        SILAC_feature.insert(1, best_heavy);
        results_map.push_back(SILAC_feature);
        quantlets.push_back(SILACQuantitation(best_light.getIntensity(), best_heavy.getIntensity(), best_idx));
      }
    }
  }

  // now calculate the final quantitation values from the quantlets
  Map<Size, vector<SILACQuantitation> > idx_to_quantlet;
  for (vector<SILACQuantitation>::const_iterator it = quantlets.begin(); it != quantlets.end(); ++it)
  {
    idx_to_quantlet[it->idx].push_back(*it);
  }

  for (Map<Size, vector<SILACQuantitation> >::ConstIterator it1 = idx_to_quantlet.begin(); it1 != idx_to_quantlet.end(); ++it1)
  {
    SILAC_pair silac_pair = pairs[it1->first];

    // simply add up all intensities and calculate the final ratio
    double light_sum(0), heavy_sum(0);
    vector<double> light_ints, heavy_ints, ratios;
    for (vector<SILACQuantitation>::const_iterator it2 = it1->second.begin(); it2 != it1->second.end(); ++it2)
    {
      light_sum += it2->light_intensity;
      light_ints.push_back(it2->light_intensity);
      heavy_sum += it2->heavy_intensity;
      heavy_ints.push_back(it2->heavy_intensity);
      // per-scan ratio, weighted by the summed intensity of the scan
      ratios.push_back(it2->heavy_intensity / it2->light_intensity * (it2->heavy_intensity + it2->light_intensity));
    }

    // absolute deviation of the intensity-weighted ratios, normalized by the summed intensity
    double absdev_ratios = Math::absdev(ratios.begin(), ratios.end()) / (heavy_sum + light_sum);
    cout << "Ratio: " << silac_pair.mz_light << " <-> " << silac_pair.mz_heavy << " @ " << silac_pair.rt
         << " s, ratio(h/l) " << heavy_sum / light_sum << " +/- " << absdev_ratios
         << " (#scans for quantitation: " << String(it1->second.size()) << ")" << endl;
  }

  //-------------------------------------------------------------
  // writing output
  //-------------------------------------------------------------
  if (feature_out != "")
  {
    FeatureXMLFile().store(feature_out, all_features);
  }
  writeDebug_("Writing output", 1);
  ConsensusXMLFile().store(out, results_map);

  return EXECUTION_OK;
}
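// --------------------------------------------------------------------------
// Sketch: the 'pair_in' file parsed above is plain text with one pair per
// line, four space-separated fields ("m/z-light m/z-heavy charge rt"), and
// '#' marking comment lines. The standalone example below (plain C++, no
// OpenMS dependencies; 'SilacPair' and 'parsePairLine' are illustrative names
// only, not part of the tool) shows how one such line maps onto the values
// the tool reads into a SILAC_pair.
// --------------------------------------------------------------------------
#include <iostream>
#include <sstream>
#include <string>
#include <vector>

// Illustrative stand-in for the tool's SILAC_pair struct.
struct SilacPair
{
  double mz_light = 0.0;
  double mz_heavy = 0.0;
  int charge = 0;
  double rt = 0.0;
};

// Parses one "m/z-light m/z-heavy charge rt" line; returns false for comments
// or lines that do not provide all four fields.
bool parsePairLine(const std::string& line, SilacPair& pair)
{
  if (line.empty() || line[0] == '#') return false;
  std::istringstream iss(line);
  return static_cast<bool>(iss >> pair.mz_light >> pair.mz_heavy >> pair.charge >> pair.rt);
}

int main()
{
  // Example content of a 'pair_in' file: a doubly charged pair eluting at 1230.5 s.
  std::vector<std::string> lines = {
    "# m/z-light m/z-heavy charge rt",
    "675.34 679.35 2 1230.5"
  };
  for (const std::string& l : lines)
  {
    SilacPair p;
    if (parsePairLine(l, p))
    {
      std::cout << "light=" << p.mz_light << " heavy=" << p.mz_heavy
                << " z=" << p.charge << " rt=" << p.rt << "\n";
    }
  }
  return 0;
}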
void CompNovoIonScoring::scoreSpectra(Map<double, IonScore> & ion_scores, PeakSpectrum & CID_spec, PeakSpectrum & ETD_spec, double precursor_weight, Size charge)
{
  // adds single charged variants of putative single charged ions
  //addSingleChargedIons_(ion_scores, CID_spec);

  for (PeakSpectrum::ConstIterator it = CID_spec.begin(); it != CID_spec.end(); ++it)
  {
    double it_pos(it->getPosition()[0]);
    IonScore ion_score;
    ion_scores[it_pos] = ion_score;
  }

  for (PeakSpectrum::ConstIterator it = CID_spec.begin(); it != CID_spec.end(); ++it)
  {
    ion_scores[it->getPosition()[0]].s_isotope_pattern_1 = scoreIsotopes_(CID_spec, it, ion_scores, 1);
    if (it->getPosition()[0] < precursor_weight / 2.0)
    {
      ion_scores[it->getPosition()[0]].s_isotope_pattern_2 = scoreIsotopes_(CID_spec, it, ion_scores, 2);
    }
    else
    {
      ion_scores[it->getPosition()[0]].s_isotope_pattern_2 = -1;
    }
  }

  // find possible supporting ions from ETD spec to CID spec
  scoreETDFeatures_(charge, precursor_weight, ion_scores, CID_spec, ETD_spec);

  // combine the features and give b-ion scores
  scoreWitnessSet_(charge, precursor_weight, ion_scores, CID_spec);

  for (Map<double, IonScore>::iterator it = ion_scores.begin(); it != ion_scores.end(); ++it)
  {
    it->second.score = it->second.s_witness;
  }

  MassDecompositionAlgorithm decomp_algo;

  // check whether a PRMNode_ can be decomposed into amino acids
  // rescore the peaks that cannot be possible y-ion candidates
  double max_decomp_weight((double)param_.getValue("max_decomp_weight"));
  for (Map<double, IonScore>::iterator it = ion_scores.begin(); it != ion_scores.end(); ++it)
  {
    if (it->first > 19.0 && (it->first - 19.0) < max_decomp_weight)
    {
      vector<MassDecomposition> decomps;
      decomp_algo.getDecompositions(decomps, it->first - 19.0);
#ifdef ION_SCORING_DEBUG
      cerr << "Decomps: " << it->first << " " << it->first - 19.0 << " " << decomps.size() << " " << it->second.score << endl;
#endif
      if (decomps.empty())
      {
        it->second.score = 0;
      }
    }

    if (it->first < precursor_weight && precursor_weight - it->first < max_decomp_weight)
    {
      vector<MassDecomposition> decomps;
      decomp_algo.getDecompositions(decomps, precursor_weight - it->first);
#ifdef ION_SCORING_DEBUG
      cerr << "Decomps: " << it->first << " " << precursor_weight - it->first << " " << decomps.size() << " " << it->second.score << endl;
#endif
      if (decomps.empty())
      {
        it->second.score = 0;
      }
    }
  }

  ion_scores[CID_spec.begin()->getPosition()[0]].score = 1;
  ion_scores[(CID_spec.end() - 1)->getPosition()[0]].score = 1;
}
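// --------------------------------------------------------------------------
// Sketch: scoreSpectra() above zeroes the score of any peak mass that cannot
// be written as a sum of amino-acid residue masses (it checks both
// 'mass - 19' and 'precursor_weight - mass' against max_decomp_weight via
// MassDecompositionAlgorithm). The standalone example below illustrates that
// decomposability test with a simple feasibility DP on a coarse mass grid;
// 'isDecomposable' is a hypothetical helper for illustration only, not the
// OpenMS implementation.
// --------------------------------------------------------------------------
#include <algorithm>
#include <cmath>
#include <iostream>
#include <vector>

// Can 'mass' (Da) be written as a sum of residue masses, up to 'tol' Da?
// Unbounded-knapsack feasibility over a 1 mDa grid (coarse, illustration only).
bool isDecomposable(double mass, double tol = 0.3)
{
  // Monoisotopic residue masses of the 20 standard amino acids (I and L share a mass).
  static const std::vector<double> residues = {
    57.02146, 71.03711, 87.03203, 97.05276, 99.06841, 101.04768, 103.00919,
    113.08406, 114.04293, 115.02694, 128.05858, 128.09496, 129.04259,
    131.04049, 137.05891, 147.06841, 156.10111, 163.06333, 186.07931
  };
  const double step = 0.001; // 1 mDa grid
  const int n = static_cast<int>(std::round((mass + tol) / step));
  std::vector<char> reachable(n + 1, 0);
  reachable[0] = 1;
  for (int i = 1; i <= n; ++i)
  {
    for (double r : residues)
    {
      const int j = i - static_cast<int>(std::round(r / step));
      if (j >= 0 && reachable[j]) { reachable[i] = 1; break; }
    }
  }
  const int lo = std::max(static_cast<int>(std::round((mass - tol) / step)), 0);
  for (int i = lo; i <= n; ++i)
  {
    if (reachable[i]) return true;
  }
  return false;
}

int main()
{
  std::cout << std::boolalpha
            << isDecomposable(128.06) << " "   // Gly + Ala (also Gln): true
            << isDecomposable(30.0) << "\n";   // below the smallest residue mass: false
  return 0;
}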
ExitCodes main_(int, const char**)
{
  // parsing parameters
  String in(getStringOption_("in"));
  String feature_in(getStringOption_("feature_in"));
  String out(getStringOption_("out"));
  double precursor_mass_tolerance(getDoubleOption_("precursor_mass_tolerance"));

  // reading input
  FileHandler fh;
  FileTypes::Type in_type = fh.getType(in);

  PeakMap exp;
  fh.loadExperiment(in, exp, in_type, log_type_, false, false);
  exp.sortSpectra();

  FeatureMap feature_map;
  if (feature_in != "")
  {
    FeatureXMLFile().load(feature_in, feature_map);
  }

  // calculations
  FeatureFinderAlgorithmIsotopeWavelet iso_ff;
  Param ff_param(iso_ff.getParameters());
  ff_param.setValue("max_charge", getIntOption_("max_charge"));
  ff_param.setValue("intensity_threshold", getDoubleOption_("intensity_threshold"));
  iso_ff.setParameters(ff_param);

  FeatureFinder ff;
  ff.setLogType(ProgressLogger::NONE);

  // drop empty spectra
  PeakMap exp2 = exp;
  exp2.clear(false);
  for (PeakMap::ConstIterator it = exp.begin(); it != exp.end(); ++it)
  {
    if (it->size() != 0)
    {
      exp2.addSpectrum(*it);
    }
  }
  exp = exp2;
  exp.updateRanges();

  // TODO check MS2 and MS1 counts
  ProgressLogger progresslogger;
  progresslogger.setLogType(log_type_);
  progresslogger.startProgress(0, exp.size(), "Correcting precursor masses");
  for (PeakMap::Iterator it = exp.begin(); it != exp.end(); ++it)
  {
    progresslogger.setProgress(it - exp.begin()); // report the number of processed spectra
    if (it->getMSLevel() != 2)
    {
      continue;
    }
    // find the preceding MS1 scan of the MS/MS scan
    PeakMap::Iterator ms1_it = it;
    while (ms1_it != exp.begin() && ms1_it->getMSLevel() != 1)
    {
      --ms1_it;
    }
    if (ms1_it == exp.begin() && ms1_it->getMSLevel() != 1)
    {
      writeLog_("Did not find an MS1 scan for the MS/MS scan at RT=" + String(it->getRT()));
      continue;
    }
    if (ms1_it->size() == 0)
    {
      writeDebug_("No peaks in scan at RT=" + String(ms1_it->getRT()) + String(", skipping"), 1);
      continue;
    }

    PeakMap::Iterator ms2_it = ms1_it;
    ++ms2_it;

    while (ms2_it != exp.end() && ms2_it->getMSLevel() == 2)
    {
      // first: error checks
      if (ms2_it->getPrecursors().empty())
      {
        writeDebug_("Warning: found no precursors of spectrum RT=" + String(ms2_it->getRT()) + ", skipping it.", 1);
        ++ms2_it;
        continue;
      }
      else if (ms2_it->getPrecursors().size() > 1)
      {
        writeLog_("Warning: found more than one precursor of spectrum RT=" + String(ms2_it->getRT()) + ", using first one.");
      }

      Precursor prec = *ms2_it->getPrecursors().begin();
      double prec_pos = prec.getMZ();

      PeakMap new_exp;
      // now excise a small region from the MS1 spec for the feature finder (isotope pattern must be covered...)
      PeakSpectrum zoom_spec;
      for (PeakSpectrum::ConstIterator pit = ms1_it->begin(); pit != ms1_it->end(); ++pit)
      {
        if (pit->getMZ() > prec_pos - 3 && pit->getMZ() < prec_pos + 3)
        {
          zoom_spec.push_back(*pit);
        }
      }
      new_exp.addSpectrum(zoom_spec);
      new_exp.updateRanges();

      FeatureMap features, seeds;
      ff.run("isotope_wavelet", new_exp, features, ff_param, seeds);
      if (features.empty())
      {
        writeDebug_("No features found for scan RT=" + String(ms1_it->getRT()), 1);
        ++ms2_it;
        continue;
      }

      double max_int(numeric_limits<double>::min());
      double min_dist(numeric_limits<double>::max());
      Size max_int_feat_idx(0);

      // pick the most intense feature within the precursor mass tolerance
      for (Size i = 0; i != features.size(); ++i)
      {
        if (fabs(features[i].getMZ() - prec_pos) < precursor_mass_tolerance && features[i].getIntensity() > max_int)
        {
          max_int_feat_idx = i;
          max_int = features[i].getIntensity();
          min_dist = fabs(features[i].getMZ() - prec_pos);
        }
      }

      writeDebug_(" max_int=" + String(max_int) + " mz=" + String(features[max_int_feat_idx].getMZ()) + " charge=" + String(features[max_int_feat_idx].getCharge()), 5);
      if (min_dist < precursor_mass_tolerance)
      {
        prec.setMZ(features[max_int_feat_idx].getMZ());
        prec.setCharge(features[max_int_feat_idx].getCharge());
        vector<Precursor> precs;
        precs.push_back(prec);
        ms2_it->setPrecursors(precs);
        writeDebug_("Correcting precursor mass of spectrum RT=" + String(ms2_it->getRT()) + " from " + String(prec_pos) + " to " + String(prec.getMZ()) + " (z=" + String(prec.getCharge()) + ")", 1);
      }

      ++ms2_it;
    }
    it = --ms2_it;
  }
  progresslogger.endProgress();

  // writing output
  fh.storeExperiment(out, exp, log_type_);

  return EXECUTION_OK;
}
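// --------------------------------------------------------------------------
// Sketch: the correction loop above accepts, among all features found within
// precursor_mass_tolerance of the recorded precursor m/z, the most intense
// one and overwrites the precursor m/z and charge with its values. The
// standalone example below (plain C++; 'SimpleFeature' and
// 'pickCorrectedPrecursor' are illustrative names only) shows that selection
// rule in isolation.
// --------------------------------------------------------------------------
#include <cmath>
#include <cstddef>
#include <iostream>
#include <limits>
#include <vector>

// Illustrative stand-in for a detected isotope-pattern feature.
struct SimpleFeature
{
  double mz;
  double intensity;
  int charge;
};

// Returns the index of the most intense feature within 'tol' of 'prec_mz',
// or -1 if no feature lies within the tolerance.
int pickCorrectedPrecursor(const std::vector<SimpleFeature>& features, double prec_mz, double tol)
{
  int best = -1;
  double best_int = -std::numeric_limits<double>::max();
  for (std::size_t i = 0; i < features.size(); ++i)
  {
    if (std::fabs(features[i].mz - prec_mz) < tol && features[i].intensity > best_int)
    {
      best = static_cast<int>(i);
      best_int = features[i].intensity;
    }
  }
  return best;
}

int main()
{
  std::vector<SimpleFeature> features = {
    {443.71, 1.2e5, 2}, {443.21, 3.5e4, 2}, {448.90, 9.9e5, 1}
  };
  const double recorded_mz = 443.70;
  const int idx = pickCorrectedPrecursor(features, recorded_mz, 0.05);
  if (idx >= 0)
  {
    std::cout << "corrected precursor: m/z=" << features[idx].mz
              << " z=" << features[idx].charge << "\n";
  }
  return 0;
}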
void CompNovoIdentificationCID::getIdentification(PeptideIdentification & id, const PeakSpectrum & CID_spec)
{
  //if (CID_spec.getPrecursors().begin()->getMZ() > 1000.0)
  //{
  //cerr << "Weight of precursor has been estimated to exceed 2000.0 Da which is the current limit" << endl;
  //return;
  //}

  PeakSpectrum new_CID_spec(CID_spec);
  windowMower_(new_CID_spec, 0.3, 1);

  Param zhang_param;
  zhang_param = zhang_.getParameters();
  zhang_param.setValue("tolerance", fragment_mass_tolerance_);
  zhang_param.setValue("use_gaussian_factor", "true");
  zhang_param.setValue("use_linear_factor", "false");
  zhang_.setParameters(zhang_param);

  Normalizer normalizer;
  Param n_param(normalizer.getParameters());
  n_param.setValue("method", "to_one");
  normalizer.setParameters(n_param);
  normalizer.filterSpectrum(new_CID_spec);

  Size charge(2);
  double precursor_weight(0); // [M+H]+
  if (!CID_spec.getPrecursors().empty())
  {
    // believe charge of spectrum?
    if (CID_spec.getPrecursors().begin()->getCharge() != 0)
    {
      charge = CID_spec.getPrecursors().begin()->getCharge();
    }
    else
    {
      // TODO estimate charge state
    }
    precursor_weight = CID_spec.getPrecursors().begin()->getMZ() * charge - ((charge - 1) * Constants::PROTON_MASS_U);
  }

  //cerr << "charge=" << charge << ", [M+H]=" << precursor_weight << endl;

  // now delete all peaks that are right of the estimated precursor weight
  Size peak_counter(0);
  for (PeakSpectrum::ConstIterator it = new_CID_spec.begin(); it != new_CID_spec.end(); ++it, ++peak_counter)
  {
    if (it->getPosition()[0] > precursor_weight)
    {
      break;
    }
  }
  if (peak_counter < new_CID_spec.size())
  {
    new_CID_spec.resize(peak_counter);
  }

  static double oxonium_mass = EmpiricalFormula("H2O+").getMonoWeight();

  Peak1D p;
  p.setIntensity(1);
  p.setPosition(oxonium_mass);
  new_CID_spec.push_back(p);

  p.setPosition(precursor_weight);
  new_CID_spec.push_back(p);

  // add complement to spectrum
  /*
  for (PeakSpectrum::ConstIterator it1 = CID_spec.begin(); it1 != CID_spec.end(); ++it1)
  {
    // get m/z of complement
    double mz_comp = precursor_weight - it1->getPosition()[0] + Constants::PROTON_MASS_U;

    // search if peaks are available that have similar m/z values
    Size count(0);
    bool found(false);
    for (PeakSpectrum::ConstIterator it2 = CID_spec.begin(); it2 != CID_spec.end(); ++it2, ++count)
    {
      if (fabs(mz_comp - it2->getPosition()[0]) < fragment_mass_tolerance)
      {
        // add peak intensity to corresponding peak in new_CID_spec
        new_CID_spec[count].setIntensity(new_CID_spec[count].getIntensity());
      }
    }
    if (!found)
    {
      // infer this peak
      Peak1D p;
      p.setIntensity(it1->getIntensity());
      p.setPosition(mz_comp);
      new_CID_spec.push_back(p);
    }
  }*/

  CompNovoIonScoringCID ion_scoring;
  Param ion_scoring_param(ion_scoring.getParameters());
  ion_scoring_param.setValue("fragment_mass_tolerance", fragment_mass_tolerance_);
  ion_scoring_param.setValue("precursor_mass_tolerance", precursor_mass_tolerance_);
  ion_scoring_param.setValue("decomp_weights_precision", decomp_weights_precision_);
  ion_scoring_param.setValue("double_charged_iso_threshold", (double)param_.getValue("double_charged_iso_threshold"));
  ion_scoring_param.setValue("max_isotope_to_score", param_.getValue("max_isotope_to_score"));
  ion_scoring_param.setValue("max_isotope", max_isotope_);
  ion_scoring.setParameters(ion_scoring_param);

  Map<double, IonScore> ion_scores;
  ion_scoring.scoreSpectrum(ion_scores, new_CID_spec, precursor_weight, charge);

  new_CID_spec.sortByPosition();

  /*
  cerr << "Size of ion_scores " << ion_scores.size() << endl;
  for (Map<double, IonScore>::const_iterator it = ion_scores.begin(); it != ion_scores.end(); ++it)
  {
    cerr << it->first << " " << it->second.score << endl;
  }*/

#ifdef WRITE_SCORED_SPEC
  PeakSpectrum filtered_spec(new_CID_spec);
  filtered_spec.clear();
  for (Map<double, CompNovoIonScoringCID::IonScore>::const_iterator it = ion_scores.begin(); it != ion_scores.end(); ++it)
  {
    Peak1D p;
    p.setIntensity(it->second.score);
    p.setPosition(it->first);
    filtered_spec.push_back(p);
  }
  DTAFile().store("spec_scored.dta", filtered_spec);
#endif

  set<String> sequences;
  getDecompositionsDAC_(sequences, 0, new_CID_spec.size() - 1, precursor_weight, new_CID_spec, ion_scores);

#ifdef SPIKE_IN
  sequences.insert("AFCVDGEGR");
  sequences.insert("APEFAAPWPDFVPR");
  sequences.insert("AVKQFEESQGR");
  sequences.insert("CCTESLVNR");
  sequences.insert("DAFLGSFLYEYSR");
  sequences.insert("DAIPENLPPLTADFAEDK");
  sequences.insert("DDNKVEDIWSFLSK");
  sequences.insert("DDPHACYSTVFDK");
  sequences.insert("DEYELLCLDGSR");
  sequences.insert("DGAESYKELSVLLPNR");
  sequences.insert("DGASCWCVDADGR");
  sequences.insert("DLFIPTCLETGEFAR");
  sequences.insert("DTHKSEIAHR");
  sequences.insert("DVCKNYQEAK");
  sequences.insert("EACFAVEGPK");
  sequences.insert("ECCHGDLLECADDR");
  sequences.insert("EFLGDKFYTVISSLK");
  sequences.insert("EFTPVLQADFQK");
  sequences.insert("ELFLDSGIFQPMLQGR");
  sequences.insert("ETYGDMADCCEK");
  sequences.insert("EVGCPSSSVQEMVSCLR");
  sequences.insert("EYEATLEECCAK");
  sequences.insert("FADLIQSGTFQLHLDSK");
  sequences.insert("FFSASCVPGATIEQK");
  sequences.insert("FLANVSTVLTSK");
  sequences.insert("FLSGSDYAIR");
  sequences.insert("FTASCPPSIK");
  sequences.insert("GAIEWEGIESGSVEQAVAK");
  sequences.insert("GDVAFIQHSTVEENTGGK");
  sequences.insert("GEPPSCAEDQSCPSER");
  sequences.insert("GEYVPTSLTAR");
  sequences.insert("GQEFTITGQKR");
  sequences.insert("GTFAALSELHCDK");
  sequences.insert("HLVDEPQNLIK");
  sequences.insert("HQDCLVTTLQTQPGAVR");
  sequences.insert("HTTVNENAPDQK");
  sequences.insert("ILDCGSPDTEVR");
  sequences.insert("KCPSPCQLQAER");
  sequences.insert("KGTEFTVNDLQGK");
  sequences.insert("KQTALVELLK");
  sequences.insert("KVPQVSTPTLVEVSR");
  sequences.insert("LALQFTTNAKR");
  sequences.insert("LCVLHEKTPVSEK");
  sequences.insert("LFTFHADICTLPDTEK");
  sequences.insert("LGEYGFQNALIVR");
  sequences.insert("LHVDPENFK");
  sequences.insert("LKECCDKPLLEK");
  sequences.insert("LKHLVDEPQNLIK");
  sequences.insert("LKPDPNTLCDEFK");
  sequences.insert("LLGNVLVVVLAR");
  sequences.insert("LLVVYPWTQR");
  sequences.insert("LRVDPVNFK");
  sequences.insert("LTDEELAFPPLSPSR");
  sequences.insert("LVNELTEFAK");
  sequences.insert("MFLSFPTTK");
  sequences.insert("MPCTEDYLSLILNR");
  sequences.insert("NAPYSGYSGAFHCLK");
  sequences.insert("NECFLSHKDDSPDLPK");
  sequences.insert("NEPNKVPACPGSCEEVK");
  sequences.insert("NLQMDDFELLCTDGR");
  sequences.insert("QAGVQAEPSPK");
  sequences.insert("RAPEFAAPWPDFVPR");
  sequences.insert("RHPEYAVSVLLR");
  sequences.insert("RPCFSALTPDETYVPK");
  sequences.insert("RSLLLAPEEGPVSQR");
  sequences.insert("SAFPPEPLLCSVQR");
  sequences.insert("SAGWNIPIGTLLHR");
  sequences.insert("SCWCVDEAGQK");
  sequences.insert("SGNPNYPHEFSR");
  sequences.insert("SHCIAEVEK");
  sequences.insert("SISSGFFECER");
  sequences.insert("SKYLASASTMDHAR");
  sequences.insert("SLHTLFGDELCK");
  sequences.insert("SLLLAPEEGPVSQR");
  sequences.insert("SPPQCSPDGAFRPVQCK");
  sequences.insert("SREGDPLAVYLK");
  sequences.insert("SRQIPQCPTSCER");
  sequences.insert("TAGTPVSIPVCDDSSVK");
  sequences.insert("TCVADESHAGCEK");
  sequences.insert("TQFGCLEGFGR");
  sequences.insert("TVMENFVAFVDK");
  sequences.insert("TYFPHFDLSHGSAQVK");
  sequences.insert("TYMLAFDVNDEK");
  sequences.insert("VDEVGGEALGR");
  sequences.insert("VDLLIGSSQDDGLINR");
  sequences.insert("VEDIWSFLSK");
  sequences.insert("VGGHAAEYGAEALER");
  sequences.insert("VGTRCCTKPESER");
  sequences.insert("VKVDEVGGEALGR");
  sequences.insert("VKVDLLIGSSQDDGLINR");
  sequences.insert("VLDSFSNGMK");
  sequences.insert("VLSAADKGNVK");
  sequences.insert("VPQVSTPTLVEVSR");
  sequences.insert("VTKCCTESLVNR");
  sequences.insert("VVAASDASQDALGCVK");
  sequences.insert("VVAGVANALAHR");
  sequences.insert("YICDNQDTISSK");
  sequences.insert("YLASASTMDHAR");
  sequences.insert("YNGVFQECCQAEDK");
#endif

  SpectrumAlignmentScore spectra_zhang;
  spectra_zhang.setParameters(zhang_param);

  vector<PeptideHit> hits;
  Size missed_cleavages = param_.getValue("missed_cleavages");
  for (set<String>::const_iterator it = sequences.begin(); it != sequences.end(); ++it)
  {
    Size num_missed = countMissedCleavagesTryptic_(*it);
    if (missed_cleavages < num_missed)
    {
      //cerr << "Too many missed cleavages: " << *it << ", found " << num_missed << ", allowed " << missed_cleavages << endl;
      continue;
    }

    PeakSpectrum CID_sim_spec;
    getCIDSpectrum_(CID_sim_spec, *it, charge);

    //normalizer.filterSpectrum(CID_sim_spec);

    double cid_score = zhang_(CID_sim_spec, CID_spec);

    PeptideHit hit;
    hit.setScore(cid_score);
    hit.setSequence(getModifiedAASequence_(*it));
    hit.setCharge((Int)charge); //TODO unify charge interface: int or size?
    hits.push_back(hit);
    //cerr << getModifiedAASequence_(*it) << " " << cid_score << " " << endl;
  }

  // rescore the top hits
  id.setHits(hits);
  id.assignRanks();

  hits = id.getHits();

  SpectrumAlignmentScore alignment_score;
  Param align_param(alignment_score.getParameters());
  align_param.setValue("tolerance", fragment_mass_tolerance_);
  align_param.setValue("use_linear_factor", "true");
  alignment_score.setParameters(align_param);

  for (vector<PeptideHit>::iterator it = hits.begin(); it != hits.end(); ++it)
  {
    //cerr << "Pre: " << it->getRank() << " " << it->getSequence() << " " << it->getScore() << " " << endl;
  }

  Size number_of_prescoring_hits = param_.getValue("number_of_prescoring_hits");
  if (hits.size() > number_of_prescoring_hits)
  {
    hits.resize(number_of_prescoring_hits);
  }

  for (vector<PeptideHit>::iterator it = hits.begin(); it != hits.end(); ++it)
  {
    PeakSpectrum CID_sim_spec;
    getCIDSpectrum_(CID_sim_spec, getModifiedStringFromAASequence_(it->getSequence()), charge);

    normalizer.filterSpectrum(CID_sim_spec);

    //DTAFile().store("sim_specs/" + it->getSequence().toUnmodifiedString() + "_sim_CID.dta", CID_sim_spec);

    //double cid_score = spectra_zhang(CID_sim_spec, CID_spec);
    double cid_score = alignment_score(CID_sim_spec, CID_spec);

    //cerr << "Final: " << it->getSequence() << " " << cid_score << endl;
    it->setScore(cid_score);
  }

  id.setHits(hits);
  id.assignRanks();

  hits = id.getHits();
  for (vector<PeptideHit>::iterator it = hits.begin(); it != hits.end(); ++it)
  {
    //cerr << "Fin: " << it->getRank() << " " << it->getSequence() << " " << it->getScore() << " " << endl;
  }

  Size number_of_hits = param_.getValue("number_of_hits");
  if (id.getHits().size() > number_of_hits)
  {
    hits.resize(number_of_hits);
  }
  id.setHits(hits);
  id.assignRanks();

  return;
}
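// --------------------------------------------------------------------------
// Sketch: getIdentification() above derives the singly protonated precursor
// weight from the recorded precursor m/z and charge as
// m/z * z - (z - 1) * proton_mass. The standalone example below checks that
// arithmetic with illustrative numbers (the proton mass is written out
// numerically instead of using Constants::PROTON_MASS_U).
// --------------------------------------------------------------------------
#include <iostream>

int main()
{
  const double proton_mass = 1.007276466; // mass of a proton in Da
  const double precursor_mz = 675.34;     // example recorded precursor m/z
  const unsigned charge = 2;              // example precursor charge state

  // [M+H]+ = m/z * z - (z - 1) * proton mass, as used in getIdentification()
  const double mh = precursor_mz * charge - (charge - 1) * proton_mass;
  std::cout << "[M+H]+ = " << mh << " Da\n"; // prints ~1349.67 Da
  return 0;
}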