void
SVScorePairRefProcessor::
processClearedRecord(
    const bam_record& bamRead)
{
    using namespace illumina::common;

    assert(bamParams.isSet);

    const pos_t refPos(bamRead.pos()-1);
    if (! bamParams.interval.range.is_pos_intersect(refPos)) return;

    const bool isLargeInsert(isLargeInsertSV(sv));

#ifdef DEBUG_MEGAPAIR
    log_os << __FUNCTION__ << ": read: " << bamRead << "\n";
#endif

    /// check if fragment is too big or too small:
    const int templateSize(std::abs(bamRead.template_size()));
    if (templateSize < bamParams.minFrag) return;
    if (templateSize > bamParams.maxFrag) return;

    // count only from the downstream reads
    const bool isFirstBamRead(isFirstRead(bamRead));

    // get fragment range:
    pos_t fragBeginRefPos(refPos);
    if (! isFirstBamRead)
    {
        fragBeginRefPos=bamRead.mate_pos()-1;
    }

    const pos_t fragEndRefPos(fragBeginRefPos+templateSize);

    if (fragBeginRefPos > fragEndRefPos)
    {
        std::ostringstream oss;
        oss << "ERROR: Failed to parse fragment range from bam record. Frag begin,end: "
            << fragBeginRefPos << " " << fragEndRefPos
            << " bamRecord: " << bamRead << "\n";
        BOOST_THROW_EXCEPTION(LogicException(oss.str()));
    }

    {
        // require the fragment to overlap the breakend center position by at least minFragSupport:
        const pos_t fragOverlap(std::min((1+svParams.centerPos-fragBeginRefPos), (fragEndRefPos-svParams.centerPos)));

#ifdef DEBUG_MEGAPAIR
        log_os << __FUNCTION__ << ": frag begin/end/overlap: "
               << fragBeginRefPos << " " << fragEndRefPos << " " << fragOverlap << "\n";
#endif

        if (fragOverlap < pairOpt.minFragSupport) return;
    }

    SVFragmentEvidence& fragment(evidence.getSampleEvidence(bamParams.bamIndex)[bamRead.qname()]);

    static const bool isShadow(false);

    SVFragmentEvidenceRead& evRead(fragment.getRead(bamRead.is_first()));
    setReadEvidence(svParams.minMapQ, svParams.minTier2MapQ, bamRead, isShadow, evRead);

    setAlleleFrag(*bamParams.fragDistroPtr, templateSize, fragment.ref.getBp(isBp1), isLargeInsert);
}
bool
isMateInsertionEvidenceCandidate(
    const bam_record& bamRead,
    const unsigned minMapq)
{
    if (! bamRead.is_paired()) return false;
    if (bamRead.isNonStrictSupplement()) return false;
    if (bamRead.is_unmapped() || bamRead.is_mate_unmapped()) return false;

    if (bamRead.map_qual() < minMapq) return false;

    if (bamRead.target_id() < 0) return false;
    if (bamRead.mate_target_id() < 0) return false;

    // inter-chromosomal pairs are always candidates:
    if (bamRead.target_id() != bamRead.mate_target_id()) return true;

    /// TODO: better candidate definition based on fragment size distro:
    static const int minSize(10000);
    return (std::abs(bamRead.pos()-bamRead.mate_pos()) >= minSize);
}