// Return the last position (reference length) of the region's chromosome, taken from the BAM header.
position BamAlignmentReader::GetLastPositionInBam(const std::string& bamPath, Region::SharedPtr regionPtr)
{
  BamTools::BamReader bamReader;
  if (!bamReader.Open(bamPath)) {
    throw "Unable to open bam file";
  }
  bamReader.LocateIndex();
  // GetReferenceID returns -1 if the region's reference name is not in the BAM header
  int refID = bamReader.GetReferenceID(regionPtr->getReferenceID());
  if (refID < 0) {
    bamReader.Close();
    throw "Reference not found in bam file";
  }
  auto referenceData = bamReader.GetReferenceData();
  bamReader.Close();
  return referenceData[refID].RefLength;
}
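// Usage sketch (not part of the original source): the reference length above comes
// straight from the BAM header, so only the header lookup is needed. The function
// name, BAM path and reference name below are placeholders.
#include <api/BamReader.h>
#include <iostream>
#include <string>

void printReferenceLength(const std::string& bamPath, const std::string& refName)
{
  BamTools::BamReader reader;
  if (!reader.Open(bamPath)) {
    std::cerr << "Unable to open bam file: " << bamPath << std::endl;
    return;
  }
  // GetReferenceID returns -1 if the name is not present in the header
  const int refID = reader.GetReferenceID(refName);
  if (refID >= 0) {
    const BamTools::RefVector refs = reader.GetReferenceData();
    std::cout << refName << " length: " << refs[refID].RefLength << std::endl;
  }
  reader.Close();
}
// e.g. printReferenceLength("sample.bam", "chr1");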
// Prepare the clustering workspace: create output directories, validate the input BAM(s),
// and collect the read groups from headers and reads.
void Config::InitializationClustering()
{
  struct stat st;

  // Create the workspace directory if it does not exist yet
  if (stat(Workspace.c_str(), &st) == 0 && S_ISDIR(st.st_mode))
    Log("[Warning] Workspace directory already present");
  else if (mkdir(Workspace.c_str(), 0755) != 0) {
    Log("[Error] Could not create workspace directory: " + Workspace);
    exit(1);
  }

  RunningTasksFile = Workspace + "/" + FilePrefix + "running.tasks";
  StatsFile = Workspace + "/" + FilePrefix + "stats";
  BinClusterFile = Workspace + "/" + FilePrefix + "bpc";
  clusterFile = new ClusterFile(BinClusterFile);

  clusterDir = Workspace + "/clusters/";
  if (stat(clusterDir.c_str(), &st) == 0 && S_ISDIR(st.st_mode))
    Log("[Warning] Cluster directory already present");
  else if (mkdir(clusterDir.c_str(), 0755) != 0) {
    Log("[Error] Could not create cluster directory: " + clusterDir);
    exit(1);
  }

  insertsizeDir = Workspace + "/insertsize/";
  if (stat(insertsizeDir.c_str(), &st) == 0 && S_ISDIR(st.st_mode))
    Log("[Warning] Insertsize directory already present");
  else if (mkdir(insertsizeDir.c_str(), 0755) != 0) {
    Log("[Error] Could not create insertsize directory: " + insertsizeDir);
    exit(1);
  }

  coverageDir = Workspace + "/coverage/";
  if (stat(coverageDir.c_str(), &st) == 0 && S_ISDIR(st.st_mode))
    Log("[Warning] Coverage directory already present");
  else if (mkdir(coverageDir.c_str(), 0755) != 0) {
    Log("[Error] Could not create coverage directory: " + coverageDir);
    exit(1);
  }

  // Decide between a single paired bam and a forward/reverse bam pair
  if (!ForwardBam.empty() && !ReverseBam.empty() && PairedBam.empty()) {
    UsePairedBam = false;
  } else if (ForwardBam.empty() && ReverseBam.empty() && !PairedBam.empty()) {
    UsePairedBam = true;
  } else {
    Log("[Error] No correct bam file(s)");
    exit(1);
  }

  BamTools::BamAlignment alignment;
  BamTools::BamReader BamReader;
  if (UsePairedBam) {
    BamReader.Open(PairedBam);
    if (not BamReader.IsOpen()) {
      Log("[Error] Could not open paired bam");
      exit(1);
    }
    if (PairedIndex.empty()) {
      if (not BamReader.LocateIndex(BamTools::BamIndex::STANDARD)) {
        PairedIndex = PairedBam.substr(0, PairedBam.find_last_of(".bam") - 3) + ".bai";
        BamReader.OpenIndex(PairedIndex);
      }
      if (not BamReader.HasIndex()) {
        Log("[Error] No index for bamfile");
        exit(1);
      }
    }

    // Register the read groups declared in the header
    BamTools::SamHeader header = BamReader.GetHeader();
    for (BamTools::SamReadGroupIterator it = header.ReadGroups.Begin(); it != header.ReadGroups.End(); it++) {
      BamTools::SamReadGroup* readgroup = &*it;
      readNameConverter.TrimName(readgroup->ID);
      readNameConverter.AddReadGroup(readgroup->ID);
    }

    // Scan the first reads for read groups missing from the header
    long int count = 0;
    while (BamReader.GetNextAlignment(alignment)) {
      string RG;
      if (alignment.GetTag("RG", RG)) {
        if (not NameTrim.empty())
          readNameConverter.TrimName(RG);
        if (readNameConverter.AddReadGroup(RG)) {
          Log("[Warning] Readgroup '" + RG + "' found in reads but not in header");
          count = 0;
        }
      }
      count++;
      if (count > 10000)
        break;
    }
    BamReader.Close();
  } else {
    BamReader.Open(ForwardBam);
    if (not BamReader.IsOpen()) {
      Log("[Error] Could not open first/forward bam");
      exit(1);
    }
    if (ForwardIndex.empty()) {
      if (not BamReader.LocateIndex(BamTools::BamIndex::STANDARD)) {
        ForwardIndex = ForwardBam.substr(0, ForwardBam.find_last_of(".bam") - 3) + ".bai";
        BamReader.OpenIndex(ForwardIndex);
      }
      if (not BamReader.HasIndex()) {
        Log("[Error] No index for forward bamfile");
        exit(1);
      }
    }

    // Register the read groups declared in the forward header
    BamTools::SamHeader forwardheader = BamReader.GetHeader();
    for (BamTools::SamReadGroupIterator it = forwardheader.ReadGroups.Begin(); it != forwardheader.ReadGroups.End(); it++) {
      BamTools::SamReadGroup* readgroup = &*it;
      readNameConverter.TrimName(readgroup->ID);
      readNameConverter.AddReadGroup(readgroup->ID);
    }

    // Scan the first forward reads for read groups missing from the header
    long int count = 0;
    while (BamReader.GetNextAlignment(alignment)) {
      string RG;
      if (alignment.GetTag("RG", RG)) {
        if (!NameTrim.empty())
          readNameConverter.TrimName(RG);
        if (readNameConverter.AddReadGroup(RG)) {
          Log("[Warning] Readgroup '" + RG + "' found in forward reads but not in header");
          count = 0;
        }
      }
      count++;
      if (count > 10000)
        break;
    }
    BamReader.Close();

    BamReader.Open(ReverseBam);
    if (not BamReader.IsOpen()) {
      Log("[Error] Could not open second/reverse bam");
      exit(1);
    }
    if (ReverseIndex.empty()) {
      if (not BamReader.LocateIndex(BamTools::BamIndex::STANDARD)) {
        ReverseIndex = ReverseBam.substr(0, ReverseBam.find_last_of(".bam") - 3) + ".bai";
        BamReader.OpenIndex(ReverseIndex);
      }
      if (not BamReader.HasIndex()) {
        Log("[Error] No index for reverse bamfile");
        exit(1);
      }
    }

    // Compare the reverse header's read groups against the forward ones
    BamTools::SamHeader reverseheader = BamReader.GetHeader();
    for (BamTools::SamReadGroupIterator it = reverseheader.ReadGroups.Begin(); it != reverseheader.ReadGroups.End(); it++) {
      BamTools::SamReadGroup* readgroup = &*it;
      readNameConverter.TrimName(readgroup->ID);
      if (readNameConverter.AddReadGroup(readgroup->ID)) {
        Log("[Warning] Readgroup '" + readgroup->ID + "' found in reverse but not in forward");
      }
    }

    // Scan the first reverse reads for read groups missing from the header
    count = 0;
    while (BamReader.GetNextAlignment(alignment)) {
      string RG;
      if (alignment.GetTag("RG", RG)) {
        if (!NameTrim.empty())
          readNameConverter.TrimName(RG);
        if (readNameConverter.AddReadGroup(RG)) {
          Log("[Warning] Readgroup '" + RG + "' found in reverse reads but not in header");
          count = 0;
        }
      }
      count++;
      if (count > 10000)
        break;
    }
    BamReader.Close();
  }

  // Log all read groups that were collected
  for (map<string, int>::iterator it = readNameConverter.ReadGroups.begin(); it != readNameConverter.ReadGroups.end(); ++it) {
    ostringstream logBuffer;
    logBuffer << "Readgroup found: " << it->second << " - " << it->first;
    Log(logBuffer.str());
  }

  writeConfigFile(Workspace + "/" + FilePrefix + "config");
}
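// Sketch (not part of the original source): the create-if-missing pattern repeated for
// the workspace, cluster, insertsize and coverage directories above, factored into one
// POSIX helper. The name ensureDirectory is hypothetical.
#include <sys/stat.h>
#include <sys/types.h>
#include <iostream>
#include <string>

// Returns true if the directory exists afterwards (already present or newly created).
bool ensureDirectory(const std::string& path)
{
  struct stat st;
  if (stat(path.c_str(), &st) == 0 && S_ISDIR(st.st_mode)) {
    std::cerr << "[Warning] Directory already present: " << path << std::endl;
    return true;
  }
  if (mkdir(path.c_str(), 0755) != 0) {
    std::cerr << "[Error] Could not create directory: " << path << std::endl;
    return false;
  }
  return true;
}
// e.g. if (!ensureDirectory(Workspace + "/clusters")) exit(1);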
//
// Main
//
int somaticVariantFiltersMain(int argc, char** argv)
{
  parseSomaticVariantFiltersOptions(argc, argv);

  Timer* pTimer = new Timer(PROGRAM_IDENT);

  // Load Reference
  ReadTable refTable(opt::referenceFile, SRF_NO_VALIDATION);
  refTable.indexReadsByID();

  // Load BAMs
  BamTools::BamReader* pTumorBamReader = new BamTools::BamReader;
  pTumorBamReader->Open(opt::tumorBamFile);
  pTumorBamReader->LocateIndex();
  assert(pTumorBamReader->HasIndex());

  BamTools::BamReader* pNormalBamReader = new BamTools::BamReader;
  pNormalBamReader->Open(opt::normalBamFile);
  pNormalBamReader->LocateIndex();
  assert(pNormalBamReader->HasIndex());

  // Track duplicated variants
  HashSet<std::string> duplicateHash;

  std::ifstream input(opt::vcfFile.c_str());
  std::string line;

  while(getline(input, line)) {
    if(line.empty())
      continue;

    if(line[0] == '#') {
      std::cout << line << "\n";
      continue;
    }

    // Parse record
    VCFRecord record(line);
    if(record.isMultiAllelic()) {
      std::cerr << "Error: multi-allelic VCF found, please run vcfbreakmulti\n";
      exit(EXIT_FAILURE);
    }

    // Check if we've seen this variant already
    std::string key = makeVariantKey(record);
    if(duplicateHash.find(key) != duplicateHash.end())
      continue;
    else
      duplicateHash.insert(key);

    if(opt::verbose > 0) {
      std::stringstream ss;
      ss << "Variant: " << record << "\n";
      fprintf(stderr, "===============================================\n%s", ss.str().c_str());
    }

    StringStringHash tagHash;
    makeTagHash(record, tagHash);

    StringVector fail_reasons;

    int hplen = 0;
    if(!getTagValue(tagHash, "HPLen", hplen))
      hplen = calculateHomopolymerLength(record, &refTable);
    if(hplen > opt::maxHPLen)
      fail_reasons.push_back("Homopolymer");

    double dust = 0.0;
    if(!getTagValue(tagHash, "Dust", dust))
      dust = HapgenUtil::calculateDustScoreAtPosition(record.refName, record.refPosition, &refTable);
    if(dust > opt::maxDust)
      fail_reasons.push_back("LowComplexity");

    double af;
    if(getTagValue(tagHash, "AF", af) && af < opt::minAF)
      fail_reasons.push_back("LowAlleleFrequency");

    int varDP;
    if(getTagValue(tagHash, "VarDP", varDP) && varDP < opt::minVarDP)
      fail_reasons.push_back("LowVarDP");

    double strandBias;
    if(getTagValue(tagHash, "SB", strandBias) && strandBias >= opt::maxStrandBias)
      fail_reasons.push_back("StrandBias");

    CoverageStats tumor_stats = getVariantCoverage(pTumorBamReader, record, &refTable);
    CoverageStats normal_stats = getVariantCoverage(pNormalBamReader, record, &refTable);

    if(opt::verbose > 0) {
      fprintf(stderr, "Tumor: [%zu %zu]\n", tumor_stats.n_total_reads, tumor_stats.n_evidence_reads);
      fprintf(stderr, "Normal: [%zu %zu]\n", normal_stats.n_total_reads, normal_stats.n_evidence_reads);
    }

    if(normal_stats.n_evidence_reads > opt::maxNormalReads)
      fail_reasons.push_back("NormalEvidence");

    if(normal_stats.n_total_reads < opt::minNormalDepth)
      fail_reasons.push_back("LowNormalDepth");

    if(!tumor_stats.snv_evidence_quals.empty()) {
      double median_quality = median(tumor_stats.snv_evidence_quals);
      if(median_quality < opt::minMedianQuality)
        fail_reasons.push_back("LowQuality");
    }

    if(tumor_stats.median_mapping_quality < opt::minMedianQuality)
      fail_reasons.push_back("LowMappingQuality");

    if(!fail_reasons.empty()) {
      if(record.passStr != "PASS" && record.passStr != ".")
        fail_reasons.insert(fail_reasons.begin(), record.passStr);

      std::stringstream strss;
      std::copy(fail_reasons.begin(), fail_reasons.end(), std::ostream_iterator<std::string>(strss, ";"));
      record.passStr = strss.str();
      record.passStr.erase(record.passStr.size() - 1); // erase trailing ;
    }

    std::cout << record << "\n";
  }

  // Cleanup
  delete pTumorBamReader;
  delete pNormalBamReader;
  delete pTimer;

  return 0;
}
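// Standalone illustration (not part of the original source) of the FILTER-string join
// used above: stream every failure reason through an ostream_iterator with a ';'
// delimiter, then drop the trailing separator. The helper name is hypothetical.
#include <algorithm>
#include <iterator>
#include <sstream>
#include <string>
#include <vector>

std::string joinFailReasons(const std::vector<std::string>& fail_reasons)
{
  std::stringstream strss;
  std::copy(fail_reasons.begin(), fail_reasons.end(),
            std::ostream_iterator<std::string>(strss, ";"));
  std::string passStr = strss.str();
  if (!passStr.empty())
    passStr.erase(passStr.size() - 1); // erase trailing ';'
  return passStr;
}
// joinFailReasons({"Homopolymer", "LowVarDP"}) -> "Homopolymer;LowVarDP"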
inline int run(Config const& c, TSingleHit) {
  // Create library objects
  typedef std::map<std::string, LibraryInfo> TLibraryMap;
  typedef std::map<std::string, TLibraryMap> TSampleLibrary;
  TSampleLibrary sampleLib;

  // Scan libraries
  for(unsigned int file_c = 0; file_c < c.files.size(); ++file_c) {
    // Get a sample name
    std::string sampleName(c.files[file_c].stem().string());

    // Check that all input bam files exist
    BamTools::BamReader reader;
    if ( ! reader.Open(c.files[file_c].string()) ) {
      std::cerr << "Could not open input bam file: " << c.files[file_c].string() << std::endl;
      reader.Close();
      return -1;
    }

    // Check that all input bam files are indexed
    reader.LocateIndex();
    if ( !reader.HasIndex() ) {
      std::cerr << "Missing bam index file: " << c.files[file_c].string() << std::endl;
      reader.Close();
      return -1;
    }

    // Get library parameters and overall maximum insert size
    TLibraryMap libInfo;
    getLibraryParams(c.files[file_c], libInfo, 0, 5);
    sampleLib.insert(std::make_pair(sampleName, libInfo));
  }

  // Read all SV intervals
  typedef std::vector<StructuralVariantRecord> TSVs;
  TSVs svs;
  std::map<unsigned int, std::string> idToName;
  unsigned int intervalCount = 1;
  if (boost::filesystem::exists(c.int_file) && boost::filesystem::is_regular_file(c.int_file) && boost::filesystem::file_size(c.int_file)) {
    Memory_mapped_file interval_file(c.int_file.string().c_str());
    char interval_buffer[Memory_mapped_file::MAX_LINE_LENGTH];
    while (interval_file.left_bytes() > 0) {
      interval_file.read_line(interval_buffer);
      // Read single interval line
      StructuralVariantRecord sv;
      Tokenizer token(interval_buffer, Memory_mapped_file::MAX_LINE_LENGTH);
      std::string interval_rname;
      token.getString(sv.chr);
      sv.svStart = token.getUInt();
      sv.svEnd = token.getUInt() + 1;
      std::string svName;
      token.getString(svName);
      idToName.insert(std::make_pair(intervalCount, svName));
      sv.id = intervalCount++;
      svs.push_back(sv);
    }
    interval_file.close();
  } else {
    // Create artificial intervals
    BamTools::BamReader readerRef;
    if ( ! readerRef.Open(c.files[0].string()) ) return -1;
    BamTools::RefVector references = readerRef.GetReferenceData();
    typename BamTools::RefVector::const_iterator itRef = references.begin();
    for(int refIndex = 0; itRef != references.end(); ++itRef, ++refIndex) {
      int32_t pos = 0;
      while (pos < references[refIndex].RefLength) {
        int32_t window_len = pos + c.window_size;
        if (window_len > references[refIndex].RefLength) window_len = references[refIndex].RefLength;
        StructuralVariantRecord sv;
        sv.chr = references[refIndex].RefName;
        sv.svStart = pos;
        sv.svEnd = window_len;
        std::stringstream s;
        s << sv.chr << ":" << sv.svStart << "-" << sv.svEnd;
        idToName.insert(std::make_pair(intervalCount, s.str()));
        sv.id = intervalCount++;
        svs.push_back(sv);
        pos += c.window_offset;
      }
    }
  }

  // Output data types
  typedef std::pair<std::string, int> TSampleSVPair;
  typedef std::pair<int, int> TBpRead;
  typedef std::map<TSampleSVPair, TBpRead> TCountMap;
  TCountMap countMap;

  // Annotate coverage
  annotateCoverage(c.files, c.minMapQual, c.inclCigar, sampleLib, svs, countMap, TSingleHit());

  // Output library statistics
  std::cout << "Library statistics" << std::endl;
  TSampleLibrary::const_iterator sampleIt = sampleLib.begin();
  for(; sampleIt != sampleLib.end(); ++sampleIt) {
    std::cout << "Sample: " << sampleIt->first << std::endl;
    TLibraryMap::const_iterator libIt = sampleIt->second.begin();
    for(; libIt != sampleIt->second.end(); ++libIt) {
      std::cout << "RG: ID=" << libIt->first << ",Median=" << libIt->second.median << ",MAD=" << libIt->second.mad << ",Orientation=" << (int) libIt->second.defaultOrient << ",MappedReads=" << libIt->second.mappedReads << ",DuplicatePairs=" << libIt->second.non_unique_pairs << ",UniquePairs=" << libIt->second.unique_pairs << std::endl;
    }
  }

  // Output file
  boost::iostreams::filtering_ostream dataOut;
  dataOut.push(boost::iostreams::gzip_compressor());
  dataOut.push(boost::iostreams::file_sink(c.outfile.string().c_str(), std::ios_base::out | std::ios_base::binary));

  // Iterate all SVs
  typename TSVs::const_iterator itSV = svs.begin();
  typename TSVs::const_iterator itSVEnd = svs.end();
  for(; itSV != itSVEnd; ++itSV) {
    dataOut << itSV->chr << "\t" << itSV->svStart << "\t" << itSV->svEnd << "\t" << idToName.find(itSV->id)->second;
    // Iterate all samples
    for(unsigned int file_c = 0; file_c < c.files.size(); ++file_c) {
      // Get the sample name
      std::string sampleName(c.files[file_c].stem().string());
      TSampleSVPair sampleSVPair = std::make_pair(sampleName, itSV->id);
      typename TCountMap::iterator countMapIt = countMap.find(sampleSVPair);
      dataOut << "\t";
      if (c.avg_flag) dataOut << ( (countMapIt->second.first) / (double) (itSV->svEnd - itSV->svStart)) << "\t";
      if (c.bp_flag) dataOut << countMapIt->second.first << "\t";
      dataOut << countMapIt->second.second;
    }
    dataOut << std::endl;
  }

  // End
  boost::posix_time::ptime now = boost::posix_time::second_clock::local_time();
  std::cout << '[' << boost::posix_time::to_simple_string(now) << "] Done." << std::endl;
  return 0;
}
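// Standalone sketch (not part of the original source) of the artificial-interval tiling
// above: windows of window_size are placed every window_offset bases and the last
// window is clipped at the chromosome end. The helper name and values are made up.
#include <cstdint>
#include <iostream>

void printWindows(int32_t refLength, int32_t window_size, int32_t window_offset)
{
  int32_t pos = 0;
  while (pos < refLength) {
    int32_t window_len = pos + window_size;
    if (window_len > refLength) window_len = refLength; // clip final window
    std::cout << pos << "-" << window_len << std::endl;
    pos += window_offset;
  }
}
// printWindows(10000, 4000, 2500) -> 0-4000, 2500-6500, 5000-9000, 7500-10000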
inline int run(Config const& c, TCoverageType covType) {
  // Create library objects
  typedef boost::unordered_map<std::string, LibraryInfo> TLibraryMap;
  typedef boost::unordered_map<std::string, TLibraryMap> TSampleLibrary;
  TSampleLibrary sampleLib;

  // Scan libraries
  for(unsigned int file_c = 0; file_c < c.files.size(); ++file_c) {
    // Get a sample name
    std::string sampleName(c.files[file_c].stem().string());

    // Check that all input bam files exist
    BamTools::BamReader reader;
    if ( ! reader.Open(c.files[file_c].string()) ) {
      std::cerr << "Could not open input bam file: " << c.files[file_c].string() << std::endl;
      reader.Close();
      return -1;
    }

    // Check that all input bam files are indexed
    reader.LocateIndex();
    if ( !reader.HasIndex() ) {
      std::cerr << "Missing bam index file: " << c.files[file_c].string() << std::endl;
      reader.Close();
      return -1;
    }

    // Get library parameters and overall maximum insert size
    TLibraryMap libInfo;
    getLibraryParams(c.files[file_c], libInfo, 0, 5);
    sampleLib.insert(std::make_pair(sampleName, libInfo));
  }

  // Get references
  BamTools::BamReader readerRef;
  if ( ! readerRef.Open(c.files[0].string()) ) return -1;
  BamTools::RefVector references = readerRef.GetReferenceData();

  // Read all SV intervals
  typedef std::vector<CovRecord> TSVs;
  TSVs svs;
  std::map<unsigned int, std::string> idToName;
  unsigned int intervalCount = 1;
  if (boost::filesystem::exists(c.int_file) && boost::filesystem::is_regular_file(c.int_file) && boost::filesystem::file_size(c.int_file)) {
    // Map reference names to reference indices
    typedef boost::unordered_map<std::string, unsigned int> TMapChr;
    TMapChr mapChr;
    typename BamTools::RefVector::const_iterator itRef = references.begin();
    for(unsigned int i = 0; itRef != references.end(); ++itRef, ++i) mapChr[ itRef->RefName ] = i;

    std::ifstream interval_file(c.int_file.string().c_str(), std::ifstream::in);
    if (interval_file.is_open()) {
      while (interval_file.good()) {
        std::string intervalLine;
        getline(interval_file, intervalLine);
        typedef boost::tokenizer< boost::char_separator<char> > Tokenizer;
        boost::char_separator<char> sep(" \t,;");
        Tokenizer tokens(intervalLine, sep);
        Tokenizer::iterator tokIter = tokens.begin();
        if (tokIter != tokens.end()) {
          std::string chrName = *tokIter++;
          TMapChr::const_iterator mapChrIt = mapChr.find(chrName);
          if (mapChrIt != mapChr.end()) {
            if (tokIter != tokens.end()) {
              CovRecord sv;
              sv.chr = mapChrIt->second;
              sv.svStart = boost::lexical_cast<int32_t>(*tokIter++);
              sv.svEnd = boost::lexical_cast<int32_t>(*tokIter++) + 1;
              std::string svName = *tokIter;
              idToName.insert(std::make_pair(intervalCount, svName));
              sv.id = intervalCount++;
              svs.push_back(sv);
            }
          }
        }
      }
      interval_file.close();
    }
  } else {
    // Create artificial intervals
    typename BamTools::RefVector::const_iterator itRef = references.begin();
    for(int refIndex = 0; itRef != references.end(); ++itRef, ++refIndex) {
      int32_t pos = 0;
      unsigned int wSize = c.window_size;
      unsigned int wOffset = c.window_offset;
      if (c.window_num > 0) {
        wSize = (itRef->RefLength / c.window_num) + 1;
        wOffset = wSize;
      }
      while (pos < references[refIndex].RefLength) {
        int32_t window_len = pos + wSize;
        if (window_len > references[refIndex].RefLength) window_len = references[refIndex].RefLength;
        CovRecord sv;
        sv.chr = refIndex;
        sv.svStart = pos;
        sv.svEnd = window_len;
        std::stringstream s;
        s << references[sv.chr].RefName << ":" << sv.svStart << "-" << sv.svEnd;
        idToName.insert(std::make_pair(intervalCount, s.str()));
        sv.id = intervalCount++;
        svs.push_back(sv);
        pos += wOffset;
      }
    }
  }

  // Output data types
  typedef std::pair<std::string, int> TSampleSVPair;
  typedef std::pair<int, int> TBpRead;
  typedef std::map<TSampleSVPair, TBpRead> TCountMap;
  TCountMap countMap;

  // Annotate coverage
  if (c.inclCigar) annotateCoverage(c.files, c.minGenoQual, sampleLib, svs, countMap, BpLevelType<BpLevelCount>(), covType);
  else annotateCoverage(c.files, c.minGenoQual, sampleLib, svs, countMap, BpLevelType<NoBpLevelCount>(), covType);

  // Output library statistics
  std::cout << "Library statistics" << std::endl;
  TSampleLibrary::const_iterator sampleIt = sampleLib.begin();
  for(; sampleIt != sampleLib.end(); ++sampleIt) {
    std::cout << "Sample: " << sampleIt->first << std::endl;
    TLibraryMap::const_iterator libIt = sampleIt->second.begin();
    for(; libIt != sampleIt->second.end(); ++libIt) {
      std::cout << "RG: ID=" << libIt->first << ",Median=" << libIt->second.median << ",MAD=" << libIt->second.mad << ",Orientation=" << (int) libIt->second.defaultOrient << std::endl;
    }
  }

  // Output file
  boost::iostreams::filtering_ostream dataOut;
  dataOut.push(boost::iostreams::gzip_compressor());
  dataOut.push(boost::iostreams::file_sink(c.outfile.string().c_str(), std::ios_base::out | std::ios_base::binary));

  // Print header
  dataOut << "#chr\tstart\tend\tid";
  for(unsigned int file_c = 0; file_c < c.files.size(); ++file_c) {
    std::string sampleName(c.files[file_c].stem().string());
    dataOut << "\t";
    if (c.avg_flag) dataOut << sampleName << "_avgcov" << "\t";
    if (c.bp_flag) dataOut << sampleName << "_bpcount" << "\t";
    if ((c.bp_flag) || (c.avg_flag)) dataOut << sampleName << "_readcount";
    else dataOut << sampleName;
  }
  dataOut << std::endl;

  // Iterate all SVs
  typename TSVs::const_iterator itSV = svs.begin();
  typename TSVs::const_iterator itSVEnd = svs.end();
  for(; itSV != itSVEnd; ++itSV) {
    dataOut << references[itSV->chr].RefName << "\t" << itSV->svStart << "\t" << itSV->svEnd << "\t" << idToName.find(itSV->id)->second;
    // Iterate all samples
    for(unsigned int file_c = 0; file_c < c.files.size(); ++file_c) {
      // Get the sample name
      std::string sampleName(c.files[file_c].stem().string());
      TSampleSVPair sampleSVPair = std::make_pair(sampleName, itSV->id);
      typename TCountMap::iterator countMapIt = countMap.find(sampleSVPair);
      dataOut << "\t";
      if (c.avg_flag) dataOut << ( (countMapIt->second.first) / (double) (itSV->svEnd - itSV->svStart)) << "\t";
      if (c.bp_flag) dataOut << countMapIt->second.first << "\t";
      dataOut << countMapIt->second.second;
    }
    dataOut << std::endl;
  }

  // End
  boost::posix_time::ptime now = boost::posix_time::second_clock::local_time();
  std::cout << '[' << boost::posix_time::to_simple_string(now) << "] Done." << std::endl;
  return 0;
}
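// Sketch (not part of the original source): the per-interval table is written through a
// gzip-compressing boost::iostreams filter chain, so reading it back needs the matching
// decompressor. The function name and file path are assumptions.
#include <boost/iostreams/filtering_stream.hpp>
#include <boost/iostreams/filter/gzip.hpp>
#include <fstream>
#include <iostream>
#include <string>

void dumpCoverageTable(const std::string& gzPath)
{
  std::ifstream file(gzPath.c_str(), std::ios_base::in | std::ios_base::binary);
  boost::iostreams::filtering_istream dataIn;
  dataIn.push(boost::iostreams::gzip_decompressor());
  dataIn.push(file);
  std::string line;
  while (std::getline(dataIn, line))
    std::cout << line << std::endl; // echo each tab-separated record
}
// e.g. dumpCoverageTable("cov.gz");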