int ClusterDoturCommand::execute(){
    try {
        if (abort) { if (calledHelp) { return 0; } return 2; }
        
        ClusterClassic* cluster = new ClusterClassic(cutoff, method, sim);
        
        NameAssignment* nameMap = NULL;
        CountTable* ct = NULL;
        map<string, int> counts;
        if(namefile != "") {
            nameMap = new NameAssignment(namefile);
            nameMap->readMap();
            cluster->readPhylipFile(phylipfile, nameMap);
            delete nameMap;
        }else if (countfile != "") {
            ct = new CountTable();
            ct->readTable(countfile, false, false);
            cluster->readPhylipFile(phylipfile, ct);
            counts = ct->getNameMap();
            delete ct;
        }else {
            cluster->readPhylipFile(phylipfile, nameMap);
        }
        tag = cluster->getTag();
        
        if (m->getControl_pressed()) { delete cluster; return 0; }
        
        list = cluster->getListVector();
        rabund = cluster->getRAbundVector();
        
        if (outputDir == "") { outputDir += util.hasPath(phylipfile); }
        fileroot = outputDir + util.getRootName(util.getSimpleName(phylipfile));
        
        map<string, string> variables;
        variables["[filename]"] = fileroot;
        variables["[clustertag]"] = tag;
        string sabundFileName = getOutputFileName("sabund", variables);
        string rabundFileName = getOutputFileName("rabund", variables);
        //if (countfile != "") { variables["[tag2]"] = "unique_list"; }
        string listFileName = getOutputFileName("list", variables);
        
        if (countfile == "") {
            util.openOutputFile(sabundFileName, sabundFile);
            util.openOutputFile(rabundFileName, rabundFile);
            outputNames.push_back(sabundFileName); outputTypes["sabund"].push_back(sabundFileName);
            outputNames.push_back(rabundFileName); outputTypes["rabund"].push_back(rabundFileName);
        }
        util.openOutputFile(listFileName, listFile);
        outputNames.push_back(listFileName); outputTypes["list"].push_back(listFileName);
        
        float previousDist = 0.00000;
        float rndPreviousDist = 0.00000;
        
        oldRAbund = *rabund;
        oldList = *list;
        
        bool printHeaders = true;
        int estart = time(NULL);
        int loop = 0;
        
        while ((cluster->getSmallDist() <= cutoff) && (cluster->getNSeqs() > 1)){
            if (m->getControl_pressed()) {
                delete cluster; delete list; delete rabund;
                if(countfile == "") { rabundFile.close(); sabundFile.close(); util.mothurRemove((fileroot+ tag + ".rabund")); util.mothurRemove((fileroot+ tag + ".sabund")); }
                listFile.close(); util.mothurRemove((fileroot+ tag + ".list"));
                outputTypes.clear();
                return 0;
            }
            
            cluster->update(cutoff);
            
            float dist = cluster->getSmallDist();
            float rndDist = util.ceilDist(dist, precision);
            //cout << loop << '\t' << dist << '\t' << oldList.getNumBins() << endl;
            loop++;
            
            if(previousDist <= 0.0000 && dist != previousDist)  { printData("unique", counts, printHeaders); }
            else if(rndDist != rndPreviousDist)                 { printData(toString(rndPreviousDist, length-1), counts, printHeaders); }
            
            previousDist = dist;
            rndPreviousDist = rndDist;
            oldRAbund = *rabund;
            oldList = *list;
        }
        
        if(previousDist <= 0.0000)          { printData("unique", counts, printHeaders); }
        else if(rndPreviousDist < cutoff)   { printData(toString(rndPreviousDist, length-1), counts, printHeaders); }
        
        if (countfile == "") { sabundFile.close(); rabundFile.close(); }
        listFile.close();
        
        delete cluster; delete list; delete rabund;
        
        //set list file as new current listfile
        string currentName = "";
        itTypes = outputTypes.find("list");
        if (itTypes != outputTypes.end()) {
            if ((itTypes->second).size() != 0) { currentName = (itTypes->second)[0]; current->setListFile(currentName); }
        }
        
        //set rabund file as new current rabundfile
        itTypes = outputTypes.find("rabund");
        if (itTypes != outputTypes.end()) {
            if ((itTypes->second).size() != 0) { currentName = (itTypes->second)[0]; current->setRabundFile(currentName); }
        }
        
        //set sabund file as new current sabundfile
        itTypes = outputTypes.find("sabund");
        if (itTypes != outputTypes.end()) {
            if ((itTypes->second).size() != 0) { currentName = (itTypes->second)[0]; current->setSabundFile(currentName); }
        }
        
        m->mothurOut("\nOutput File Names: \n");
        for (int i = 0; i < outputNames.size(); i++) { m->mothurOut(outputNames[i] +"\n"); }
        m->mothurOutEndLine();
        
        m->mothurOut("It took " + toString(time(NULL) - estart) + " seconds to cluster"); m->mothurOutEndLine();
        
        return 0;
    }
    catch(exception& e) {
        m->errorOut(e, "ClusterDoturCommand", "execute");
        exit(1);
    }
}
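// Hedged sketch (not part of the mothur source above): the loop in ClusterDoturCommand::execute labels each
// block of output with util.ceilDist(dist, precision). The utility itself is not shown in this listing; the
// stand-alone program below illustrates the assumed behavior, ceiling-rounding a raw distance up to the next
// 1/precision step, so with precision=100 a smallest distance of 0.031 is reported under the 0.04 label.
#include <cmath>
#include <cstdio>

static float ceilDistSketch(float dist, int precision) {
    //round up to the nearest 1/precision step (assumption about util.ceilDist)
    return (int)(std::ceil(dist * precision)) / (float)precision;
}

int main() {
    std::printf("%.3f -> %.2f\n", 0.031, ceilDistSketch(0.031f, 100)); //prints 0.031 -> 0.04
    return 0;
}
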
//***************************************************************************************************************

int SummaryQualCommand::execute(){
    try{
        if (abort == true) { if (calledHelp) { return 0; } return 2; }
        
        int start = time(NULL);
        int numSeqs = 0;
        
        vector<int> position;
        vector<int> averageQ;
        vector< vector<int> > scores;
        
        if (m->control_pressed) { return 0; }
        
        if (namefile != "") { nameMap = m->readNames(namefile); }
        else if (countfile != "") {
            CountTable ct;
            ct.readTable(countfile, false, false);
            nameMap = ct.getNameMap();
        }
        
        vector<unsigned long long> positions;
#if defined (__APPLE__) || (__MACH__) || (linux) || (__linux) || (__linux__) || (__unix__) || (__unix)
        positions = m->divideFile(qualfile, processors);
        for (int i = 0; i < (positions.size()-1); i++) { lines.push_back(linePair(positions[i], positions[(i+1)])); }
#else
        if (processors == 1) { lines.push_back(linePair(0, 1000)); }
        else {
            positions = m->setFilePosFasta(qualfile, numSeqs);
            if (numSeqs < processors) { processors = numSeqs; }
            
            //figure out how many sequences you have to process
            int numSeqsPerProcessor = numSeqs / processors;
            for (int i = 0; i < processors; i++) {
                int startIndex = i * numSeqsPerProcessor;
                if(i == (processors - 1)){ numSeqsPerProcessor = numSeqs - i * numSeqsPerProcessor; }
                lines.push_back(linePair(positions[startIndex], numSeqsPerProcessor));
            }
        }
#endif
        
        if(processors == 1){ numSeqs = driverCreateSummary(position, averageQ, scores, qualfile, lines[0]); }
        else{ numSeqs = createProcessesCreateSummary(position, averageQ, scores, qualfile); }
        
        if (m->control_pressed) {  return 0; }
        
        //print summary file
        map<string, string> variables;
        variables["[filename]"] = outputDir + m->getRootName(m->getSimpleName(qualfile));
        string summaryFile = getOutputFileName("summary",variables);
        printQual(summaryFile, position, averageQ, scores);
        
        if (m->control_pressed) {  m->mothurRemove(summaryFile); return 0; }
        
        //output results to screen
        cout.setf(ios::fixed, ios::floatfield); cout.setf(ios::showpoint);
        m->mothurOutEndLine();
        m->mothurOut("Position\tNumSeqs\tAverageQ"); m->mothurOutEndLine();
        for (int i = 0; i < position.size(); i+=100) {
            float average = averageQ[i] / (float) position[i];
            cout << i << '\t' << position[i] << '\t' << average;
            m->mothurOutJustToLog(toString(i) + "\t" + toString(position[i]) + "\t" + toString(average));
            m->mothurOutEndLine();
        }
        
        m->mothurOutEndLine();
        m->mothurOut("It took " + toString(time(NULL) - start) + " secs to create the summary file for " + toString(numSeqs) + " sequences."); m->mothurOutEndLine();
        m->mothurOutEndLine();
        m->mothurOutEndLine();
        m->mothurOut("Output File Names: "); m->mothurOutEndLine();
        m->mothurOut(summaryFile); m->mothurOutEndLine();
        outputNames.push_back(summaryFile); outputTypes["summary"].push_back(summaryFile);
        m->mothurOutEndLine();
        
        return 0;
    }
    catch(exception& e) {
        m->errorOut(e, "SummaryQualCommand", "execute");
        exit(1);
    }
}
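// Hedged sketch (not part of the mothur source above): on the Unix branch SummaryQualCommand::execute turns the
// byte offsets returned by m->divideFile into linePair(start, end) chunks, one per processor. The stand-alone
// program below mimics that step with hypothetical offsets, assuming only that divideFile returns n+1 increasing
// offsets bounding n roughly equal pieces of the file.
#include <iostream>
#include <utility>
#include <vector>

int main() {
    std::vector<unsigned long long> positions;               //hypothetical offsets for 3 processors
    positions.push_back(0); positions.push_back(1024); positions.push_back(2048); positions.push_back(3000);
    
    std::vector< std::pair<unsigned long long, unsigned long long> > chunks;
    for (size_t i = 0; i < positions.size()-1; i++) { chunks.push_back(std::make_pair(positions[i], positions[i+1])); }
    
    for (size_t i = 0; i < chunks.size(); i++) { std::cout << "chunk " << i << ": " << chunks[i].first << "-" << chunks[i].second << "\n"; }
    return 0;
}
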
int SeqSummaryCommand::execute(){
    try{
        if (abort == true) { if (calledHelp) { return 0; } return 2; }
        
        //set current fasta to fastafile
        m->setFastaFile(fastafile);
        
        map<string, string> variables;
        variables["[filename]"] = outputDir + m->getRootName(m->getSimpleName(fastafile));
        string summaryFile = getOutputFileName("summary",variables);
        
        int numSeqs = 0;
        
        vector<int> startPosition;
        vector<int> endPosition;
        vector<int> seqLength;
        vector<int> ambigBases;
        vector<int> longHomoPolymer;
        
        if (namefile != "") { nameMap = m->readNames(namefile); }
        else if (countfile != "") {
            CountTable ct;
            ct.readTable(countfile, false, false);
            nameMap = ct.getNameMap();
        }
        
        if (m->control_pressed) { return 0; }
        
#ifdef USE_MPI
        int pid, numSeqsPerProcessor;
        int tag = 2001;
        int startTag = 1; int endTag = 2; int lengthTag = 3; int baseTag = 4; int lhomoTag = 5;
        int outMode=MPI_MODE_CREATE|MPI_MODE_WRONLY;
        vector<unsigned long long> MPIPos;
        
        MPI_Status status;
        MPI_Status statusOut;
        MPI_File inMPI;
        MPI_File outMPI;
        
        MPI_Comm_size(MPI_COMM_WORLD, &processors);
        MPI_Comm_rank(MPI_COMM_WORLD, &pid);
        
        char tempFileName[1024];
        strcpy(tempFileName, fastafile.c_str());
        
        char sumFileName[1024];
        strcpy(sumFileName, summaryFile.c_str());
        
        MPI_File_open(MPI_COMM_WORLD, tempFileName, MPI_MODE_RDONLY, MPI_INFO_NULL, &inMPI);  //comm, filename, mode, info, filepointer
        MPI_File_open(MPI_COMM_WORLD, sumFileName, outMode, MPI_INFO_NULL, &outMPI);
        
        if (m->control_pressed) { MPI_File_close(&inMPI); MPI_File_close(&outMPI); return 0; }
        
        if (pid == 0) { //you are the root process
            //print header
            string outputString = "seqname\tstart\tend\tnbases\tambigs\tpolymer\tnumSeqs\n";
            int length = outputString.length();
            char* buf2 = new char[length];
            memcpy(buf2, outputString.c_str(), length);
            
            MPI_File_write_shared(outMPI, buf2, length, MPI_CHAR, &statusOut);
            delete [] buf2;
            
            MPIPos = m->setFilePosFasta(fastafile, numSeqs); //fills MPIPos, returns numSeqs
            
            for(int i = 1; i < processors; i++) {
                MPI_Send(&numSeqs, 1, MPI_INT, i, tag, MPI_COMM_WORLD);
                MPI_Send(&MPIPos[0], (numSeqs+1), MPI_LONG, i, tag, MPI_COMM_WORLD);
            }
            
            //figure out how many sequences you have to do
            numSeqsPerProcessor = numSeqs / processors;
            int startIndex = pid * numSeqsPerProcessor;
            if(pid == (processors - 1)){ numSeqsPerProcessor = numSeqs - pid * numSeqsPerProcessor; }
            
            //do your part
            MPICreateSummary(startIndex, numSeqsPerProcessor, startPosition, endPosition, seqLength, ambigBases, longHomoPolymer, inMPI, outMPI, MPIPos);
            
        }else { //i am the child process
            MPI_Recv(&numSeqs, 1, MPI_INT, 0, tag, MPI_COMM_WORLD, &status);
            MPIPos.resize(numSeqs+1);
            MPI_Recv(&MPIPos[0], (numSeqs+1), MPI_LONG, 0, tag, MPI_COMM_WORLD, &status);
            
            //figure out how many sequences you have to process
            numSeqsPerProcessor = numSeqs / processors;
            int startIndex = pid * numSeqsPerProcessor;
            if(pid == (processors - 1)){ numSeqsPerProcessor = numSeqs - pid * numSeqsPerProcessor; }
            
            //do your part
            MPICreateSummary(startIndex, numSeqsPerProcessor, startPosition, endPosition, seqLength, ambigBases, longHomoPolymer, inMPI, outMPI, MPIPos);
        }
        
        MPI_File_close(&inMPI);
        MPI_File_close(&outMPI);
        MPI_Barrier(MPI_COMM_WORLD); //make everyone wait - just in case
        
        if (pid == 0) { //get the info from the child processes
            for(int i = 1; i < processors; i++) {
                int size;
                MPI_Recv(&size, 1, MPI_INT, i, tag, MPI_COMM_WORLD, &status);
                
                vector<int> temp; temp.resize(size+1);
                
                for(int j = 0; j < 5; j++) {
                    MPI_Recv(&temp[0], (size+1), MPI_INT, i, 2001, MPI_COMM_WORLD, &status);
                    int receiveTag = temp[temp.size()-1];  //child process added an int to the end to indicate what count this is for
                    
                    if (receiveTag == startTag)         { for (int k = 0; k < size; k++) { startPosition.push_back(temp[k]); } }
                    else if (receiveTag == endTag)      { for (int k = 0; k < size; k++) { endPosition.push_back(temp[k]); } }
                    else if (receiveTag == lengthTag)   { for (int k = 0; k < size; k++) { seqLength.push_back(temp[k]); } }
                    else if (receiveTag == baseTag)     { for (int k = 0; k < size; k++) { ambigBases.push_back(temp[k]); } }
                    else if (receiveTag == lhomoTag)    { for (int k = 0; k < size; k++) { longHomoPolymer.push_back(temp[k]); } }
                }
            }
        }else{ //send my counts
            int size = startPosition.size();
            MPI_Send(&size, 1, MPI_INT, 0, tag, MPI_COMM_WORLD);
            
            startPosition.push_back(startTag);
            int ierr = MPI_Send(&(startPosition[0]), (size+1), MPI_INT, 0, 2001, MPI_COMM_WORLD);
            endPosition.push_back(endTag);
            ierr = MPI_Send (&(endPosition[0]), (size+1), MPI_INT, 0, 2001, MPI_COMM_WORLD);
            seqLength.push_back(lengthTag);
            ierr = MPI_Send(&(seqLength[0]), (size+1), MPI_INT, 0, 2001, MPI_COMM_WORLD);
            ambigBases.push_back(baseTag);
            ierr = MPI_Send(&(ambigBases[0]), (size+1), MPI_INT, 0, 2001, MPI_COMM_WORLD);
            longHomoPolymer.push_back(lhomoTag);
            ierr = MPI_Send(&(longHomoPolymer[0]), (size+1), MPI_INT, 0, 2001, MPI_COMM_WORLD);
        }
        
        MPI_Barrier(MPI_COMM_WORLD); //make everyone wait - just in case
#else
        vector<unsigned long long> positions;
#if defined (__APPLE__) || (__MACH__) || (linux) || (__linux) || (__linux__) || (__unix__) || (__unix)
        positions = m->divideFile(fastafile, processors);
        for (int i = 0; i < (positions.size()-1); i++) { lines.push_back(new linePair(positions[i], positions[(i+1)])); }
#else
        positions = m->setFilePosFasta(fastafile, numSeqs);
        if (positions.size() < processors) { processors = positions.size(); }
        
        //figure out how many sequences you have to process
        int numSeqsPerProcessor = numSeqs / processors;
        for (int i = 0; i < processors; i++) {
            int startIndex = i * numSeqsPerProcessor;
            if(i == (processors - 1)){ numSeqsPerProcessor = numSeqs - i * numSeqsPerProcessor; }
            lines.push_back(new linePair(positions[startIndex], numSeqsPerProcessor));
        }
#endif
        
        if(processors == 1){ numSeqs = driverCreateSummary(startPosition, endPosition, seqLength, ambigBases, longHomoPolymer, fastafile, summaryFile, lines[0]); }
        else{ numSeqs = createProcessesCreateSummary(startPosition, endPosition, seqLength, ambigBases, longHomoPolymer, fastafile, summaryFile); }
        
        if (m->control_pressed) {  return 0; }
#endif
        
#ifdef USE_MPI
        if (pid == 0) {
#endif
        sort(startPosition.begin(), startPosition.end());
        sort(endPosition.begin(), endPosition.end());
        sort(seqLength.begin(), seqLength.end());
        sort(ambigBases.begin(), ambigBases.end());
        sort(longHomoPolymer.begin(), longHomoPolymer.end());
        int size = startPosition.size();
        
        //find means
        unsigned long long meanStartPosition, meanEndPosition, meanSeqLength, meanAmbigBases, meanLongHomoPolymer;
        meanStartPosition = 0; meanEndPosition = 0; meanSeqLength = 0; meanAmbigBases = 0; meanLongHomoPolymer = 0;
        for (int i = 0; i < size; i++) {
            meanStartPosition += startPosition[i];
            meanEndPosition += endPosition[i];
            meanSeqLength += seqLength[i];
            meanAmbigBases += ambigBases[i];
            meanLongHomoPolymer += longHomoPolymer[i];
        }
        
        double meanstartPosition, meanendPosition, meanseqLength, meanambigBases, meanlongHomoPolymer;
        meanstartPosition = meanStartPosition / (double) size;
        meanendPosition = meanEndPosition / (double) size;
        meanlongHomoPolymer = meanLongHomoPolymer / (double) size;
        meanseqLength = meanSeqLength / (double) size;
        meanambigBases = meanAmbigBases / (double) size;
        
        int ptile0_25  = int(size * 0.025);
        int ptile25    = int(size * 0.250);
        int ptile50    = int(size * 0.500);
        int ptile75    = int(size * 0.750);
        int ptile97_5  = int(size * 0.975);
        int ptile100   = size - 1;
        
        //to compensate for blank sequences that would result in startPosition and endPosition equalling -1
        if (startPosition[0] == -1) { startPosition[0] = 0; }
        if (endPosition[0] == -1)   { endPosition[0] = 0;   }
        
        if (m->control_pressed) {  m->mothurRemove(summaryFile); return 0; }
        
        m->mothurOutEndLine();
        m->mothurOut("\t\tStart\tEnd\tNBases\tAmbigs\tPolymer\tNumSeqs"); m->mothurOutEndLine();
        m->mothurOut("Minimum:\t" + toString(startPosition[0]) + "\t" + toString(endPosition[0]) + "\t" + toString(seqLength[0]) + "\t" + toString(ambigBases[0]) + "\t" + toString(longHomoPolymer[0]) + "\t" + toString(1)); m->mothurOutEndLine();
        m->mothurOut("2.5%-tile:\t" + toString(startPosition[ptile0_25]) + "\t" + toString(endPosition[ptile0_25]) + "\t" + toString(seqLength[ptile0_25]) + "\t" + toString(ambigBases[ptile0_25]) + "\t"+ toString(longHomoPolymer[ptile0_25]) + "\t" + toString(ptile0_25+1)); m->mothurOutEndLine();
        m->mothurOut("25%-tile:\t" + toString(startPosition[ptile25]) + "\t" + toString(endPosition[ptile25]) + "\t" + toString(seqLength[ptile25]) + "\t" + toString(ambigBases[ptile25]) + "\t" + toString(longHomoPolymer[ptile25]) + "\t" + toString(ptile25+1)); m->mothurOutEndLine();
        m->mothurOut("Median: \t" + toString(startPosition[ptile50]) + "\t" + toString(endPosition[ptile50]) + "\t" + toString(seqLength[ptile50]) + "\t" + toString(ambigBases[ptile50]) + "\t" + toString(longHomoPolymer[ptile50]) + "\t" + toString(ptile50+1)); m->mothurOutEndLine();
        m->mothurOut("75%-tile:\t" + toString(startPosition[ptile75]) + "\t" + toString(endPosition[ptile75]) + "\t" + toString(seqLength[ptile75]) + "\t" + toString(ambigBases[ptile75]) + "\t" + toString(longHomoPolymer[ptile75]) + "\t" + toString(ptile75+1)); m->mothurOutEndLine();
        m->mothurOut("97.5%-tile:\t" + toString(startPosition[ptile97_5]) + "\t" + toString(endPosition[ptile97_5]) + "\t" + toString(seqLength[ptile97_5]) + "\t" + toString(ambigBases[ptile97_5]) + "\t" + toString(longHomoPolymer[ptile97_5]) + "\t" + toString(ptile97_5+1)); m->mothurOutEndLine();
        m->mothurOut("Maximum:\t" + toString(startPosition[ptile100]) + "\t" + toString(endPosition[ptile100]) + "\t" + toString(seqLength[ptile100]) + "\t" + toString(ambigBases[ptile100]) + "\t" + toString(longHomoPolymer[ptile100]) + "\t" + toString(ptile100+1)); m->mothurOutEndLine();
        m->mothurOut("Mean:\t" + toString(meanstartPosition) + "\t" + toString(meanendPosition) + "\t" + toString(meanseqLength) + "\t" + toString(meanambigBases) + "\t" + toString(meanlongHomoPolymer)); m->mothurOutEndLine();
        
        if ((namefile == "") && (countfile == "")) {  m->mothurOut("# of Seqs:\t" + toString(numSeqs)); m->mothurOutEndLine(); }
        else { m->mothurOut("# of unique seqs:\t" + toString(numSeqs)); m->mothurOutEndLine(); m->mothurOut("total # of seqs:\t" + toString(startPosition.size())); m->mothurOutEndLine(); }
        
        if (m->control_pressed) {  m->mothurRemove(summaryFile); return 0; }
        
        m->mothurOutEndLine();
        m->mothurOut("Output File Names: "); m->mothurOutEndLine();
        m->mothurOut(summaryFile); m->mothurOutEndLine();
        outputNames.push_back(summaryFile); outputTypes["summary"].push_back(summaryFile);
        m->mothurOutEndLine();
        
#ifdef USE_MPI
        }
#endif
        
        //set summary file as new current summaryfile
        string current = "";
        itTypes = outputTypes.find("summary");
        if (itTypes != outputTypes.end()) {
            if ((itTypes->second).size() != 0) { current = (itTypes->second)[0]; m->setSummaryFile(current); }
        }
        
        return 0;
    }
    catch(exception& e) {
        m->errorOut(e, "SeqSummaryCommand", "execute");
        exit(1);
    }
}
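// Hedged sketch (not part of the mothur source above): after sorting each per-sequence vector, the summary above
// reads values at fixed fractions of the vector length (2.5%, 25%, 50%, 75%, 97.5%, maximum). The stand-alone
// program below reproduces just that index arithmetic, truncating toward zero exactly as int(size * fraction) does.
#include <cstdio>

static int ptileIndex(int size, double fraction) { return (int)(size * fraction); }

int main() {
    int size = 1000;                                                //hypothetical number of sequences
    std::printf("2.5%%-tile index: %d\n", ptileIndex(size, 0.025)); //25
    std::printf("median index:    %d\n", ptileIndex(size, 0.500)); //500
    std::printf("maximum index:   %d\n", size - 1);                //999
    return 0;
}
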
//**********************************************************************************************************************

SharedCommand::SharedCommand(string option)  {
    try {
        abort = false; calledHelp = false; pickedGroups=false; allLines = 1;
        
        //allow user to run help
        if(option == "help") { help(); abort = true; calledHelp = true; }
        else if(option == "citation") { citation(); abort = true; calledHelp = true; }
        else {
            vector<string> myArray = setParameters();
            
            OptionParser parser(option);
            map<string, string> parameters = parser.getParameters();
            
            ValidParameters validParameter;
            map<string, string>::iterator it;
            
            //check to make sure all parameters are valid for command
            for (it = parameters.begin(); it != parameters.end(); it++) {
                if (!validParameter.isValidParameter(it->first, myArray, it->second)) {  abort = true;  }
            }
            
            //if the user changes the input directory command factory will send this info to us in the output parameter
            string inputDir = validParameter.valid(parameters, "inputdir");
            if (inputDir == "not found"){ inputDir = ""; }
            else {
                string path;
                it = parameters.find("list");
                //user has given a template file
                if(it != parameters.end()){
                    path = util.hasPath(it->second);
                    //if the user has not given a path then, add inputdir. else leave path alone.
                    if (path == "") { parameters["list"] = inputDir + it->second; }
                }
                
                it = parameters.find("group");
                //user has given a template file
                if(it != parameters.end()){
                    path = util.hasPath(it->second);
                    //if the user has not given a path then, add inputdir. else leave path alone.
                    if (path == "") { parameters["group"] = inputDir + it->second; }
                }
                
                it = parameters.find("count");
                //user has given a template file
                if(it != parameters.end()){
                    path = util.hasPath(it->second);
                    //if the user has not given a path then, add inputdir. else leave path alone.
                    if (path == "") { parameters["count"] = inputDir + it->second; }
                }
                
                it = parameters.find("biom");
                //user has given a template file
                if(it != parameters.end()){
                    path = util.hasPath(it->second);
                    //if the user has not given a path then, add inputdir. else leave path alone.
                    if (path == "") { parameters["biom"] = inputDir + it->second; }
                }
            }
            
            vector<string> tempOutNames;
            outputTypes["shared"] = tempOutNames;
            outputTypes["group"] = tempOutNames;
            outputTypes["map"] = tempOutNames;
            
            //if the user changes the output directory command factory will send this info to us in the output parameter
            outputDir = validParameter.valid(parameters, "outputdir");  if (outputDir == "not found"){ outputDir = ""; }
            
            //check for required parameters
            listfile = validParameter.validFile(parameters, "list");
            if (listfile == "not open") { listfile = ""; abort = true; }
            else if (listfile == "not found") { listfile = ""; }
            else { current->setListFile(listfile); }
            
            biomfile = validParameter.validFile(parameters, "biom");
            if (biomfile == "not open") { biomfile = ""; abort = true; }
            else if (biomfile == "not found") { biomfile = ""; }
            else { current->setBiomFile(biomfile); }
            
            ordergroupfile = validParameter.validFile(parameters, "ordergroup");
            if (ordergroupfile == "not open") { abort = true; }
            else if (ordergroupfile == "not found") { ordergroupfile = ""; }
            
            groupfile = validParameter.validFile(parameters, "group");
            if (groupfile == "not open") { groupfile = ""; abort = true; }
            else if (groupfile == "not found") { groupfile = ""; }
            else { current->setGroupFile(groupfile); }
            
            countfile = validParameter.validFile(parameters, "count");
            if (countfile == "not open") { countfile = ""; abort = true; }
            else if (countfile == "not found") { countfile = ""; }
            else {
                current->setCountFile(countfile);
                CountTable temp;
                if (!temp.testGroups(countfile)) {
                    m->mothurOut("\n[WARNING]: Your count file does not have group info, all reads will be assigned to mothurGroup.\n");
                    
                    temp.readTable(countfile, false, false); //dont read groups
                    map<string, int> seqs = temp.getNameMap();
                    
                    CountTable newCountTable;
                    newCountTable.addGroup("mothurGroup");
                    
                    for (map<string, int>::iterator it = seqs.begin(); it != seqs.end(); it++) {
                        vector<int> counts; counts.push_back(it->second);
                        newCountTable.push_back(it->first, counts);
                    }
                    
                    string newCountfileName = util.getRootName(countfile) + "mothurGroup" + util.getExtension(countfile);
                    newCountTable.printTable(newCountfileName);
                    
                    current->setCountFile(newCountfileName);
                    countfile = newCountfileName;
                    outputNames.push_back(newCountfileName);
                }
            }
            
            if ((biomfile == "") && (listfile == "") && (countfile == "")) { //you must provide at least one of the following
                //is there a current file available for any of these?
                //give priority to list, then biom, then count
                listfile = current->getListFile();
                if (listfile != "") {  m->mothurOut("Using " + listfile + " as input file for the list parameter.\n"); }
                else {
                    biomfile = current->getBiomFile();
                    if (biomfile != "") {  m->mothurOut("Using " + biomfile + " as input file for the biom parameter.\n"); }
                    else {
                        countfile = current->getCountFile();
                        if (countfile != "") {  m->mothurOut("Using " + countfile + " as input file for the count parameter.\n"); }
                        else {
                            m->mothurOut("[ERROR]: No valid current files. You must provide a list or biom or count file before you can use the make.shared command.\n");
                            abort = true;
                        }
                    }
                }
            }
            else if ((biomfile != "") && (listfile != "")) { m->mothurOut("When executing a make.shared command you must enter ONLY ONE of the following: list or biom.\n"); abort = true; }
            
            if (listfile != "") {
                if ((groupfile == "") && (countfile == "")) {
                    groupfile = current->getGroupFile();
                    if (groupfile != "") {  m->mothurOut("Using " + groupfile + " as input file for the group parameter.\n"); }
                    else {
                        countfile = current->getCountFile();
                        if (countfile != "") {  m->mothurOut("Using " + countfile + " as input file for the count parameter.\n"); }
                        else { m->mothurOut("[ERROR]: You need to provide a groupfile or countfile if you are going to use the list format.\n"); abort = true; }
                    }
                }
            }
            
            string groups = validParameter.valid(parameters, "groups");
            if (groups == "not found") { groups = ""; }
            else {
                pickedGroups=true;
                util.splitAtDash(groups, Groups);
                if (Groups.size() != 0) { if (Groups[0]== "all") { Groups.clear(); } }
            }
            
            //check for optional parameter and set defaults
            // ...at some point should add some additional type checking...
            string label = validParameter.valid(parameters, "label");
            if (label == "not found") { label = ""; }
            else {
                if(label != "all") {  util.splitAtDash(label, labels);  allLines = 0;  }
                else { allLines = 1; }
            }
            
            if ((listfile == "") && (biomfile == "") && (countfile != "")) { //building a shared file from a count file, require label
                if (labels.size() == 0) { m->mothurOut("[ERROR]: You must provide a label when converting a count file to a shared file, please correct.\n"); abort = true; }
            }
        }
    }
    catch(exception& e) {
        m->errorOut(e, "SharedCommand", "SharedCommand");
        exit(1);
    }
}
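// Hedged sketch (not part of the mothur source above): the SharedCommand constructor repeats one pattern for the
// list, group, count, and biom parameters: if the user supplied a bare filename, prefix it with inputdir, otherwise
// leave the user's path alone. The stand-alone helper below captures that pattern; it stands in for util.hasPath,
// which is assumed to return an empty string for bare filenames.
#include <iostream>
#include <string>

static std::string prefixInputDir(const std::string& filename, const std::string& inputDir) {
    bool hasPath = (filename.find_last_of("/\\") != std::string::npos); //stand-in for a non-empty util.hasPath(filename)
    return hasPath ? filename : inputDir + filename;
}

int main() {
    std::cout << prefixInputDir("final.list", "/data/run1/") << "\n";      // /data/run1/final.list
    std::cout << prefixInputDir("/tmp/final.list", "/data/run1/") << "\n"; // /tmp/final.list (left alone)
    return 0;
}
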
int ClusterCommand::execute(){
    try {
        if (abort == true) { if (calledHelp) { return 0; } return 2; }
        
        //phylip file given and cutoff not given - use cluster.classic because it uses less memory and is faster
        if ((format == "phylip") && (cutoff > 10.0)) {
            m->mothurOutEndLine();
            m->mothurOut("You are using a phylip file and no cutoff. I will run cluster.classic to save memory and time."); m->mothurOutEndLine();
            
            //build the cluster.classic command string from this command's options
            string inputString = "phylip=" + distfile;
            if (namefile != "") { inputString += ", name=" + namefile; }
            else if (countfile != "") { inputString += ", count=" + countfile; }
            inputString += ", precision=" + toString(precision);
            inputString += ", method=" + method;
            if (hard) { inputString += ", hard=T"; }
            else { inputString += ", hard=F"; }
            if (sim) { inputString += ", sim=T"; }
            else { inputString += ", sim=F"; }
            
            m->mothurOutEndLine();
            m->mothurOut("/------------------------------------------------------------/"); m->mothurOutEndLine();
            m->mothurOut("Running command: cluster.classic(" + inputString + ")"); m->mothurOutEndLine();
            
            Command* clusterClassicCommand = new ClusterDoturCommand(inputString);
            clusterClassicCommand->execute();
            delete clusterClassicCommand;
            
            m->mothurOut("/------------------------------------------------------------/"); m->mothurOutEndLine();
            
            return 0;
        }
        
        ReadMatrix* read;
        if (format == "column") { read = new ReadColumnMatrix(columnfile, sim); } //sim indicates whether its a similarity matrix
        else if (format == "phylip") { read = new ReadPhylipMatrix(phylipfile, sim); }
        
        read->setCutoff(cutoff);
        
        NameAssignment* nameMap = NULL;
        CountTable* ct = NULL;
        map<string, int> counts;
        if(namefile != ""){
            nameMap = new NameAssignment(namefile);
            nameMap->readMap();
            read->read(nameMap);
        }else if (countfile != "") {
            ct = new CountTable();
            ct->readTable(countfile, false, false);
            read->read(ct);
            counts = ct->getNameMap();
        }else { read->read(nameMap); }
        
        list = read->getListVector();
        matrix = read->getDMatrix();
        
        if(countfile != "") {
            rabund = new RAbundVector();
            createRabund(ct, list, rabund); //creates an rabund that includes the counts for the unique list
            delete ct;
        }else { rabund = new RAbundVector(list->getRAbundVector()); }
        
        delete read;
        
        if (m->control_pressed) { //clean up
            delete list; delete matrix; delete rabund;
            if(countfile == ""){ rabundFile.close(); sabundFile.close(); m->mothurRemove((fileroot+ tag + ".rabund")); m->mothurRemove((fileroot+ tag + ".sabund")); }
            listFile.close(); m->mothurRemove((fileroot+ tag + ".list"));
            outputTypes.clear();
            return 0;
        }
        
        //create cluster
        if (method == "furthest")       { cluster = new CompleteLinkage(rabund, list, matrix, cutoff, method, adjust); }
        else if(method == "nearest")    { cluster = new SingleLinkage(rabund, list, matrix, cutoff, method, adjust); }
        else if(method == "average")    { cluster = new AverageLinkage(rabund, list, matrix, cutoff, method, adjust); }
        else if(method == "weighted")   { cluster = new WeightedLinkage(rabund, list, matrix, cutoff, method, adjust); }
        tag = cluster->getTag();
        
        if (outputDir == "") { outputDir += m->hasPath(distfile); }
        fileroot = outputDir + m->getRootName(m->getSimpleName(distfile));
        
        map<string, string> variables;
        variables["[filename]"] = fileroot;
        variables["[clustertag]"] = tag;
        string sabundFileName = getOutputFileName("sabund", variables);
        string rabundFileName = getOutputFileName("rabund", variables);
        if (countfile != "") { variables["[tag2]"] = "unique_list"; }
        string listFileName = getOutputFileName("list", variables);
        
        if (countfile == "") {
            m->openOutputFile(sabundFileName, sabundFile);
            m->openOutputFile(rabundFileName, rabundFile);
            outputNames.push_back(sabundFileName); outputTypes["sabund"].push_back(sabundFileName);
            outputNames.push_back(rabundFileName); outputTypes["rabund"].push_back(rabundFileName);
        }
        m->openOutputFile(listFileName, listFile);
        outputNames.push_back(listFileName); outputTypes["list"].push_back(listFileName);
        
        list->printHeaders(listFile);
        
        time_t estart = time(NULL);
        float previousDist = 0.00000;
        float rndPreviousDist = 0.00000;
        oldRAbund = *rabund;
        oldList = *list;
        
        print_start = true;
        start = time(NULL);
        loops = 0;
        double saveCutoff = cutoff;
        
        while (matrix->getSmallDist() < cutoff && matrix->getNNodes() > 0){
            
            if (m->control_pressed) { //clean up
                delete list; delete matrix; delete rabund; delete cluster;
                if(countfile == "") { rabundFile.close(); sabundFile.close(); m->mothurRemove((fileroot+ tag + ".rabund")); m->mothurRemove((fileroot+ tag + ".sabund")); }
                listFile.close(); m->mothurRemove((fileroot+ tag + ".list"));
                outputTypes.clear();
                return 0;
            }
            
            if (print_start && m->isTrue(timing)) {
                m->mothurOut("Clustering (" + tag + ") dist " + toString(matrix->getSmallDist()) + "/" + toString(m->roundDist(matrix->getSmallDist(), precision)) + "\t(precision: " + toString(precision) + ", Nodes: " + toString(matrix->getNNodes()) + ")");
                cout.flush();
                print_start = false;
            }
            
            loops++;
            
            cluster->update(cutoff);
            
            float dist = matrix->getSmallDist();
            float rndDist;
            if (hard) { rndDist = m->ceilDist(dist, precision); }
            else{ rndDist = m->roundDist(dist, precision); }
            
            if(previousDist <= 0.0000 && dist != previousDist){ printData("unique", counts); }
            else if(rndDist != rndPreviousDist){ printData(toString(rndPreviousDist, length-1), counts); }
            
            previousDist = dist;
            rndPreviousDist = rndDist;
            oldRAbund = *rabund;
            oldList = *list;
        }
        
        if (print_start && m->isTrue(timing)) {
            m->mothurOut("Clustering (" + tag + ") for distance " + toString(previousDist) + "/" + toString(rndPreviousDist) + "\t(precision: " + toString(precision) + ", Nodes: " + toString(matrix->getNNodes()) + ")");
            cout.flush();
            print_start = false;
        }
        
        if(previousDist <= 0.0000){ printData("unique", counts); }
        else if(rndPreviousDist<cutoff){ printData(toString(rndPreviousDist, length-1), counts); }
        
        delete matrix; delete list; delete rabund; delete cluster;
        
        if (countfile == "") { sabundFile.close(); rabundFile.close(); }
        listFile.close();
        
        if (saveCutoff != cutoff) {
            if (hard) { saveCutoff = m->ceilDist(saveCutoff, precision); }
            else { saveCutoff = m->roundDist(saveCutoff, precision); }
            m->mothurOut("changed cutoff to " + toString(cutoff)); m->mothurOutEndLine();
        }
        
        //set list file as new current listfile
        string current = "";
        itTypes = outputTypes.find("list");
        if (itTypes != outputTypes.end()) {
            if ((itTypes->second).size() != 0) { current = (itTypes->second)[0]; m->setListFile(current); }
        }
        
        //set rabund file as new current rabundfile
        itTypes = outputTypes.find("rabund");
        if (itTypes != outputTypes.end()) {
            if ((itTypes->second).size() != 0) { current = (itTypes->second)[0]; m->setRabundFile(current); }
        }
        
        //set sabund file as new current sabundfile
        itTypes = outputTypes.find("sabund");
        if (itTypes != outputTypes.end()) {
            if ((itTypes->second).size() != 0) { current = (itTypes->second)[0]; m->setSabundFile(current); }
        }
        
        m->mothurOutEndLine();
        m->mothurOut("Output File Names: "); m->mothurOutEndLine();
        for (int i = 0; i < outputNames.size(); i++) { m->mothurOut(outputNames[i]); m->mothurOutEndLine(); }
        m->mothurOutEndLine();
        
        //if (m->isTrue(timing)) {
            m->mothurOut("It took " + toString(time(NULL) - estart) + " seconds to cluster"); m->mothurOutEndLine();
        //}
        
        return 0;
    }
    catch(exception& e) {
        m->errorOut(e, "ClusterCommand", "execute");
        exit(1);
    }
}
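// Hedged sketch (not part of the mothur source above): ClusterCommand::execute labels distances with m->ceilDist
// when hard=T and with m->roundDist otherwise. Neither utility appears in this listing; the stand-alone program
// below shows the assumed difference, ceiling versus nearest rounding at the reporting precision, which is why a
// hard cutoff can push a distance such as 0.034 into the 0.04 label while soft rounding keeps it at 0.03.
#include <cmath>
#include <cstdio>

static float ceilDistSketch(float dist, int precision)  { return (int)(std::ceil(dist * precision)) / (float)precision; }
static float roundDistSketch(float dist, int precision) { return (int)(dist * precision + 0.5) / (float)precision; }

int main() {
    std::printf("hard (ceil):    %.2f\n", ceilDistSketch(0.034f, 100));  //0.04
    std::printf("soft (nearest): %.2f\n", roundDistSketch(0.034f, 100)); //0.03
    return 0;
}
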
int SeqSummaryCommand::execute(){
    try{
        if (abort == true) { if (calledHelp) { return 0; } return 2; }
        
        int start = time(NULL);
        
        //set current fasta to fastafile
        m->setFastaFile(fastafile);
        
        map<string, string> variables;
        variables["[filename]"] = outputDir + m->getRootName(m->getSimpleName(fastafile));
        string summaryFile = getOutputFileName("summary",variables);
        
        long long numSeqs = 0;
        long long size = 0;
        long long numUniques = 0;
        
        map<int, long long> startPosition;
        map<int, long long> endPosition;
        map<int, long long> seqLength;
        map<int, long long> ambigBases;
        map<int, long long> longHomoPolymer;
        
        if (namefile != "") { nameMap = m->readNames(namefile); numUniques = nameMap.size(); }
        else if (countfile != "") {
            CountTable ct;
            ct.readTable(countfile, false, false);
            nameMap = ct.getNameMap();
            size = ct.getNumSeqs();
            numUniques = ct.getNumUniqueSeqs();
        }
        
        if (m->control_pressed) { return 0; }
        
        vector<unsigned long long> positions;
#if defined (__APPLE__) || (__MACH__) || (linux) || (__linux) || (__linux__) || (__unix__) || (__unix)
        positions = m->divideFile(fastafile, processors);
        for (int i = 0; i < (positions.size()-1); i++) { lines.push_back(new linePair(positions[i], positions[(i+1)])); }
#else
        positions = m->setFilePosFasta(fastafile, numSeqs);
        if (numSeqs < processors) { processors = numSeqs; }
        
        //figure out how many sequences you have to process
        int numSeqsPerProcessor = numSeqs / processors;
        for (int i = 0; i < processors; i++) {
            int startIndex = i * numSeqsPerProcessor;
            if(i == (processors - 1)){ numSeqsPerProcessor = numSeqs - i * numSeqsPerProcessor; }
            lines.push_back(new linePair(positions[startIndex], numSeqsPerProcessor));
        }
#endif
        
        if(processors == 1){ numSeqs = driverCreateSummary(startPosition, endPosition, seqLength, ambigBases, longHomoPolymer, fastafile, summaryFile, lines[0]); }
        else{ numSeqs = createProcessesCreateSummary(startPosition, endPosition, seqLength, ambigBases, longHomoPolymer, fastafile, summaryFile); }
        
        if (m->control_pressed) {  return 0; }
        
        //set size
        if (countfile != "") {} //already set
        else if (namefile == "") { size = numSeqs; }
        else { for (map<int, long long>::iterator it = startPosition.begin(); it != startPosition.end(); it++) { size += it->second; } }
        
        if ((namefile != "") || (countfile != "")) {
            string type = "count";
            if (namefile != "") { type = "name"; }
            if (numSeqs != numUniques) { //do fasta and name/count files match
                m->mothurOut("[ERROR]: Your " + type + " file contains " + toString(numUniques) + " unique sequences, but your fasta file contains " + toString(numSeqs) + ". File mismatch detected, quitting command.\n");
                m->control_pressed = true;
            }
        }
        
        if (m->control_pressed) {  m->mothurRemove(summaryFile); return 0; }
        
        long long ptile0_25  = 1+(long long)(size * 0.025); //number of sequences at 2.5%
        long long ptile25    = 1+(long long)(size * 0.250); //number of sequences at 25%
        long long ptile50    = 1+(long long)(size * 0.500);
        long long ptile75    = 1+(long long)(size * 0.750);
        long long ptile97_5  = 1+(long long)(size * 0.975);
        long long ptile100   = (long long)(size);
        
        vector<int> starts;  starts.resize(7,0);
        vector<int> ends;    ends.resize(7,0);
        vector<int> ambigs;  ambigs.resize(7,0);
        vector<int> lengths; lengths.resize(7,0);
        vector<int> homops;  homops.resize(7,0);
        
        //find means
        long long meanStartPosition, meanEndPosition, meanSeqLength, meanAmbigBases, meanLongHomoPolymer;
        meanStartPosition = 0; meanEndPosition = 0; meanSeqLength = 0; meanAmbigBases = 0; meanLongHomoPolymer = 0;
        
        //minimum
        if ((startPosition.begin())->first == -1) { starts[0] = 0; }
        else { starts[0] = (startPosition.begin())->first; }
        
        long long totalSoFar = 0;
        //set all values to min
        starts[1] = starts[0]; starts[2] = starts[0]; starts[3] = starts[0]; starts[4] = starts[0]; starts[5] = starts[0];
        int lastValue = 0;
        for (map<int, long long>::iterator it = startPosition.begin(); it != startPosition.end(); it++) {
            int value = it->first; if (value == -1) { value = 0; }
            meanStartPosition += (value*it->second);
            totalSoFar += it->second;
            if (((totalSoFar <= ptile0_25) && (totalSoFar > 1)) || ((lastValue < ptile0_25) && (totalSoFar > ptile0_25))){ starts[1] = value; } //save value
            if (((totalSoFar <= ptile25) && (totalSoFar > ptile0_25)) || ((lastValue < ptile25) && (totalSoFar > ptile25))) { starts[2] = value; } //save value
            if (((totalSoFar <= ptile50) && (totalSoFar > ptile25)) || ((lastValue < ptile50) && (totalSoFar > ptile50))) { starts[3] = value; } //save value
            if (((totalSoFar <= ptile75) && (totalSoFar > ptile50)) || ((lastValue < ptile75) && (totalSoFar > ptile75))) { starts[4] = value; } //save value
            if (((totalSoFar <= ptile97_5) && (totalSoFar > ptile75)) || ((lastValue < ptile97_5) && (totalSoFar > ptile97_5))) { starts[5] = value; } //save value
            if ((totalSoFar <= ptile100) && (totalSoFar > ptile97_5)) { starts[6] = value; } //save value
            lastValue = totalSoFar;
        }
        starts[6] = (startPosition.rbegin())->first;
        
        if ((endPosition.begin())->first == -1) { ends[0] = 0; }
        else { ends[0] = (endPosition.begin())->first; }
        totalSoFar = 0;
        //set all values to min
        ends[1] = ends[0]; ends[2] = ends[0]; ends[3] = ends[0]; ends[4] = ends[0]; ends[5] = ends[0];
        lastValue = 0;
        for (map<int, long long>::iterator it = endPosition.begin(); it != endPosition.end(); it++) {
            int value = it->first; if (value == -1) { value = 0; }
            meanEndPosition += (value*it->second);
            totalSoFar += it->second;
            if (((totalSoFar <= ptile0_25) && (totalSoFar > 1)) || ((lastValue < ptile0_25) && (totalSoFar > ptile0_25))){ ends[1] = value; } //save value
            if (((totalSoFar <= ptile25) && (totalSoFar > ptile0_25)) || ((lastValue < ptile25) && (totalSoFar > ptile25))) { ends[2] = value; } //save value
            if (((totalSoFar <= ptile50) && (totalSoFar > ptile25)) || ((lastValue < ptile50) && (totalSoFar > ptile50))) { ends[3] = value; } //save value
            if (((totalSoFar <= ptile75) && (totalSoFar > ptile50)) || ((lastValue < ptile75) && (totalSoFar > ptile75))) { ends[4] = value; } //save value
            if (((totalSoFar <= ptile97_5) && (totalSoFar > ptile75)) || ((lastValue < ptile97_5) && (totalSoFar > ptile97_5))) { ends[5] = value; } //save value
            if ((totalSoFar <= ptile100) && (totalSoFar > ptile97_5)) { ends[6] = value; } //save value
            lastValue = totalSoFar;
        }
        ends[6] = (endPosition.rbegin())->first;
        
        if ((seqLength.begin())->first == -1) { lengths[0] = 0; }
        else { lengths[0] = (seqLength.begin())->first; }
        //set all values to min
        lengths[1] = lengths[0]; lengths[2] = lengths[0]; lengths[3] = lengths[0]; lengths[4] = lengths[0]; lengths[5] = lengths[0];
        totalSoFar = 0;
        lastValue = 0;
        for (map<int, long long>::iterator it = seqLength.begin(); it != seqLength.end(); it++) {
            int value = it->first;
            meanSeqLength += (value*it->second);
            totalSoFar += it->second;
            if (((totalSoFar <= ptile0_25) && (totalSoFar > 1)) || ((lastValue < ptile0_25) && (totalSoFar > ptile0_25))){ lengths[1] = value; } //save value
            if (((totalSoFar <= ptile25) && (totalSoFar > ptile0_25)) || ((lastValue < ptile25) && (totalSoFar > ptile25))) { lengths[2] = value; } //save value
            if (((totalSoFar <= ptile50) && (totalSoFar > ptile25)) || ((lastValue < ptile50) && (totalSoFar > ptile50))) { lengths[3] = value; } //save value
            if (((totalSoFar <= ptile75) && (totalSoFar > ptile50)) || ((lastValue < ptile75) && (totalSoFar > ptile75))) { lengths[4] = value; } //save value
            if (((totalSoFar <= ptile97_5) && (totalSoFar > ptile75)) || ((lastValue < ptile97_5) && (totalSoFar > ptile97_5))) { lengths[5] = value; } //save value
            if ((totalSoFar <= ptile100) && (totalSoFar > ptile97_5)) { lengths[6] = value; } //save value
            lastValue = totalSoFar;
        }
        lengths[6] = (seqLength.rbegin())->first;
        
        if ((ambigBases.begin())->first == -1) { ambigs[0] = 0; }
        else { ambigs[0] = (ambigBases.begin())->first; }
        //set all values to min
        ambigs[1] = ambigs[0]; ambigs[2] = ambigs[0]; ambigs[3] = ambigs[0]; ambigs[4] = ambigs[0]; ambigs[5] = ambigs[0];
        totalSoFar = 0;
        lastValue = 0;
        for (map<int, long long>::iterator it = ambigBases.begin(); it != ambigBases.end(); it++) {
            int value = it->first;
            meanAmbigBases += (value*it->second);
            totalSoFar += it->second;
            if (((totalSoFar <= ptile0_25) && (totalSoFar > 1)) || ((lastValue < ptile0_25) && (totalSoFar > ptile0_25))){ ambigs[1] = value; } //save value
            if (((totalSoFar <= ptile25) && (totalSoFar > ptile0_25)) || ((lastValue < ptile25) && (totalSoFar > ptile25))) { ambigs[2] = value; } //save value
            if (((totalSoFar <= ptile50) && (totalSoFar > ptile25)) || ((lastValue < ptile50) && (totalSoFar > ptile50))) { ambigs[3] = value; } //save value
            if (((totalSoFar <= ptile75) && (totalSoFar > ptile50)) || ((lastValue < ptile75) && (totalSoFar > ptile75))) { ambigs[4] = value; } //save value
            if (((totalSoFar <= ptile97_5) && (totalSoFar > ptile75)) || ((lastValue < ptile97_5) && (totalSoFar > ptile97_5))) { ambigs[5] = value; } //save value
            if ((totalSoFar <= ptile100) && (totalSoFar > ptile97_5)) { ambigs[6] = value; } //save value
            lastValue = totalSoFar;
        }
        ambigs[6] = (ambigBases.rbegin())->first;
        
        if ((longHomoPolymer.begin())->first == -1) { homops[0] = 0; }
        else { homops[0] = (longHomoPolymer.begin())->first; }
        //set all values to min
        homops[1] = homops[0]; homops[2] = homops[0]; homops[3] = homops[0]; homops[4] = homops[0]; homops[5] = homops[0];
        totalSoFar = 0;
        lastValue = 0;
        for (map<int, long long>::iterator it = longHomoPolymer.begin(); it != longHomoPolymer.end(); it++) {
            int value = it->first;
            meanLongHomoPolymer += (it->first*it->second);
            totalSoFar += it->second;
            if (((totalSoFar <= ptile0_25) && (totalSoFar > 1)) || ((lastValue < ptile0_25) && (totalSoFar > ptile0_25))){ homops[1] = value; } //save value
            if (((totalSoFar <= ptile25) && (totalSoFar > ptile0_25)) || ((lastValue < ptile25) && (totalSoFar > ptile25))) { homops[2] = value; } //save value
            if (((totalSoFar <= ptile50) && (totalSoFar > ptile25)) || ((lastValue < ptile50) && (totalSoFar > ptile50))) { homops[3] = value; } //save value
            if (((totalSoFar <= ptile75) && (totalSoFar > ptile50)) || ((lastValue < ptile75) && (totalSoFar > ptile75))) { homops[4] = value; } //save value
            if (((totalSoFar <= ptile97_5) && (totalSoFar > ptile75)) || ((lastValue < ptile97_5) && (totalSoFar > ptile97_5))) { homops[5] = value; } //save value
            if ((totalSoFar <= ptile100) && (totalSoFar > ptile97_5)) { homops[6] = value; } //save value
            lastValue = totalSoFar;
        }
        homops[6] = (longHomoPolymer.rbegin())->first;
        
        double meanstartPosition, meanendPosition, meanseqLength, meanambigBases, meanlongHomoPolymer;
        meanstartPosition = meanStartPosition / (double) size;
        meanendPosition = meanEndPosition / (double) size;
        meanlongHomoPolymer = meanLongHomoPolymer / (double) size;
        meanseqLength = meanSeqLength / (double) size;
        meanambigBases = meanAmbigBases / (double) size;
        
        if (m->control_pressed) {  m->mothurRemove(summaryFile); return 0; }
        
        m->mothurOutEndLine();
        m->mothurOut("\t\tStart\tEnd\tNBases\tAmbigs\tPolymer\tNumSeqs"); m->mothurOutEndLine();
        m->mothurOut("Minimum:\t" + toString(starts[0]) + "\t" + toString(ends[0]) + "\t" + toString(lengths[0]) + "\t" + toString(ambigs[0]) + "\t" + toString(homops[0]) + "\t" + toString(1)); m->mothurOutEndLine();
        m->mothurOut("2.5%-tile:\t" + toString(starts[1]) + "\t" + toString(ends[1]) + "\t" + toString(lengths[1]) + "\t" + toString(ambigs[1]) + "\t" + toString(homops[1]) + "\t" + toString(ptile0_25)); m->mothurOutEndLine();
        m->mothurOut("25%-tile:\t" + toString(starts[2]) + "\t" + toString(ends[2]) + "\t" + toString(lengths[2]) + "\t" + toString(ambigs[2]) + "\t" + toString(homops[2]) + "\t" + toString(ptile25)); m->mothurOutEndLine();
        m->mothurOut("Median: \t" + toString(starts[3]) + "\t" + toString(ends[3]) + "\t" + toString(lengths[3]) + "\t" + toString(ambigs[3]) + "\t" + toString(homops[3]) + "\t" + toString(ptile50)); m->mothurOutEndLine();
        m->mothurOut("75%-tile:\t" + toString(starts[4]) + "\t" + toString(ends[4]) + "\t" + toString(lengths[4]) + "\t" + toString(ambigs[4]) + "\t" + toString(homops[4]) + "\t" + toString(ptile75)); m->mothurOutEndLine();
        m->mothurOut("97.5%-tile:\t" + toString(starts[5]) + "\t" + toString(ends[5]) + "\t" + toString(lengths[5]) + "\t" + toString(ambigs[5]) + "\t" + toString(homops[5]) + "\t" + toString(ptile97_5)); m->mothurOutEndLine();
        m->mothurOut("Maximum:\t" + toString(starts[6]) + "\t" + toString(ends[6]) + "\t" + toString(lengths[6]) + "\t" + toString(ambigs[6]) + "\t" + toString(homops[6]) + "\t" + toString(ptile100)); m->mothurOutEndLine();
        m->mothurOut("Mean:\t" + toString(meanstartPosition) + "\t" + toString(meanendPosition) + "\t" + toString(meanseqLength) + "\t" + toString(meanambigBases) + "\t" + toString(meanlongHomoPolymer)); m->mothurOutEndLine();
        
        if ((namefile == "") && (countfile == "")) {  m->mothurOut("# of Seqs:\t" + toString(numSeqs)); m->mothurOutEndLine(); }
        else { m->mothurOut("# of unique seqs:\t" + toString(numSeqs)); m->mothurOutEndLine(); m->mothurOut("total # of seqs:\t" + toString(size)); m->mothurOutEndLine(); }
        
        if (m->control_pressed) {  m->mothurRemove(summaryFile); return 0; }
        
        m->mothurOutEndLine();
        m->mothurOut("Output File Names: "); m->mothurOutEndLine();
        m->mothurOut(summaryFile); m->mothurOutEndLine();
        outputNames.push_back(summaryFile); outputTypes["summary"].push_back(summaryFile);
        m->mothurOutEndLine();
        
        if ((namefile == "") && (countfile == "")) {  m->mothurOut("It took " + toString(time(NULL) - start) + " secs to summarize " + toString(numSeqs) + " sequences.\n"); }
        else{ m->mothurOut("It took " + toString(time(NULL) - start) + " secs to summarize " + toString(size) + " sequences.\n"); }
        
        //set summary file as new current summaryfile
        string current = "";
        itTypes = outputTypes.find("summary");
        if (itTypes != outputTypes.end()) {
            if ((itTypes->second).size() != 0) { current = (itTypes->second)[0]; m->setSummaryFile(current); }
        }
        
        return 0;
    }
    catch(exception& e) {
        m->errorOut(e, "SeqSummaryCommand", "execute");
        exit(1);
    }
}
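// Hedged sketch (not part of the mothur source above): the five nearly identical walks in the function above pull
// percentile values out of value -> count maps by accumulating counts until fixed rank thresholds are crossed. The
// stand-alone program below shows the same idea once, factored as a helper: return the first key whose cumulative
// count reaches the target rank, falling back to the maximum key.
#include <cstdio>
#include <map>

static int weightedPercentile(const std::map<int, long long>& histogram, long long targetRank) {
    long long seen = 0;
    for (std::map<int, long long>::const_iterator it = histogram.begin(); it != histogram.end(); ++it) {
        seen += it->second;
        if (seen >= targetRank) { return it->first; }
    }
    return histogram.empty() ? 0 : histogram.rbegin()->first;
}

int main() {
    std::map<int, long long> lengths;                        //hypothetical read-length histogram
    lengths[250] = 40; lengths[251] = 50; lengths[252] = 10;
    
    long long size = 100;
    std::printf("median length: %d\n", weightedPercentile(lengths, 1 + (long long)(size * 0.500))); //251
    return 0;
}
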
int OptiBlastMatrix::readBlast(){
    try {
        Utils util;
        map<string, long long> nameAssignment;
        if (namefile != "") { util.readNames(namefile, nameAssignment); }
        else if (countfile != "") {
            CountTable ct;
            ct.readTable(countfile, false, true);
            map<string, int> temp = ct.getNameMap();
            for (map<string, int>::iterator it = temp.begin(); it!= temp.end(); it++) {  nameAssignment[it->first] = it->second; }
        }
        else { readBlastNames(nameAssignment); }
        
        int count = 0;
        for (map<string, long long>::iterator it = nameAssignment.begin(); it!= nameAssignment.end(); it++) {
            it->second = count; count++;
            nameMap.push_back(it->first);
            overlapNameMap.push_back(it->first);
        }
        
        m->mothurOut("Reading Blast File... "); cout.flush();
        
        string firstName, secondName, eScore, currentRow;
        currentRow = "";
        string repeatName = "";
        float distance, thisoverlap, refScore;
        float percentId;
        float numBases, mismatch, gap, startQuery, endQuery, startRef, endRef, score, lengthThisSeq;
        map<string, float> thisRowsBlastScores;
        
        ///////////////////// Read to eliminate singletons ///////////////////////
        ifstream fileHandle;
        util.openInputFile(distFile, fileHandle);
        
        map<int, int> singletonIndexSwap;
        map<int, int> blastSingletonIndexSwap;
        vector<bool> singleton; singleton.resize(nameAssignment.size(), true);
        vector<bool> overlapSingleton; overlapSingleton.resize(nameAssignment.size(), true);
        vector< map<string,float> > dists;  dists.resize(nameAssignment.size());
        
        if (!fileHandle.eof()) {
            //read in line from file
            fileHandle >> firstName >> secondName >> percentId >> numBases >> mismatch >> gap >> startQuery >> endQuery >> startRef >> endRef >> eScore >> score;
            util.gobble(fileHandle);
            
            currentRow = firstName;
            lengthThisSeq = numBases;
            repeatName = firstName + secondName;
            
            if (firstName == secondName) { refScore = score; }
            else{
                thisRowsBlastScores[secondName] = score;
                
                //calc overlap score
                thisoverlap = 1.0 - (percentId * (lengthThisSeq - startQuery) / endRef / 100.0 - penalty);
                
                //if there is a valid overlap, add it
                if ((startRef <= length) && ((endQuery+length) >= lengthThisSeq) && (thisoverlap <= cutoff)) {
                    //convert name to number
                    map<string,long long>::iterator itA = nameAssignment.find(firstName);
                    map<string,long long>::iterator itB = nameAssignment.find(secondName);
                    if(itA == nameAssignment.end()){ m->mothurOut("AAError: Sequence '" + firstName + "' was not found in the names file, please correct\n"); exit(1); }
                    if(itB == nameAssignment.end()){ m->mothurOut("ABError: Sequence '" + secondName + "' was not found in the names file, please correct\n"); exit(1); }
                    
                    int indexA = (itA->second);
                    int indexB = (itB->second);
                    overlapSingleton[indexA] = false;
                    overlapSingleton[indexB] = false;
                    blastSingletonIndexSwap[indexA] = indexA;
                    blastSingletonIndexSwap[indexB] = indexB;
                }
            }
        }else { m->mothurOut("Error in your blast file, cannot read."); m->mothurOutEndLine(); exit(1); }
        
        while(fileHandle){ //let's assume it's a triangular matrix...
            
            if (m->getControl_pressed()) { fileHandle.close(); return 0; }
            
            //read in line from file
            fileHandle >> firstName >> secondName >> percentId >> numBases >> mismatch >> gap >> startQuery >> endQuery >> startRef >> endRef >> eScore >> score;
            util.gobble(fileHandle);
            
            string temp = firstName + secondName; //to check if this file has repeat lines, ie. is this a blast instead of a blscreen file
            
            //if this is a new pairing
            if (temp != repeatName) {
                repeatName = temp;
                
                if (currentRow == firstName) {
                    if (firstName == secondName) { refScore = score; }
                    else{
                        //save score
                        thisRowsBlastScores[secondName] = score;
                        
                        //calc overlap score
                        thisoverlap = 1.0 - (percentId * (lengthThisSeq - startQuery) / endRef / 100.0 - penalty);
                        
                        //if there is a valid overlap, add it
                        if ((startRef <= length) && ((endQuery+length) >= lengthThisSeq) && (thisoverlap <= cutoff)) {
                            //convert name to number
                            map<string,long long>::iterator itA = nameAssignment.find(firstName);
                            map<string,long long>::iterator itB = nameAssignment.find(secondName);
                            if(itA == nameAssignment.end()){ m->mothurOut("AAError: Sequence '" + firstName + "' was not found in the names file, please correct\n"); exit(1); }
                            if(itB == nameAssignment.end()){ m->mothurOut("ABError: Sequence '" + secondName + "' was not found in the names file, please correct\n"); exit(1); }
                            
                            int indexA = (itA->second);
                            int indexB = (itB->second);
                            overlapSingleton[indexA] = false;
                            overlapSingleton[indexB] = false;
                            blastSingletonIndexSwap[indexA] = indexA;
                            blastSingletonIndexSwap[indexB] = indexB;
                        }
                    } //end else
                }else { //end row
                    //convert blast scores to distance and add cell to sparse matrix if we can
                    map<string, float>::iterator it;
                    map<string, float>::iterator itDist;
                    for(it=thisRowsBlastScores.begin(); it!=thisRowsBlastScores.end(); it++) {
                        distance = 1.0 - (it->second / refScore);
                        
                        //do we already have the distance calculated for b->a
                        map<string,long long>::iterator itA = nameAssignment.find(currentRow);
                        map<string,long long>::iterator itB = nameAssignment.find(it->first);
                        itDist = dists[itB->second].find(itA->first);
                        
                        //if we have it then compare
                        if (itDist != dists[itB->second].end()) {
                            //if you want the minimum blast score ratio, then pick max distance
                            if(minWanted) {  distance = max(itDist->second, distance); }
                            else{ distance = min(itDist->second, distance); }
                            
                            //is this distance below cutoff
                            if (distance <= cutoff) {
                                int indexA = (itA->second);
                                int indexB = (itB->second);
                                singleton[indexA] = false;
                                singleton[indexB] = false;
                                singletonIndexSwap[indexA] = indexA;
                                singletonIndexSwap[indexB] = indexB;
                            }
                            //not going to need this again
                            dists[itB->second].erase(itDist);
                        }else { //save this value until we get the other ratio
                            dists[itA->second][it->first] = distance;
                        }
                    }
                    
                    //clear out last rows info
                    thisRowsBlastScores.clear();
                    
                    currentRow = firstName;
                    lengthThisSeq = numBases;
                    
                    //add this row to thisRowsBlastScores
                    if (firstName == secondName) { refScore = score; }
                    else{ //add this row to thisRowsBlastScores
                        thisRowsBlastScores[secondName] = score;
                        
                        //calc overlap score
                        thisoverlap = 1.0 - (percentId * (lengthThisSeq - startQuery) / endRef / 100.0 - penalty);
                        
                        //if there is a valid overlap, add it
                        if ((startRef <= length) && ((endQuery+length) >= lengthThisSeq) && (thisoverlap <= cutoff)) {
                            //convert name to number
                            map<string,long long>::iterator itA = nameAssignment.find(firstName);
                            map<string,long long>::iterator itB = nameAssignment.find(secondName);
                            if(itA == nameAssignment.end()){ m->mothurOut("AAError: Sequence '" + firstName + "' was not found in the names file, please correct\n"); exit(1); }
                            if(itB == nameAssignment.end()){ m->mothurOut("ABError: Sequence '" + secondName + "' was not found in the names file, please correct\n"); exit(1); }
                            
                            int indexA = (itA->second);
                            int indexB = (itB->second);
                            overlapSingleton[indexA] = false;
                            overlapSingleton[indexB] = false;
                            blastSingletonIndexSwap[indexA] = indexA;
                            blastSingletonIndexSwap[indexB] = indexB;
                        }
                    }
                }//end if current row
            }//end if repeat
        }
        fileHandle.close();
        
        //convert blast scores to distance and add cell to sparse matrix if we can
        map<string, float>::iterator it;
        map<string, float>::iterator itDist;
        for(it=thisRowsBlastScores.begin(); it!=thisRowsBlastScores.end(); it++) {
            distance = 1.0 - (it->second / refScore);
            
            //do we already have the distance calculated for b->a
            map<string,long long>::iterator itA = nameAssignment.find(currentRow);
            map<string,long long>::iterator itB = nameAssignment.find(it->first);
            itDist = dists[itB->second].find(itA->first);
            
            //if we have it then compare
            if (itDist != dists[itB->second].end()) {
                //if you want the minimum blast score ratio, then pick max distance
                if(minWanted) {  distance = max(itDist->second, distance); }
                else{ distance = min(itDist->second, distance); }
                
                //is this distance below cutoff
                if (distance <= cutoff) {
                    int indexA = (itA->second);
                    int indexB = (itB->second);
                    singleton[indexA] = false;
                    singleton[indexB] = false;
                    singletonIndexSwap[indexA] = indexA;
                    singletonIndexSwap[indexB] = indexB;
                }
                //not going to need this again
                dists[itB->second].erase(itDist);
            }else { //save this value until we get the other ratio
                dists[itA->second][it->first] = distance;
            }
        }
        //clear out info
        thisRowsBlastScores.clear();
        dists.clear();
        
        //////////////////////////////////////////////////////////////////////////
        
        int nonSingletonCount = 0;
        for (int i = 0; i < singleton.size(); i++) {
            if (!singleton[i]) { //if you are not a singleton
                singletonIndexSwap[i] = nonSingletonCount;
                nonSingletonCount++;
            }else { singletons.push_back(nameMap[i]); }
        }
        singleton.clear();
        
        int overlapNonSingletonCount = 0;
        for (int i = 0; i < overlapSingleton.size(); i++) {
            if (!overlapSingleton[i]) { //if you are not a singleton
                blastSingletonIndexSwap[i] = overlapNonSingletonCount;
                overlapNonSingletonCount++;
            }
        }
        overlapSingleton.clear();
        
        ifstream in;
        util.openInputFile(distFile, in);
        
        dists.resize(nameAssignment.size());
        closeness.resize(nonSingletonCount);
        blastOverlap.resize(overlapNonSingletonCount);
        
        map<string, string> names;
        if (namefile != "") {
            util.readNames(namefile, names);
            for (int i = 0; i < singletons.size(); i++) { singletons[i] = names[singletons[i]]; }
        }
        
        m->mothurOut(" halfway ... "); cout.flush();
        
        if (!in.eof()) {
            //read in line from file
            in >> firstName >> secondName >> percentId >> numBases >> mismatch >> gap >> startQuery >> endQuery >> startRef >> endRef >> eScore >> score;
            util.gobble(in);
            
            currentRow = firstName;
            lengthThisSeq = numBases;
            repeatName = firstName + secondName;
            
            if (firstName == secondName) { refScore = score; }
            else{
                //convert name to number
                map<string,long long>::iterator itA = nameAssignment.find(firstName);
                map<string,long long>::iterator itB = nameAssignment.find(secondName);
                if(itA == nameAssignment.end()){ m->mothurOut("AAError: Sequence '" + firstName + "' was not found in the names file, please correct\n"); exit(1); }
                if(itB == nameAssignment.end()){ m->mothurOut("ABError: Sequence '" + secondName + "' was not found in the names file, please correct\n"); exit(1); }
                
                thisRowsBlastScores[secondName] = score;
                
                if (namefile != "") {
                    firstName = names[firstName];   //redundant names
                    secondName = names[secondName]; //redundant names
                }
                
                nameMap[singletonIndexSwap[itA->second]] = firstName;
                nameMap[singletonIndexSwap[itB->second]] = secondName;
                
                //calc overlap score
                thisoverlap = 1.0 - (percentId * (lengthThisSeq - startQuery) / endRef / 100.0 - penalty);
                
                //if there is a valid overlap, add it
                if ((startRef <= length) && ((endQuery+length) >= lengthThisSeq) && (thisoverlap <= cutoff)) {
                    int indexA = (itA->second);
                    int indexB = (itB->second);
                    int newB = blastSingletonIndexSwap[indexB];
                    int newA = blastSingletonIndexSwap[indexA];
                    blastOverlap[newA].insert(newB);
                    blastOverlap[newB].insert(newA);
                    
                    overlapNameMap[newA] = firstName;
                    overlapNameMap[newB] = secondName;
                }
            }
        }else { m->mothurOut("Error in your blast file, cannot read."); m->mothurOutEndLine(); exit(1); }