int main(int argc, char *argv[])
{
char *ortho1;
char *ortho2;
char *consensus;
char *err;
struct sizeList *ortho1List = NULL;
struct sizeList *ortho2List = NULL;
struct sizeList *consensusList = NULL;
FILE *consensusFile = NULL;
FILE *errFile = NULL;

if (argc != 5)
    usage();
ortho1 = cloneString(argv[1]);    // liftOver
ortho2 = cloneString(argv[2]);    // Mercator
consensus = cloneString(argv[3]); // Consensus
err = cloneString(argv[4]);       // errors
ortho1List = getRegions(ortho1, FALSE); // liftOver - include random chroms
ortho2List = getRegions(ortho2, TRUE);  // Mercator - exclude random chroms
consensusFile = mustOpen(consensus, "w");
errFile = mustOpen(err, "w");
consensusList = unionSizeLists(ortho1List, ortho2List, errFile);
consensusList = unionSizeLists(consensusList, ortho1List, errFile);
consensusList = unionSizeLists(consensusList, ortho2List, errFile);
writeSizeListToBedFile(consensusFile, consensusList);
return 0;
}
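/* A minimal usage sketch for the tool above, inferred from the argc check and
 * the argv comments; the program name "orthoConsensus" and the file names are
 * assumptions, not from the source:
 *
 *     orthoConsensus liftOver.bed mercator.bed consensus.bed errors.txt
 *
 * The liftOver and Mercator region lists are unioned, then each input is
 * re-unioned against the running consensus so regions present in either input
 * survive; the final list is written as BED, and unionSizeLists presumably
 * logs disagreements to the error file. */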
TilePosition Commander::findUnfortifiedChokePoint()
{
    double bestDist = 0;
    Chokepoint* bestChoke = NULL;

    for (set<BWTA::Region*>::const_iterator i = getRegions().begin(); i != getRegions().end(); i++)
    {
        if (isOccupied((*i)))
        {
            for (set<Chokepoint*>::const_iterator c = (*i)->getChokepoints().begin(); c != (*i)->getChokepoints().end(); c++)
            {
                if (isEdgeChokepoint((*c)))
                {
                    if (!chokePointFortified(TilePosition((*c)->getCenter())))
                    {
                        double cDist = Broodwar->self()->getStartLocation().getDistance(TilePosition((*c)->getCenter()));
                        if (cDist > bestDist)
                        {
                            bestDist = cDist;
                            bestChoke = (*c);
                        }
                    }
                }
            }
        }
    }

    TilePosition buildPos = TilePosition(-1, -1);
    if (bestChoke != NULL)
    {
        buildPos = TilePosition(bestChoke->getCenter());
    }
    return buildPos;
}
FEM_Partition::~FEM_Partition()
{
    if (elem2chunk) {
        delete[] elem2chunk;
        elem2chunk = NULL;
    }
    // Here getRegions() returns the number of regions, not a container.
    for (int i = 0; i < getRegions(); i++) {
        delete regions[i].layer;
        delete regions[i].stencil;
    }
}
void doGenePredPal(struct sqlConnection *conn)
/* Output genePred protein alignment. */
{
if (doGalaxy() && !cgiOptionalString(hgtaDoGalaxyQuery))
    {
    sendParamsToGalaxy(hgtaDoPalOut, "submit");
    return;
    }

/* get rid of pesky cookies that would bring us back here */
cartRemove(cart, hgtaDoPal);
cartRemove(cart, hgtaDoPalOut);

if (anyIntersection() && intersectionIsBpWise())
    errAbort("Can't do CDS FASTA output when bit-wise intersection is on. "
             "Please go back and select another output type, or clear the intersection.");
checkNoGenomeDisabled(database, curTable);

struct lm *lm = lmInit(64*1024);
int fieldCount;
struct bed *bedList = cookedBedsOnRegions(conn, curTable, getRegions(), lm, &fieldCount);
//lmCleanup(&lm);

textOpen();

int outCount = palOutPredsInBeds(conn, cart, bedList, curTable);

/* Do some error diagnostics for user. */
if (outCount == 0)
    explainWhyNoResults(NULL);
}
int bammer_main(args *pars)
{
    gf = (getFasta *) allMethods[1];
    func = pars->callback;

    // read bamfiles
    std::vector<char *> nams;
    if (pars->inputfiles)
        nams = angsd::getFilenames(pars->inputfiles, pars->nInd);
    else
        nams.push_back(strdup(pars->inputfile)); // if only one file just push bamfile
    bufReader *rd = initializeBufReaders2(nams);

    // read regions
    std::vector<char *> regionsRaw;
    if (pars->regionfile)
        regionsRaw = getRegions(pars->regionfile);
    else if (pars->region != NULL)
        regionsRaw.push_back(strdup(pars->region)); // if only one region just push it

    std::vector<regs> regions;
    for (size_t i = 0; i < regionsRaw.size(); i++) {
        regs tmpRegs;
        if (parse_region(regionsRaw[i], *rd[0].hd, tmpRegs.refID, tmpRegs.start, tmpRegs.stop) < 0
            || tmpRegs.stop < tmpRegs.start) {
            fprintf(stderr, "[%s] problems with indexing: %s\n", __FUNCTION__, regionsRaw[i]);
            exit(0);
        } else
            regions.push_back(tmpRegs);
    }
    //printReg(stderr,regions)

    // each filereader contains a filename for a bam; if we try to read remote files,
    // then we need to download the bai file and update the filename
    if (regions.size() != 0)
        modNames(rd, nams.size());

    extern int maxThreads;
    if (pars->jobtype == 2)
        uppile(pars->show, maxThreads, rd, pars->nLines, nams.size(), regions);
    else if (pars->jobtype == 1)
        while (motherView(rd, nams.size(), regions));
    else
        fprintf(stderr, "nothing to do? what should program do?\n");

    // cleanup stuff
    for (int i = 0; i < (int)nams.size(); i++) {
        free(nams[i]);
        dalloc_bufReader(rd[i]);
    }
    for (size_t i = 0; i < regions.size(); i++)
        free(regionsRaw[i]);
    delete[] rd;
    free(pars->inputfiles);
    free(pars->inputfile);
    free(pars->region);
    delete pars;
    return 0;
}
TilePosition Commander::findChokePoint()
{
    // First, check the DefenseLocator for a stored defense position.
    DefenseLocator* df = DefenseLocator::getInstance();
    TilePosition storedPos = df->getBaseDefensePos(Broodwar->mapHash());
    if (storedPos.x() != -1)
        return storedPos;

    double bestPrio = -1;
    Chokepoint* bestChoke = NULL;

    for (set<BWTA::Region*>::const_iterator i = getRegions().begin(); i != getRegions().end(); i++)
    {
        if (isOccupied((*i)))
        {
            for (set<Chokepoint*>::const_iterator c = (*i)->getChokepoints().begin(); c != (*i)->getChokepoints().end(); c++)
            {
                if (isEdgeChokepoint((*c)))
                {
                    double cPrio = getChokepointPrio(TilePosition((*c)->getCenter()));
                    if (cPrio > bestPrio)
                    {
                        bestPrio = cPrio;
                        bestChoke = (*c);
                    }
                }
            }
        }
    }

    TilePosition guardPos = Broodwar->self()->getStartLocation();
    if (bestChoke != NULL)
    {
        guardPos = findDefensePos(bestChoke);
        //guardPos = TilePosition(bestChoke->getCenter());

        // Pre-calculate path
        TilePosition b = ExplorationManager::getInstance()->getClosestSpottedBuilding(guardPos);
        if (b.x() >= 0)
        {
            Pathfinder::getInstance()->requestPath(guardPos, b);
        }
    }
    return guardPos;
}
ExplorationManager::ExplorationManager()
{
    active = true;

    ownForce.reset();
    enemyForce.reset();

    // Add the regions for this map
    for (set<BWTA::Region*>::const_iterator i = getRegions().begin(); i != getRegions().end(); i++)
    {
        exploreData.push_back(ExploreData((*i)->getCenter()));
    }

    siteSetFrame = 0;
    lastCallFrame = Broodwar->getFrameCount();

    expansionSite = TilePosition(-1, -1);
}
void doOutGff(char *table, struct sqlConnection *conn, boolean outputGtf)
/* Save as GFF/GTF. */
{
struct hTableInfo *hti = getHti(database, table, conn);
struct bed *bedList;
struct hash *chromHash = NULL;
struct slName *exonFramesList = NULL;
char source[HDB_MAX_TABLE_STRING];
int itemCount;
struct region *region, *regionList = getRegions();

textOpen();

int efIdx = -1;
safef(source, sizeof(source), "%s_%s", database, table);
if (conn)
    {
    boolean simpleTableExists = sqlTableExists(conn, table);
    // simpleTable means not split table, not custom track
    // However it still can include bbi table with bam fileName path
    if (simpleTableExists)  // no tables having exonFrames are split tables anyway
        efIdx = sqlFieldIndex(conn, table, "exonFrames");
    itemCount = 0;
    int regionCount = slCount(regionList);
    // regionList can have many thousands of items e.g. rheMac3 has 34000 chroms!
    // This regionCount threshold should be just above the # chroms in the latest human assembly
    if (simpleTableExists && (regionCount > 500))
        {
        chromHash = makeChromHashForTable(conn, table);
        }
    }
// Note: code could be added here to extract exonFrames from bigGenePred

// Process each region
for (region = regionList; region != NULL; region = region->next)
    {
    if (chromHash && (!hashFindVal(chromHash, region->chrom)))
        continue;
    struct lm *lm = lmInit(64*1024);
    int fieldCount;
    bedList = cookedBedList(conn, table, region, lm, &fieldCount);
    // Use exonFrames field if available for better accuracy instead of calculating from coordinates
    if (efIdx != -1)
        exonFramesList = getExonFrames(table, conn, bedList);
    itemCount += bedToGffLines(bedList, exonFramesList, hti, fieldCount, source, outputGtf);
    lmCleanup(&lm);
    }
if (itemCount == 0)
    hPrintf(NO_RESULTS);
}
bool CSMWorld::Data::hasId (const std::string& id) const
{
    return
        getGlobals().searchId (id)!=-1 ||
        getGmsts().searchId (id)!=-1 ||
        getSkills().searchId (id)!=-1 ||
        getClasses().searchId (id)!=-1 ||
        getFactions().searchId (id)!=-1 ||
        getRaces().searchId (id)!=-1 ||
        getSounds().searchId (id)!=-1 ||
        getScripts().searchId (id)!=-1 ||
        getRegions().searchId (id)!=-1 ||
        getBirthsigns().searchId (id)!=-1 ||
        getSpells().searchId (id)!=-1 ||
        getTopics().searchId (id)!=-1 ||
        getJournals().searchId (id)!=-1 ||
        getCells().searchId (id)!=-1 ||
        getReferenceables().searchId (id)!=-1;
}
void doGenomicDna(struct sqlConnection *conn)
/* Get genomic sequence (UI has already told us how). */
{
struct region *region, *regionList = getRegions();
struct hTableInfo *hti = getHti(database, curTable, conn);
int fieldCount;
textOpen();
int resultCount = 0;
for (region = regionList; region != NULL; region = region->next)
    {
    struct lm *lm = lmInit(64*1024);
    struct bed *bedList = cookedBedList(conn, curTable, region, lm, &fieldCount);
    if (bedList != NULL)
        resultCount += hgSeqBed(database, hti, bedList);
    lmCleanup(&lm);
    }
if (!resultCount)
    hPrintf(NO_RESULTS);
}
void doHalMaf(struct trackDb *parentTrack, char *table, struct sqlConnection *conn)
/* Output regions as MAF.  maf tables look bed-like enough for
 * cookedBedsOnRegions to handle intersections. */
{
#ifdef USE_HAL
struct region *region = NULL, *regionList = getRegions();
struct trackDb *tdb;
if ((tdb = findTrackDb(parentTrack, table)) == NULL)
    errAbort("cannot find track named %s under %s\n", table, parentTrack->table);
char *fileName = trackDbSetting(tdb, "bigDataUrl");
char *otherSpecies = trackDbSetting(tdb, "otherSpecies");
int handle = halOpenLOD(fileName);
struct hal_species_t *speciesList = halGetSpecies(handle);

for (; speciesList; speciesList = speciesList->next)
    {
    if (sameString(speciesList->name, otherSpecies))
        break;
    }
if (speciesList == NULL)
    errAbort("cannot find species %s in hal file %s\n", otherSpecies, fileName);

speciesList->next = NULL;

textOpen();
for (region = regionList; region != NULL; region = region->next)
    {
    halGetMAF(stdout, handle, speciesList, trackHubSkipHubName(database),
              region->chrom, region->start, region->end, FALSE);
    }
#else // USE_HAL
errAbort("hgTables not compiled with HAL support.");
#endif // USE_HAL
}
LayoutRegion *FormatterDeviceRegion::cloneRegion()
{
    LayoutRegion *cloneRegion;
    vector<LayoutRegion*> *childRegions;

    cloneRegion = new FormatterDeviceRegion(id);
    cloneRegion->setTitle(getTitle());
    cloneRegion->setLeft(left, false);
    cloneRegion->setTop(top, false);
    cloneRegion->setWidth(width, false);
    cloneRegion->setHeight(height, false);
    cloneRegion->setDecorated(false);
    cloneRegion->setMovable(false);
    cloneRegion->setResizable(false);

    // Note: this is a shallow copy; the clone holds the same child
    // LayoutRegion pointers as the original.
    childRegions = getRegions();
    vector<LayoutRegion*>::iterator it;
    for (it = childRegions->begin(); it != childRegions->end(); ++it)
    {
        cloneRegion->addRegion(*it);
    }
    return cloneRegion;
}
void Channel::frameAssemble( const eq::uint128_t& )
{
    const bool composeOnly = (_drawRange == eq::Range::ALL);

    _startAssemble();

    const eq::Frames& frames = getInputFrames();
    eq::PixelViewport coveredPVP;
    eq::Frames dbFrames;
    eq::Zoom zoom( eq::Zoom::NONE );

    // Make sure all frames are ready and gather some information on them
    for( eq::Frames::const_iterator i = frames.begin(); i != frames.end(); ++i )
    {
        eq::Frame* frame = *i;
        {
            eq::ChannelStatistics stat( eq::Statistic::CHANNEL_FRAME_WAIT_READY, this );
            frame->waitReady( );
        }
        const eq::Range& range = frame->getRange();
        if( range == eq::Range::ALL ) // 2D frame, assemble directly
            eq::Compositor::assembleFrame( frame, this );
        else
        {
            dbFrames.push_back( frame );
            zoom = frame->getZoom();
            _expandPVP( coveredPVP, frame->getImages(), frame->getOffset() );
        }
    }
    coveredPVP.intersect( getPixelViewport( ));

    if( dbFrames.empty( ))
    {
        resetAssemblyState();
        return;
    }

    // calculate correct frames sequence
    eq::FrameDataPtr data = _frame.getFrameData();
    if( !composeOnly && coveredPVP.hasArea( ))
    {
        _frame.clear();
        data->setRange( _drawRange );
        dbFrames.push_back( &_frame );
    }

    _orderFrames( dbFrames );

    // Update range
    eq::Range newRange( 1.f, 0.f );
    for( size_t i = 0; i < dbFrames.size(); ++i )
    {
        const eq::Range range = dbFrames[i]->getRange();
        if( newRange.start > range.start ) newRange.start = range.start;
        if( newRange.end < range.end ) newRange.end = range.end;
    }
    _drawRange = newRange;

    // check if current frame is in proper position, read back if not
    if( !composeOnly )
    {
        if( _bgColor == eq::Vector3f::ZERO && dbFrames.front() == &_frame )
            dbFrames.erase( dbFrames.begin( ));
        else if( coveredPVP.hasArea())
        {
            eq::util::ObjectManager& glObjects = getObjectManager();

            _frame.setOffset( eq::Vector2i( 0, 0 ));
            _frame.setZoom( zoom );
            data->setPixelViewport( coveredPVP );
            _frame.readback( glObjects, getDrawableConfig(), getRegions( ));
            clearViewport( coveredPVP );

            // offset for assembly
            _frame.setOffset( eq::Vector2i( coveredPVP.x, coveredPVP.y ));
        }
    }

    // blend DB frames in order
    try
    {
        eq::Compositor::assembleFramesSorted( dbFrames, this, 0, true /*blendAlpha*/ );
    }
    catch( const co::Exception& e )
    {
        LBWARN << e.what() << std::endl;
    }

    resetAssemblyState();
}
int main(int argc, char **argv)
{
    P_STR *P;
    SEQ_STR *SEQ;
    int i, j;
    int type;
    char *path;

    if (argc != 3) {
        printf(" Usage: %s seqfile type \n", argv[0]);
        printf(" where type stands for one of the options of \n");
        printf(" \"long\", \"short\" or \"glob\"\n");
        exit(1);
    }
    /*
    if ((path=getenv("IUPred_PATH"))==NULL) {
        fprintf(stderr,"IUPred_PATH environment variable is not set\n");
        path="./";
    }
    */
    path = "lib/disorder_apps/iupred";

    printf("# IUPred \n");
    printf("# Copyright (c) Zsuzsanna Dosztanyi, 2005\n");
    printf("#\n");
    printf("# Z. Dosztanyi, V. Csizmok, P. Tompa and I. Simon\n");
    printf("# J. Mol. Biol. (2005) 347, 827-839. \n");
    printf("#\n");
    printf("#\n");

    if ((strncmp(argv[2], "long", 4)) == 0) {
        type = 0;
    } else if ((strncmp(argv[2], "short", 5)) == 0) {
        type = 1;
    } else if ((strncmp(argv[2], "glob", 4)) == 0) {
        type = 2;
    } else {
        printf("Wrong argument\n");
        exit(1);
    }

    SEQ = malloc(sizeof(SEQ_STR));
    Get_Seq(argv[1], SEQ);
    if (SEQ->le == 0) {
        printf(" Sequence length 0\n");
        exit(1);
    }
#ifdef DEBUG
    printf("%s %d\n%s\n", SEQ->name, SEQ->le, SEQ->seq);
#endif

    P = malloc(sizeof(P_STR));
    P->CC = DMatrix(AAN, AAN);

    if (type == 0) {
        LC = 1; UC = 100; WS = 10; Flag_EP = 0;
        read_ref(path, "ss", P->CC);
        Get_Histo(P, path, "histo");
        IUPred(SEQ, P);
        printf("# Prediction output \n");
        printf("# %s\n", SEQ->name);
        for (i = 0; i < SEQ->le; i++)
            printf("%5d %c %10.4f\n", i + 1, SEQ->seq[i], SEQ->en[i]);
    }
    if (type == 1) {
        LC = 1; UC = 25; WS = 10; Flag_EP = 1; EP = -1.26;
        read_ref(path, "ss_casp", P->CC);
        Get_Histo(P, path, "histo_casp");
        IUPred(SEQ, P);
        printf("# Prediction output \n");
        printf("# %s\n", SEQ->name);
        for (i = 0; i < SEQ->le; i++)
            printf("%5d %c %10.4f\n", i + 1, SEQ->seq[i], SEQ->en[i]);
    }
    if (type == 2) {
        char *globseq;

        LC = 1; UC = 100; WS = 15; Flag_EP = 0;
        read_ref(path, "ss", P->CC);
        Get_Histo(P, path, "histo");
        IUPred(SEQ, P);
        Min_Ene = DMin_Ene;
        JOIN = DJOIN;
        DEL = DDEL;
        getRegions(SEQ);

        globseq = malloc((SEQ->le + 1) * sizeof(char));
        for (i = 0; i < SEQ->le; i++)
            globseq[i] = tolower(SEQ->seq[i]);

        printf("# Prediction output \n");
        printf("# %s\n", SEQ->name);
        printf("Number of globular domains: %5d \n", SEQ->ngr);
        for (i = 0; i < SEQ->ngr; i++) {
            printf(" globular domain %5d. %d - %d \n",
                   i + 1, SEQ->gr[i][0] + 1, SEQ->gr[i][1] + 1);
            for (j = SEQ->gr[i][0]; j < SEQ->gr[i][1] + 1; j++) {
                globseq[j] = toupper(globseq[j]);
            }
        }
        printf(">%s\n", SEQ->name);
        for (i = 0; i < SEQ->le; i++) {
            if ((i > 0) && (i % 60 == 0)) printf("\n");
            else if ((i > 0) && (i % 10 == 0)) printf(" ");
            printf("%c", globseq[i]);
        }
        printf("\n");
        free(globseq);
#ifdef DEBUG
        for (i = 0; i < SEQ->le; i++)
            printf("%5d %c %10.4f\n", i, SEQ->seq[i], SEQ->en[i]);
#endif
    }

    free(SEQ->seq);
    free(SEQ->eprof);
    free(SEQ->en);
    free(SEQ->smp);
    free(SEQ);
    return 0;
}
void bigBedTabOut(char *db, char *table, struct sqlConnection *conn, char *fields, FILE *f)
/* Print out selected fields from Big Bed.  If fields is NULL, then print out all fields. */
{
if (f == NULL)
    f = stdout;

/* Convert comma separated list of fields to array. */
int fieldCount = chopByChar(fields, ',', NULL, 0);
char **fieldArray;
AllocArray(fieldArray, fieldCount);
chopByChar(fields, ',', fieldArray, fieldCount);

/* Get list of all fields in big bed and turn it into a hash of column indexes keyed by
 * column name. */
struct hash *fieldHash = hashNew(0);
struct slName *bb, *bbList = bigBedGetFields(table, conn);
int i;
for (bb = bbList, i = 0; bb != NULL; bb = bb->next, ++i)
    hashAddInt(fieldHash, bb->name, i);

// If bigBed has name column, look up pasted/uploaded identifiers if any:
struct hash *idHash = NULL;
if (slCount(bbList) >= 4)
    idHash = identifierHash(db, table);

/* Create an array of column indexes corresponding to the selected field list. */
int *columnArray;
AllocArray(columnArray, fieldCount);
for (i = 0; i < fieldCount; ++i)
    columnArray[i] = hashIntVal(fieldHash, fieldArray[i]);

/* Output row of labels */
fprintf(f, "#%s", fieldArray[0]);
for (i = 1; i < fieldCount; ++i)
    fprintf(f, "\t%s", fieldArray[i]);
fprintf(f, "\n");

/* Open up bigBed file. */
char *fileName = bigBedFileName(table, conn);
struct bbiFile *bbi = bigBedFileOpen(fileName);
struct asObject *as = bigBedAsOrDefault(bbi);
struct asFilter *filter = NULL;
if (anyFilter())
    {
    filter = asFilterFromCart(cart, db, table, as);
    if (filter)
        fprintf(f, "# Filtering on %d columns\n", slCount(filter->columnList));
    }

/* Loop through outputting each region */
struct region *region, *regionList = getRegions();
for (region = regionList; region != NULL; region = region->next)
    {
    struct lm *lm = lmInit(0);
    struct bigBedInterval *iv, *ivList = bigBedIntervalQuery(bbi, region->chrom,
        region->start, region->end, 0, lm);
    char *row[bbi->fieldCount];
    char startBuf[16], endBuf[16];
    for (iv = ivList; iv != NULL; iv = iv->next)
        {
        bigBedIntervalToRow(iv, region->chrom, startBuf, endBuf, row, bbi->fieldCount);
        if (asFilterOnRow(filter, row))
            {
            if ((idHash != NULL) && (hashLookup(idHash, row[3]) == NULL))
                continue;
            int i;
            fprintf(f, "%s", row[columnArray[0]]);
            for (i = 1; i < fieldCount; ++i)
                fprintf(f, "\t%s", row[columnArray[i]]);
            fprintf(f, "\n");
            }
        }
    lmCleanup(&lm);
    }

/* Clean up and exit. */
bbiFileClose(&bbi);
hashFree(&fieldHash);
freeMem(fieldArray);
freeMem(columnArray);
}
boolean doGetBedOrCt(struct sqlConnection *conn, boolean doCt, boolean doCtFile, boolean redirectToGb)
/* Actually output bed or custom track.  Return TRUE unless no results. */
{
char *db = cloneString(database);
char *table = curTable;
struct hTableInfo *hti = getHti(db, table, conn);
struct featureBits *fbList = NULL, *fbPtr;
struct customTrack *ctNew = NULL;
boolean doCtHdr = (cartUsualBoolean(cart, hgtaPrintCustomTrackHeaders, FALSE)
                   || doCt || doCtFile);
char *ctWigOutType = cartCgiUsualString(cart, hgtaCtWigOutType, outWigData);
char *fbQual = fbOptionsToQualifier();
char fbTQ[128];
int fields = hTableInfoBedFieldCount(hti);
boolean gotResults = FALSE;
struct region *region, *regionList = getRegions();
boolean isBedGr = isBedGraph(curTable);
boolean isBgWg = isBigWigTable(curTable);
boolean needSubtrackMerge = anySubtrackMerge(database, curTable);
boolean doDataPoints = FALSE;
boolean isWig = isWiggle(database, table);
struct wigAsciiData *wigDataList = NULL;
struct dataVector *dataVectorList = NULL;
boolean doRgb = bedItemRgb(hTrackDbForTrack(db, curTable));

if (!cartUsualBoolean(cart, hgtaDoGreatOutput, FALSE) && !doCt)
    textOpen();
if (cartUsualBoolean(cart, hgtaDoGreatOutput, FALSE))
    fputs("#", stdout);

if ((isWig || isBedGr || isBgWg) && sameString(outWigData, ctWigOutType))
    doDataPoints = TRUE;

for (region = regionList; region != NULL; region = region->next)
    {
    struct bed *bedList = NULL, *bed;
    struct lm *lm = lmInit(64*1024);
    struct dataVector *dv = NULL;

    if (isWig && doDataPoints)
        {
        if (needSubtrackMerge)
            {
            dv = wiggleDataVector(curTrack, curTable, conn, region);
            if (dv != NULL)
                slAddHead(&dataVectorList, dv);
            }
        else
            {
            int count = 0;
            struct wigAsciiData *wigData = NULL;
            struct wigAsciiData *asciiData;
            struct wigAsciiData *next;

            wigData = getWiggleAsData(conn, curTable, region);
            for (asciiData = wigData; asciiData; asciiData = next)
                {
                next = asciiData->next;
                if (asciiData->count)
                    {
                    slAddHead(&wigDataList, asciiData);
                    ++count;
                    }
                }
            slReverse(&wigDataList);
            }
        }
    else if (isBedGr && doDataPoints)
        {
        dv = bedGraphDataVector(curTable, conn, region);
        if (dv != NULL)
            slAddHead(&dataVectorList, dv);
        }
    else if (isBgWg && doDataPoints)
        {
        dv = bigWigDataVector(curTable, conn, region);
        if (dv != NULL)
            slAddHead(&dataVectorList, dv);
        }
    else if (isWig || isBgWg)
        {
        dv = wiggleDataVector(curTrack, curTable, conn, region);
        bedList = dataVectorToBedList(dv);
        dataVectorFree(&dv);
        }
    else if (isBedGr)
        {
        bedList = getBedGraphAsBed(conn, curTable, region);
        }
    else
        {
        bedList = cookedBedList(conn, curTable, region, lm, &fields);
        }

    /* this is a one-time only initial creation of the custom track
     * structure to receive the results.  gotResults turns it off after
     * the first time. */
    if (doCtHdr && !gotResults &&
        ((bedList != NULL) || (wigDataList != NULL) || (dataVectorList != NULL)))
        {
        ctNew = beginCustomTrack(table, fields, doCt,
                                 (isWig || isBedGr || isBgWg), doDataPoints);
        }

    if (doDataPoints && (wigDataList || dataVectorList))
        gotResults = TRUE;
    else
        {
        if ((fbQual == NULL) || (fbQual[0] == 0))
            {
            for (bed = bedList; bed != NULL; bed = bed->next)
                {
                if (bed->name != NULL)
                    subChar(bed->name, ' ', '_');
                if (doCt)
                    {
                    struct bed *dupe = cloneBed(bed); /* Out of local memory. */
                    slAddHead(&ctNew->bedList, dupe);
                    }
                else
                    {
                    if (doRgb)
                        bedTabOutNitemRgb(bed, fields, stdout);
                    else
                        bedTabOutN(bed, fields, stdout);
                    }
                gotResults = TRUE;
                }
            }
        else
            {
            safef(fbTQ, sizeof(fbTQ), "%s:%s", hti->rootName, fbQual);
            fbList = fbFromBed(db, fbTQ, hti, bedList, 0, 0, FALSE, FALSE);
            if (fields >= 6)
                fields = 6;
            else if (fields >= 4)
                fields = 4;
            else
                fields = 3;
            if (doCt && ctNew)
                {
                ctNew->fieldCount = fields;
                safef(ctNew->tdb->type, strlen(ctNew->tdb->type)+1, "bed %d", fields);
                }
            for (fbPtr = fbList; fbPtr != NULL; fbPtr = fbPtr->next)
                {
                if (fbPtr->name != NULL)
                    {
                    char *ptr = strchr(fbPtr->name, ' ');
                    if (ptr != NULL)
                        *ptr = 0;
                    }
                if (doCt)
                    {
                    struct bed *fbBed = fbToBedOne(fbPtr);
                    slAddHead(&ctNew->bedList, fbBed);
                    }
                else
                    {
                    if (fields >= 6)
                        hPrintf("%s\t%d\t%d\t%s\t%d\t%c\n",
                                fbPtr->chrom, fbPtr->start, fbPtr->end,
                                fbPtr->name, 0, fbPtr->strand);
                    else if (fields >= 4)
                        hPrintf("%s\t%d\t%d\t%s\n",
                                fbPtr->chrom, fbPtr->start, fbPtr->end, fbPtr->name);
                    else
                        hPrintf("%s\t%d\t%d\n",
                                fbPtr->chrom, fbPtr->start, fbPtr->end);
                    }
                gotResults = TRUE;
                }
            featureBitsFreeList(&fbList);
            }
        }
    bedList = NULL;
    lmCleanup(&lm);
    }

if (!gotResults)
    {
    hPrintf(NO_RESULTS);
    }
else if (doCt)
    {
    int wigDataSize = 0;
    /* Load existing custom tracks and add this new one: */
    struct customTrack *ctList = getCustomTracks();
    removeNamedCustom(&ctList, ctNew->tdb->table);
    if (doDataPoints)
        {
        if (needSubtrackMerge || isBedGr || isBgWg)
            {
            slReverse(&dataVectorList);
            wigDataSize = dataVectorWriteWigAscii(dataVectorList, ctNew->wigAscii, 0, NULL);
            // TODO: see if can make prettier wig output here that
            // doesn't necessarily have one value per base
            }
        else
            {
            struct wiggleDataStream *wds = NULL;
            /* create an otherwise empty wds so we can print out the list */
            wds = wiggleDataStreamNew();
            wds->ascii = wigDataList;
            wigDataSize = wds->asciiOut(wds, db, ctNew->wigAscii, TRUE, FALSE);
#if defined(DEBUG)      /* dbg */
            /* allow file readability for debug */
            chmod(ctNew->wigAscii, 0666);
#endif
            wiggleDataStreamFree(&wds);
            }
        }
    else
        slReverse(&ctNew->bedList);
    slAddHead(&ctList, ctNew);
    /* Save the custom tracks out to file (overwrite the old file): */
    customTracksSaveCart(db, cart, ctList);

    /* Put up redirect-to-browser page. */
    if (redirectToGb)
        {
        char browserUrl[256];
        char headerText[512];
        int redirDelay = 3;
        safef(browserUrl, sizeof(browserUrl), "%s?%s&db=%s",
              hgTracksName(), cartSidUrlString(cart), database);
        safef(headerText, sizeof(headerText),
              "<META HTTP-EQUIV=\"REFRESH\" CONTENT=\"%d;URL=%s\">",
              redirDelay, browserUrl);
        webStartHeader(cart, database, headerText,
                       "Table Browser: %s %s: %s",
                       hOrganism(database), freezeName, "get custom track");
        if (doDataPoints)
            hPrintf("There are %d data points in custom track. ", wigDataSize);
        else
            hPrintf("There are %d items in custom track. ", slCount(ctNew->bedList));
        hPrintf("You will be automatically redirected to the genome browser in\n"
                "%d seconds, or you can \n"
                "<A HREF=\"%s\">click here to continue</A>.\n",
                redirDelay, browserUrl);
        }
    }
else if (doDataPoints)
    {
    if (needSubtrackMerge || isBedGr || isBgWg)
        {
        slReverse(&dataVectorList);
        dataVectorWriteWigAscii(dataVectorList, "stdout", 0, NULL);
        }
    else
        {
        /* create an otherwise empty wds so we can print out the list */
        struct wiggleDataStream *wds = NULL;
        wds = wiggleDataStreamNew();
        wds->ascii = wigDataList;
        wds->asciiOut(wds, db, "stdout", TRUE, FALSE);
        wiggleDataStreamFree(&wds);
        }
    }
return gotResults;
}
void doSummaryStatsWiggle(struct sqlConnection *conn)
/* Put up page showing summary stats for wiggle track. */
{
// grab the right trackDb for the current table.  The curTrack variable
// has the composite trackDb in it
struct trackDb *track = hTrackDbForTrack(database, curTable);

char *table = curTable;
struct region *region, *regionList = getRegions();
char *regionName = getRegionName();
long long regionSize = 0;
long long gapTotal = 0;
long startTime = 0, wigFetchTime = 0;
char splitTableOrFileName[HDB_MAX_TABLE_STRING];
struct customTrack *ct = NULL;
boolean isCustom = FALSE;
struct wiggleDataStream *wds = NULL;
unsigned long long valuesMatched = 0;
int regionCount = 0;
int regionsDone = 0;
unsigned span = 0;
char *dataConstraint;
double ll = 0.0;
double ul = 0.0;
boolean hasConstraint = FALSE;
char *table2 = NULL;
boolean fullGenome = FALSE;
boolean statsHeaderDone = FALSE;
boolean gotSome = FALSE;
char *shortLabel = table;
long long statsItemCount = 0;   /* global accumulators for overall */
int statsSpan = 0;              /* stats summary on a multiple region */
double statsSumData = 0.0;      /* output */
double statsSumSquares = 0.0;   /* " " */
double lowerLimit = INFINITY;   /* " " */
double upperLimit = -1.0 * INFINITY;    /* " " */

startTime = clock1000();
if (track != NULL)
    shortLabel = track->shortLabel;

/* Count the regions, when only one, we can do more stats */
for (region = regionList; region != NULL; region = region->next)
    ++regionCount;

htmlOpen("%s (%s) Wiggle Summary Statistics", shortLabel, table);

if (anySubtrackMerge(database, curTable))
    hPrintf("<P><EM><B>Note:</B> subtrack merge is currently ignored on this "
            "page (not implemented yet).  Statistics shown here are only for "
            "the primary table %s (%s).</EM>", shortLabel, table);

fullGenome = fullGenomeRegion();

WIG_INIT;  /* ct, isCustom, hasConstraint, wds and table2 are set here */

for (region = regionList; region != NULL; region = region->next)
    {
    struct bed *intersectBedList = NULL;
    int operations;

    ++regionsDone;

    if (table2)
        intersectBedList = bedTable2(conn, region, table2);

    operations = wigFetchStats;
#if defined(NOT)
    /* can't do the histogram now, that operation times out */
    if (1 == regionCount)
        operations |= wigFetchAscii;
#endif

    wds->setChromConstraint(wds, region->chrom);

    if (fullGenome)
        wds->setPositionConstraint(wds, 0, 0);
    else
        wds->setPositionConstraint(wds, region->start, region->end);

    if (hasConstraint)
        wds->setDataConstraint(wds, dataConstraint, ll, ul);

    /* depending on what is coming in on regionList, we may need to be
     * smart about how often we call getData for these custom tracks
     * since that is potentially a large file read each time. */
    if (isCustom)
        {
        if (ct->dbTrack)
            {
            struct sqlConnection *trashConn = hAllocConn(CUSTOM_TRASH);
            struct trackDb *tdb = findTdbForTable(database, curTrack, table, ctLookupName);
            span = minSpan(trashConn, splitTableOrFileName, region->chrom,
                           region->start, region->end, cart, tdb);
            wds->setSpanConstraint(wds, span);
            valuesMatched = getWigglePossibleIntersection(wds, region,
                CUSTOM_TRASH, table2, &intersectBedList, splitTableOrFileName, operations);
            hFreeConn(&trashConn);
            }
        else
            {
            valuesMatched = getWigglePossibleIntersection(wds, region, NULL,
                table2, &intersectBedList, splitTableOrFileName, operations);
            /* XXX We need to properly get the smallest span for custom tracks */
            /* This is not necessarily the correct answer here */
            if (wds->stats)
                span = wds->stats->span;
            else
                span = 1;
            }
        }
    else
        {
        if (hFindSplitTable(database, region->chrom, table,
                            splitTableOrFileName, sizeof splitTableOrFileName, NULL))
            {
            span = minSpan(conn, splitTableOrFileName, region->chrom,
                           region->start, region->end, cart, track);
            wds->setSpanConstraint(wds, span);
            valuesMatched = getWigglePossibleIntersection(wds, region,
                database, table2, &intersectBedList, splitTableOrFileName, operations);
            if (intersectBedList)
                span = 1;
            }
        }

    /* when doing multiple regions, we need to print out each result as
     * it happens to keep the connection open to the browser and
     * prevent any timeout since this could take a while.
     * (worst case test is quality track on panTro1) */
    if (wds->stats)
        statsItemCount += wds->stats->count;
    if (wds->stats && (regionCount > 1) && (valuesMatched > 0))
        {
        double sumData = wds->stats->mean * wds->stats->count;
        double sumSquares;

        if (wds->stats->count > 1)
            sumSquares = (wds->stats->variance * (wds->stats->count - 1))
                + ((sumData * sumData) / wds->stats->count);
        else
            sumSquares = sumData * sumData;

        /* global accumulators for overall summary */
        statsSpan = wds->stats->span;
        statsSumData += sumData;
        statsSumSquares += sumSquares;
        if (wds->stats->lowerLimit < lowerLimit)
            lowerLimit = wds->stats->lowerLimit;
        if ((wds->stats->lowerLimit + wds->stats->dataRange) > upperLimit)
            upperLimit = wds->stats->lowerLimit + wds->stats->dataRange;

        if (statsHeaderDone)
            wds->statsOut(wds, database, "stdout", TRUE, TRUE, FALSE, TRUE);
        else
            {
            wds->statsOut(wds, database, "stdout", TRUE, TRUE, TRUE, TRUE);
            statsHeaderDone = TRUE;
            }
        wds->freeStats(wds);
        gotSome = TRUE;
        }
    if ((regionCount > MAX_REGION_DISPLAY) && (regionsDone >= MAX_REGION_DISPLAY))
        {
        hPrintf("<TR><TH ALIGN=CENTER COLSPAN=12> Can not display more "
                "than %d regions, <BR> would take too much time </TH></TR>\n",
                MAX_REGION_DISPLAY);
        break;  /* exit this for loop */
        }
    }   /* for (region = regionList; region != NULL; region = region->next) */

if (hasConstraint)
    freeMem(dataConstraint);    /* been cloned into wds */

if (1 == regionCount)
    {
    statsPreamble(wds, regionList->chrom, regionList->start, regionList->end,
                  span, valuesMatched, table2);
    /* 3 X TRUE = sort results, html table output, with header,
     * the FALSE means close the table after printing, no more rows to
     * come.  The case in the if() statement was already taken care of
     * in the statsPreamble() printout.  No need to do that again. */
    if ( ! ((valuesMatched == 0) && table2) )
        wds->statsOut(wds, database, "stdout", TRUE, TRUE, TRUE, FALSE);
    regionSize = basesInRegion(regionList, 0);
    gapTotal = gapsInRegion(conn, regionList, 0);
    }
else
    {
    /* this is a bit of a kludge here since these printouts are done in the
     * library source wigDataStream.c statsOut() function and this is a
     * clean up of that.  That function should be pulled out of there and
     * made independent and more versatile. */
    long long realSize;
    double variance;
    double stddev;

    /* Too expensive to lookup the numbers for thousands of regions */
    regionSize = basesInRegion(regionList, MAX_REGION_DISPLAY);
    gapTotal = gapsInRegion(conn, regionList, MAX_REGION_DISPLAY);
    realSize = regionSize - gapTotal;

    /* close the table which was left open in the loop above */
    if (!gotSome)
        hPrintf("<TR><TH ALIGN=CENTER COLSPAN=12> No data found matching this request </TH></TR>\n");

    hPrintf("<TR><TH ALIGN=LEFT> SUMMARY: </TH>\n");
    hPrintf("\t<TD> </TD>\n");  /* chromStart */
    hPrintf("\t<TD> </TD>\n");  /* chromEnd */
    hPrintf("\t<TD ALIGN=RIGHT> ");
    printLongWithCommas(stdout, statsItemCount);
    hPrintf(" </TD>\n");
    hPrintf("\t<TD ALIGN=RIGHT> %d </TD>\n", statsSpan);
    hPrintf("\t<TD ALIGN=RIGHT> ");
    printLongWithCommas(stdout, statsItemCount * statsSpan);
    hPrintf(" (%.2f%%) </TD>\n",
            100.0 * (double)(statsItemCount * statsSpan) / (double)realSize);
    hPrintf("\t<TD ALIGN=RIGHT> %g </TD>\n", lowerLimit);
    hPrintf("\t<TD ALIGN=RIGHT> %g </TD>\n", upperLimit);
    hPrintf("\t<TD ALIGN=RIGHT> %g </TD>\n", upperLimit - lowerLimit);
    if (statsItemCount > 0)
        hPrintf("\t<TD ALIGN=RIGHT> %g </TD>\n", statsSumData / statsItemCount);
    else
        hPrintf("\t<TD ALIGN=RIGHT> 0.0 </TD>\n");
    stddev = 0.0;
    variance = 0.0;
    if (statsItemCount > 1)
        {
        variance = (statsSumSquares -
            ((statsSumData * statsSumData) / (double) statsItemCount)) /
            (double) (statsItemCount - 1);
        if (variance > 0.0)
            stddev = sqrt(variance);
        }
    hPrintf("\t<TD ALIGN=RIGHT> %g </TD>\n", variance);
    hPrintf("\t<TD ALIGN=RIGHT> %g </TD>\n", stddev);
    hPrintf("</TR>\n");
    wigStatsTableHeading(stdout, TRUE);
    hPrintf("</TABLE></TD></TR></TABLE></P>\n");
    }

#if defined(NOT)
/* can't do the histogram now, that operation times out */
/* Single region, we can do the histogram */
if ((valuesMatched > 1) && (1 == regionCount))
    {
    float *valuesArray = NULL;
    size_t valueCount = 0;
    struct histoResult *histoGramResult;

    /* convert the ascii data listings to one giant float array */
    valuesArray = wds->asciiToDataArray(wds, valuesMatched, &valueCount);

    /* histoGram() may return NULL if it doesn't work */
    histoGramResult = histoGram(valuesArray, valueCount,
            NAN, (unsigned) 0, NAN, (float) wds->stats->lowerLimit,
            (float) (wds->stats->lowerLimit + wds->stats->dataRange),
            (struct histoResult *)NULL);

    printHistoGram(histoGramResult, TRUE);      /* TRUE == html output */

    freeHistoGram(&histoGramResult);
    wds->freeAscii(wds);
    wds->freeArray(wds);
    }
#endif

wds->freeStats(wds);
wiggleDataStreamFree(&wds);

wigFetchTime = clock1000() - startTime;
webNewSection("Region and Timing Statistics");
hTableStart();
stringStatRow("region", regionName);
numberStatRow("bases in region", regionSize);
numberStatRow("bases in gaps", gapTotal);
floatStatRow("load and calc time", 0.001 * wigFetchTime);
wigFilterStatRow(conn);
stringStatRow("intersection", cartUsualString(cart, hgtaIntersectTable, "off"));
hTableEnd();
htmlClose();
}   /* void doSummaryStatsWiggle(struct sqlConnection *conn) */
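/* A side note on the running-sum reconstruction in the per-region loop above
 * (a derivation, not original source): with n = count, the sample variance is
 *
 *     variance = (sumSquares - (sumData*sumData)/n) / (n - 1)
 *
 * so each region's raw sum of squares is recovered as
 *
 *     sumSquares = variance*(n - 1) + (sumData*sumData)/n
 *
 * which is exactly what the (count > 1) branch computes before accumulating
 * into statsSumSquares; the final SUMMARY row then applies the first formula
 * again over the pooled totals. */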
static void doOutWig(struct trackDb *track, char *table, struct sqlConnection *conn,
                     enum wigOutputType wigOutType)
{
struct region *regionList = getRegions(), *region;
int maxOut = 0, outCount, curOut = 0;
char *shortLabel = table, *longLabel = table;

if (track == NULL)
    errAbort("Sorry, can't find necessary track information for %s.  "
             "If you reached this page by selecting \"All tables\" as the "
             "group, please go back and select the same table via a regular "
             "track group if possible.", table);

maxOut = bigFileMaxOutput();

if (cartUsualBoolean(cart, hgtaDoGreatOutput, FALSE))
    fputs("#", stdout);
else
    textOpen();

if (track != NULL)
    {
    if (!sameString(track->table, table) && track->subtracks != NULL)
        {
        struct slRef *tdbRefList = trackDbListGetRefsToDescendantLeaves(track->subtracks);
        struct slRef *tdbRef;
        for (tdbRef = tdbRefList; tdbRef != NULL; tdbRef = tdbRef->next)
            {
            struct trackDb *tdb = tdbRef->val;
            if (sameString(tdb->table, table))
                {
                track = tdb;
                break;
                }
            }
        slFreeList(&tdbRefList);
        }
    shortLabel = track->shortLabel;
    longLabel = track->longLabel;
    }
wigDataHeader(shortLabel, longLabel, NULL, wigOutType);

for (region = regionList; region != NULL; region = region->next)
    {
    int curMaxOut = maxOut - curOut;
    if (anySubtrackMerge(database, table))
        outCount = mergedWigOutRegion(table, conn, region, curMaxOut, wigOutType);
    else if (startsWithWord("bedGraph", track->type))
        outCount = bedGraphOutRegion(table, conn, region, curMaxOut, wigOutType);
    else if (startsWithWord("mathWig", track->type))
        outCount = mathWigOutRegion(track, table, conn, region, curMaxOut, wigOutType);
    else if (startsWithWord("bigWig", track->type))
        outCount = bigWigOutRegion(table, conn, region, curMaxOut, wigOutType);
    else
        outCount = wigOutRegion(table, conn, region, curMaxOut, wigOutType, NULL, 0);
    curOut += outCount;
    if (curOut >= maxOut)
        break;
    }
if (curOut >= maxOut)
    errAbort("Reached output limit of %d data values, please make region smaller,\n"
             "\tor set a higher output line limit with the filter settings.", curOut);
}
void bamTabOut(char *db, char *table, struct sqlConnection *conn, char *fields, FILE *f)
/* Print out selected fields from BAM.  If fields is NULL, then print out all fields. */
{
struct hTableInfo *hti = NULL;
hti = getHti(db, table, conn);
struct hash *idHash = NULL;
char *idField = getIdField(db, curTrack, table, hti);
int idFieldNum = 0;

/* if we know what field to use for the identifiers, get the hash of names */
if (idField != NULL)
    idHash = identifierHash(db, table);

if (f == NULL)
    f = stdout;

/* Convert comma separated list of fields to array. */
int fieldCount = chopByChar(fields, ',', NULL, 0);
char **fieldArray;
AllocArray(fieldArray, fieldCount);
chopByChar(fields, ',', fieldArray, fieldCount);

/* Get list of all fields in big bed and turn it into a hash of column indexes keyed by
 * column name. */
struct hash *fieldHash = hashNew(0);
struct slName *bb, *bbList = bamGetFields();
int i;
for (bb = bbList, i = 0; bb != NULL; bb = bb->next, ++i)
    {
    /* if we know the field for identifiers, save it away */
    if ((idField != NULL) && sameString(idField, bb->name))
        idFieldNum = i;
    hashAddInt(fieldHash, bb->name, i);
    }

/* Create an array of column indexes corresponding to the selected field list. */
int *columnArray;
AllocArray(columnArray, fieldCount);
for (i = 0; i < fieldCount; ++i)
    columnArray[i] = hashIntVal(fieldHash, fieldArray[i]);

/* Output row of labels */
fprintf(f, "#%s", fieldArray[0]);
for (i = 1; i < fieldCount; ++i)
    fprintf(f, "\t%s", fieldArray[i]);
fprintf(f, "\n");

struct asObject *as = bamAsObj();
struct asFilter *filter = NULL;
if (anyFilter())
    {
    filter = asFilterFromCart(cart, db, table, as);
    if (filter)
        fprintf(f, "# Filtering on %d columns\n", slCount(filter->columnList));
    }

/* Loop through outputting each region */
struct region *region, *regionList = getRegions();
int maxOut = bigFileMaxOutput();
for (region = regionList; region != NULL && (maxOut > 0); region = region->next)
    {
    struct lm *lm = lmInit(0);
    char *fileName = bamFileName(table, conn, region->chrom);
    struct samAlignment *sam, *samList = bamFetchSamAlignment(fileName, region->chrom,
        region->start, region->end, lm);
    char *row[SAMALIGNMENT_NUM_COLS];
    char numBuf[BAM_NUM_BUF_SIZE];
    for (sam = samList; sam != NULL && (maxOut > 0); sam = sam->next)
        {
        samAlignmentToRow(sam, numBuf, row);
        if (asFilterOnRow(filter, row))
            {
            /* if we're looking for identifiers, check if this matches */
            if ((idHash != NULL) && (hashLookup(idHash, row[idFieldNum]) == NULL))
                continue;
            int i;
            fprintf(f, "%s", row[columnArray[0]]);
            for (i = 1; i < fieldCount; ++i)
                fprintf(f, "\t%s", row[columnArray[i]]);
            fprintf(f, "\n");
            maxOut--;
            }
        }
    freeMem(fileName);
    lmCleanup(&lm);
    }
if (maxOut == 0)
    warn("Reached output limit of %d data values, please make region smaller,\n"
         "\tor set a higher output line limit with the filter settings.", bigFileMaxOutput());
/* Clean up and exit. */
hashFree(&fieldHash);
freeMem(fieldArray);
freeMem(columnArray);
}
CoverMap::CoverMap()
{
    w = Broodwar->mapWidth();
    h = Broodwar->mapHeight();
    range = 30;

    Unit* worker = findWorker();

    cover_map = new int*[w];
    for (int i = 0; i < w; i++)
    {
        cover_map[i] = new int[h];

        // Fill from static map and Region connectability
        for (int j = 0; j < h; j++)
        {
            int ok = BUILDABLE;
            if (!Broodwar->isBuildable(i, j))
            {
                ok = BLOCKED;
            }
            cover_map[i][j] = ok;
        }
    }

    // Fill from current agents
    vector<BaseAgent*> agents = AgentManager::getInstance()->getAgents();
    for (int i = 0; i < (int)agents.size(); i++)
    {
        BaseAgent* agent = agents.at(i);
        if (agent->isBuilding())
        {
            Corners c = getCorners(agent->getUnit());
            fill(c);
        }
    }

    // Fill from minerals
    for (set<Unit*>::iterator m = Broodwar->getMinerals().begin(); m != Broodwar->getMinerals().end(); m++)
    {
        Corners c;
        c.x1 = (*m)->getTilePosition().x() - 1;
        c.y1 = (*m)->getTilePosition().y() - 1;
        c.x2 = (*m)->getTilePosition().x() + 2;
        c.y2 = (*m)->getTilePosition().y() + 1;
        fill(c);

        cover_map[c.x1+2][c.y1+2] = MINERAL;
    }

    // Fill from gas
    for (set<Unit*>::iterator m = Broodwar->getGeysers().begin(); m != Broodwar->getGeysers().end(); m++)
    {
        Corners c;
        c.x1 = (*m)->getTilePosition().x() - 2;
        c.y1 = (*m)->getTilePosition().y() - 2;
        c.x2 = (*m)->getTilePosition().x() + 5;
        c.y2 = (*m)->getTilePosition().y() + 3;
        fill(c);

        cover_map[c.x1+2][c.y1+2] = GAS;
    }

    // Fill from narrow chokepoints
    if (analyzed)
    {
        for (set<Region*>::const_iterator i = getRegions().begin(); i != getRegions().end(); i++)
        {
            for (set<Chokepoint*>::const_iterator c = (*i)->getChokepoints().begin(); c != (*i)->getChokepoints().end(); c++)
            {
                if ((*c)->getWidth() <= 4 * 32)
                {
                    TilePosition center = TilePosition((*c)->getCenter());
                    // Renamed from the original "Corners c", which shadowed the
                    // chokepoint iterator of the same name.
                    Corners corners;
                    corners.x1 = center.x() - 1;
                    corners.x2 = center.x() + 1;
                    corners.y1 = center.y() - 1;
                    corners.y2 = center.y() + 1;
                    fill(corners);
                }
            }
        }
    }

    mapData = MapDataReader();
    mapData.readMap();
}
TilePosition ExplorationManager::getNextToExplore(Squad* squad)
{
    TilePosition curPos = squad->getCenter();
    TilePosition goal = squad->getGoal();

    // Special case: No goal set
    if (goal.x() == -1 || goal.y() == -1)
    {
        BWTA::Region* startRegion = getRegion(curPos);
        goal = TilePosition(startRegion->getCenter());
        return goal;
    }

    double dist = curPos.getDistance(goal);
    double acceptDist = 4;
    if (squad->isGround())
    {
        acceptDist = 6;
    }

    if (dist <= acceptDist)
    {
        // Squad is close to goal

        // 1. Set region to explored
        setExplored(goal);

        // 2. Find new region to explore
        BWTA::Region* startRegion = getRegion(goal);
        BWTA::Region* bestRegion = startRegion;

        if (bestRegion != NULL)
        {
            int bestLastVisitFrame = getLastVisitFrame(bestRegion);

            if (!squad->isAir())
            {
                // Ground explorers
                for (set<BWTA::Region*>::const_iterator i = startRegion->getReachableRegions().begin(); i != startRegion->getReachableRegions().end(); i++)
                {
                    int cLastVisitFrame = getLastVisitFrame((*i));
                    TilePosition c = TilePosition((*i)->getCenter());
                    if (cLastVisitFrame <= bestLastVisitFrame)
                    {
                        bestLastVisitFrame = cLastVisitFrame;
                        bestRegion = (*i);
                    }
                }
            }
            else
            {
                // Air explorers
                double bestDist = 100000;
                for (set<BWTA::Region*>::const_iterator i = getRegions().begin(); i != getRegions().end(); i++)
                {
                    int cLastVisitFrame = getLastVisitFrame((*i));
                    TilePosition c = TilePosition((*i)->getCenter());
                    double dist = c.getDistance(curPos);
                    if (cLastVisitFrame < bestLastVisitFrame)
                    {
                        bestLastVisitFrame = cLastVisitFrame;
                        bestRegion = (*i);
                        bestDist = dist;
                    }
                    if (cLastVisitFrame == bestLastVisitFrame && dist < bestDist)
                    {
                        bestLastVisitFrame = cLastVisitFrame;
                        bestRegion = (*i);
                        bestDist = dist;
                    }
                }
            }

            TilePosition newGoal = TilePosition(bestRegion->getCenter());
            return newGoal;
            //Broodwar->printf("Explorer: new goal (%d,%d) I am at (%d,%d) agentGoal (%d,%d)", newGoal.x(), newGoal.y(), curPos.x(), curPos.y(), agent->getGoal().x(), agent->getGoal().y());
        }
    }

    return TilePosition(-1, -1);
}
void doSummaryStatsBed(struct sqlConnection *conn)
/* Put up page showing summary stats for track that is in database
 * or that is bed-format custom. */
{
struct bed *bedList = NULL;
struct region *regionList = getRegions(), *region;
char *regionName = getRegionName();
long long regionSize = 0, gapTotal = 0, realSize = 0;
long startTime, midTime, endTime;
long loadTime = 0, calcTime = 0, freeTime = 0;
struct covStats *itemCovList = NULL, *blockCovList = NULL, *cov;
int itemCount = 0;
struct hTableInfo *hti = getHti(database, curTable, conn);
int minScore = BIGNUM, maxScore = -BIGNUM;
long long sumScores = 0;
boolean hasBlocks = hti->hasBlocks;
boolean hasScore = (hti->scoreField[0] != 0);
int fieldCount;

htmlOpen("%s (%s) Summary Statistics", curTableLabel(), curTable);

for (region = regionList; region != NULL; region = region->next)
    {
    struct lm *lm = lmInit(64*1024);
    startTime = clock1000();
    bedList = cookedBedList(conn, curTable, region, lm, &fieldCount);
    if (fieldCount < 12)
        hasBlocks = FALSE;
    if (fieldCount < 5)
        hasScore = FALSE;
    midTime = clock1000();
    loadTime += midTime - startTime;

    if (bedList != NULL)
        {
        itemCount += slCount(bedList);
        regionSize += region->end - region->start;
        cov = calcSpanOverRegion(region, bedList);
        slAddHead(&itemCovList, cov);
        if (hasBlocks)
            {
            cov = calcBlocksOverRegion(region, bedList);
            slAddHead(&blockCovList, cov);
            }
        if (hti->scoreField[0] != 0)
            {
            struct bed *bed;
            for (bed = bedList; bed != NULL; bed = bed->next)
                {
                int score = bed->score;
                if (score < minScore) minScore = score;
                if (score > maxScore) maxScore = score;
                sumScores += score;
                }
            }
        }
    endTime = clock1000();
    calcTime += endTime - midTime;
    lmCleanup(&lm);
    bedList = NULL;
    freeTime += clock1000() - endTime;
    }

regionSize = basesInRegion(regionList, 0);
gapTotal = gapsInRegion(conn, regionList, 0);
realSize = regionSize - gapTotal;

hTableStart();
startTime = clock1000();
numberStatRow("item count", itemCount);
if (itemCount > 0)
    {
    cov = covStatsSum(itemCovList);
    percentStatRow("item bases", cov->basesCovered, realSize);
    percentStatRow("item total", cov->sumBases, realSize);
    numberStatRow("smallest item", cov->minBases);
    numberStatRow("average item", round((double)cov->sumBases/cov->itemCount));
    numberStatRow("biggest item", cov->maxBases);
    }
if (hasBlocks && itemCount > 0)
    {
    cov = covStatsSum(blockCovList);
    hPrintf("<TR><TD>block count</TD><TD ALIGN=RIGHT>");
    printLongWithCommas(stdout, cov->itemCount);
    hPrintf("</TD></TR>\n");
    percentStatRow("block bases", cov->basesCovered, realSize);
    percentStatRow("block total", cov->sumBases, realSize);
    numberStatRow("smallest block", cov->minBases);
    numberStatRow("average block", round((double)cov->sumBases/cov->itemCount));
    numberStatRow("biggest block", cov->maxBases);
    }
if (hasScore != 0 && itemCount > 0 && sumScores != 0)
    {
    numberStatRow("smallest score", minScore);
    numberStatRow("average score", round((double)sumScores/itemCount));
    numberStatRow("biggest score", maxScore);
    }
hTableEnd();

/* Show region and time stats part of stats page. */
webNewSection("Region and Timing Statistics");
hTableStart();
stringStatRow("region", regionName);
numberStatRow("bases in region", regionSize);
numberStatRow("bases in gaps", gapTotal);
floatStatRow("load time", 0.001*loadTime);
floatStatRow("calculation time", 0.001*calcTime);
floatStatRow("free memory time", 0.001*freeTime);
stringStatRow("filter", (anyFilter() ? "on" : "off"));
stringStatRow("intersection", (anyIntersection() ? "on" : "off"));
hTableEnd();
covStatsFreeList(&itemCovList);
covStatsFreeList(&blockCovList);
htmlClose();
}
void doSummaryStatsBigWig(struct sqlConnection *conn)
/* Put up page showing summary stats for bigWig track. */
{
struct trackDb *track = curTrack;
char *table = curTable;
char *shortLabel = (track == NULL ? table : track->shortLabel);
char *fileName = bigWigFileName(table, conn);
long startTime = clock1000();

htmlOpen("%s (%s) Big Wig Summary Statistics", shortLabel, table);

if (anySubtrackMerge(database, curTable))
    hPrintf("<P><EM><B>Note:</B> subtrack merge is currently ignored on this "
            "page (not implemented yet).  Statistics shown here are only for "
            "the primary table %s (%s).</EM>", shortLabel, table);

struct bbiFile *bwf = bigWigFileOpen(fileName);
struct region *region, *regionList = getRegions();
double sumData = 0, sumSquares = 0, minVal = 0, maxVal = 0;
bits64 validCount = 0;

if (!anyFilter() && !anyIntersection())
    {
    for (region = regionList; region != NULL; region = region->next)
        {
        struct bbiSummaryElement sum;
        if (bbiSummaryArrayExtended(bwf, region->chrom, region->start, region->end,
                                    bigWigIntervalQuery, 1, &sum))
            {
            if (validCount == 0)
                {
                minVal = sum.minVal;
                maxVal = sum.maxVal;
                }
            else
                {
                if (sum.minVal < minVal) minVal = sum.minVal;
                if (sum.maxVal > maxVal) maxVal = sum.maxVal;
                }
            sumData += sum.sumData;
            sumSquares += sum.sumSquares;
            validCount += sum.validCount;
            }
        }
    }
else
    {
    double ll, ul;
    enum wigCompare cmp;
    getWigFilter(database, curTable, &cmp, &ll, &ul);
    for (region = regionList; region != NULL; region = region->next)
        {
        struct lm *lm = lmInit(0);
        struct bbiInterval *iv, *ivList;
        ivList = intersectedFilteredBbiIntervalsOnRegion(conn, bwf, region, cmp, ll, ul, lm);
        for (iv = ivList; iv != NULL; iv = iv->next)
            {
            double val = iv->val;
            double size = iv->end - iv->start;
            if (validCount == 0)
                minVal = maxVal = val;
            else
                {
                if (val < minVal) minVal = val;
                if (val > maxVal) maxVal = val;
                }
            sumData += size*val;
            sumSquares += size*val*val;
            validCount += size;
            }
        lmCleanup(&lm);
        }
    }

hTableStart();
floatStatRow("mean", sumData/validCount);
floatStatRow("min", minVal);
floatStatRow("max", maxVal);
floatStatRow("standard deviation", calcStdFromSums(sumData, sumSquares, validCount));
numberStatRow("bases with data", validCount);
long long regionSize = basesInRegion(regionList, 0);
long long gapTotal = gapsInRegion(conn, regionList, 0);
numberStatRow("bases with sequence", regionSize - gapTotal);
numberStatRow("bases in region", regionSize);
wigFilterStatRow(conn);
stringStatRow("intersection", cartUsualString(cart, hgtaIntersectTable, "off"));
long wigFetchTime = clock1000() - startTime;
floatStatRow("load and calc time", 0.001*wigFetchTime);
hTableEnd();
bbiFileClose(&bwf);
htmlClose();
}
void tabOutSelectedFields(
        char *primaryDb,                /* The primary database. */
        char *primaryTable,             /* The primary table. */
        FILE *f,                        /* file for output, null for stdout */
        struct slName *fieldList)       /* List of db.table.field */
/* Do tab-separated output on selected fields, which may
 * or may not include multiple tables. */
{
struct joinerDtf *dtfList = NULL;
struct joinerDtf *filterTables = NULL;
boolean doJoin = joinRequired(primaryDb, primaryTable,
                              fieldList, &dtfList, &filterTables);

if (!doJoin)
    {
    struct sqlConnection *conn = hAllocConn(dtfList->database);
    struct dyString *dy = dyStringNew(0);

    if (hIsBigBed(database, dtfList->table, NULL, ctLookupName))
        makeBigBedOrderedCommaFieldList(dtfList, dy);
    else if (isCustomTrack(dtfList->table))
        makeCtOrderedCommaFieldList(dtfList, dy);
    else
        makeDbOrderedCommaFieldList(conn, dtfList->table, dtfList, dy);
    doTabOutTable(dtfList->database, dtfList->table, f, conn, dy->string);
    hFreeConn(&conn);
    }
else
    {
    struct joiner *joiner = allJoiner;
    struct joinedTables *joined = joinedTablesCreate(joiner, primaryDb, primaryTable,
                                                     dtfList, filterTables, 1000000, getRegions());
    if (f == NULL)
        joinedTablesTabOut(joined);
    else
        joinedTablesTabOutFile(joined, f);
    joinedTablesFree(&joined);
    }
joinerDtfFreeList(&dtfList);
joinerDtfFreeList(&filterTables);
}
static DetectedFace *postProcessing(DetectedFace *faces, int size, int *newsize)
{
    int size1 = 0;
    DetectedFace *trueFaces1 = malloc(size * sizeof(DetectedFace));
    int *regionsInfos;
    DetectedFace **regions = getRegions(faces, size, &regionsInfos);
    int nbRegions = *regionsInfos;
    double *confidenceVals = malloc(nbRegions * sizeof(double));

    /* Keep only regions whose detection density clears the threshold. */
    for (int i = 0; i < nbRegions; ++i)
    {
        DetectedFace *region = regions[i];
        int nbFacesInRegion = regionsInfos[i+1];
        double confidence = (double)nbFacesInRegion / (double)region->w;
        if (confidence > CONFIDENCE_THRESHOLD)
        {
            trueFaces1[size1] = region[nbFacesInRegion/2];
            confidenceVals[size1++] = confidence;
        }
    }

    sortFacesBySize(trueFaces1, size1);

    /* Discard the lower-confidence face of any pair whose centers overlap. */
    DetectedFace *trueFaces2 = malloc(size1 * sizeof(DetectedFace));
    int size2 = 0;
    for (int i = 0; i < size1; ++i)
    {
        DetectedFace face = trueFaces1[i];
        int faceX = face.x + face.w/2;
        int faceY = face.y + face.h/2;
        for (int j = i+1; j < size1; ++j)
        {
            DetectedFace face2 = trueFaces1[j];
            if (face2.x >= 0 && faceX > face2.x && faceX < face2.x + face2.w
                && faceY > face2.y && faceY < face2.y + face2.h)
            {
                if (confidenceVals[i] > confidenceVals[j])
                    trueFaces1[j].x = -1;   /* mark j as removed */
                else
                {
                    trueFaces1[i].x = -1;   /* mark i as removed */
                    break;
                }
            }
        }
    }
    for (int i = 0; i < size1; ++i)
    {
        if (trueFaces1[i].x >= 0)
            trueFaces2[size2++] = trueFaces1[i];
    }

    *newsize = size2;
    return trueFaces2;
}
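/* A small sketch restating the suppression test above as a standalone helper;
 * illustrative only, not part of the original source. It returns nonzero when
 * the center of face `a` falls strictly inside face `b`, which is the
 * condition the nested loop uses to drop the lower-confidence member of an
 * overlapping pair (faces already dropped are marked with x = -1). */
static int centerInside(DetectedFace a, DetectedFace b)
{
    int cx = a.x + a.w / 2;   /* center of a */
    int cy = a.y + a.h / 2;
    return b.x >= 0
        && cx > b.x && cx < b.x + b.w
        && cy > b.y && cy < b.y + b.h;
}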
void doGenePredNongenomic(struct sqlConnection *conn, int typeIx)
/* Get mrna or protein associated with selected genes. */
{
/* Note this does do the whole genome at once rather than one
 * chromosome at a time, but that's ok because the gene prediction
 * tracks this serves are on the small side. */
char *typeWords[3];
char *table;
struct lm *lm = lmInit(64*1024);
int fieldCount;
struct bed *bed, *bedList = cookedBedsOnRegions(conn, curTable, getRegions(),
                                                lm, &fieldCount);
int typeWordCount;

textOpen();

/* Figure out which table to use. */
if (isRefGeneTrack(curTable))
    {
    if (typeIx == 1) /* Protein */
        doRefGeneProteinSequence(conn, bedList);
    else
        doRefGeneMrnaSequence(conn, bedList);
    }
else
    {
    char *dupType = cloneString(findTypeForTable(database, curTrack, curTable, ctLookupName));
    typeWordCount = chopLine(dupType, typeWords);
    if (typeIx >= typeWordCount)
        internalErr();
    table = typeWords[typeIx];
    if (sqlTableExists(conn, table))
        {
        struct sqlResult *sr;
        char **row;
        char query[256];
        struct hash *hash = newHash(18);
        boolean gotResults = FALSE;

        /* Make hash of all id's passing filters. */
        for (bed = bedList; bed != NULL; bed = bed->next)
            hashAdd(hash, bed->name, NULL);

        /* Scan through table, outputting ones that match. */
        sqlSafef(query, sizeof(query), "select name, seq from %s", table);
        sr = sqlGetResult(conn, query);
        while ((row = sqlNextRow(sr)) != NULL)
            {
            if (hashLookup(hash, row[0]))
                {
                hPrintf(">%s\n", row[0]);
                writeSeqWithBreaks(stdout, row[1], strlen(row[1]), 60);
                gotResults = TRUE;
                }
            }
        sqlFreeResult(&sr);
        hashFree(&hash);
        if (!gotResults)
            hPrintf(NO_RESULTS);
        }
    else
        internalErr();
    freez(&dupType);
    }
lmCleanup(&lm);
}
void vcfTabOut(char *db, char *table, struct sqlConnection *conn, char *fields,
               FILE *f, boolean isTabix)
/* Print out selected fields from VCF.  If fields is NULL, then print out all fields. */
{
struct hTableInfo *hti = NULL;
hti = getHti(db, table, conn);
struct hash *idHash = NULL;
char *idField = getIdField(db, curTrack, table, hti);
int idFieldNum = 0;

/* if we know what field to use for the identifiers, get the hash of names */
if (idField != NULL)
    idHash = identifierHash(db, table);

if (f == NULL)
    f = stdout;

/* Convert comma separated list of fields to array. */
int fieldCount = chopByChar(fields, ',', NULL, 0);
char **fieldArray;
AllocArray(fieldArray, fieldCount);
chopByChar(fields, ',', fieldArray, fieldCount);

/* Get list of all fields in big bed and turn it into a hash of column indexes keyed by
 * column name. */
struct hash *fieldHash = hashNew(0);
struct slName *bb, *bbList = vcfGetFields();
int i;
for (bb = bbList, i = 0; bb != NULL; bb = bb->next, ++i)
    {
    /* if we know the field for identifiers, save it away */
    if ((idField != NULL) && sameString(idField, bb->name))
        idFieldNum = i;
    hashAddInt(fieldHash, bb->name, i);
    }

/* Create an array of column indexes corresponding to the selected field list. */
int *columnArray;
AllocArray(columnArray, fieldCount);
for (i = 0; i < fieldCount; ++i)
    columnArray[i] = hashIntVal(fieldHash, fieldArray[i]);

// If we are outputting a subset of fields, invalidate the VCF header.
boolean allFields = (fieldCount == VCFDATALINE_NUM_COLS);
if (!allFields)
    fprintf(f, "# Only selected columns are included below; output is not valid VCF.\n");

struct asObject *as = vcfAsObj();
struct asFilter *filter = NULL;
if (anyFilter())
    filter = asFilterFromCart(cart, db, table, as);

/* Loop through outputting each region */
struct region *region, *regionList = getRegions();
int maxOut = bigFileMaxOutput();
struct trackDb *tdb = hashFindVal(fullTableToTdbHash, table);
// Include the header, absolutely necessary for VCF parsing.
boolean printedHeader = FALSE;
// Temporary storage for row-ification:
struct dyString *dyAlt = newDyString(1024);
struct dyString *dyFilter = newDyString(1024);
struct dyString *dyInfo = newDyString(1024);
struct dyString *dyGt = newDyString(1024);
struct vcfRecord *rec;
for (region = regionList; region != NULL && (maxOut > 0); region = region->next)
    {
    char *fileName = vcfFileName(tdb, conn, table, region->chrom);
    struct vcfFile *vcff;
    if (isTabix)
        vcff = vcfTabixFileMayOpen(fileName, region->chrom, region->start, region->end,
                                   100, maxOut);
    else
        vcff = vcfFileMayOpen(fileName, region->chrom, region->start, region->end,
                              100, maxOut, TRUE);
    if (vcff == NULL)
        noWarnAbort();
    // If we are outputting all fields, but this VCF has no genotype info, omit the
    // genotype columns from output:
    if (allFields && vcff->genotypeCount == 0)
        fieldCount = VCFDATALINE_NUM_COLS - 2;
    if (!printedHeader)
        {
        fprintf(f, "%s", vcff->headerString);
        if (filter)
            fprintf(f, "# Filtering on %d columns\n", slCount(filter->columnList));
        if (!allFields)
            {
            fprintf(f, "#%s", fieldArray[0]);
            for (i = 1; i < fieldCount; ++i)
                fprintf(f, "\t%s", fieldArray[i]);
            fprintf(f, "\n");
            }
        printedHeader = TRUE;
        }
    char *row[VCFDATALINE_NUM_COLS];
    char numBuf[VCF_NUM_BUF_SIZE];
    for (rec = vcff->records; rec != NULL && (maxOut > 0); rec = rec->next)
        {
        vcfRecordToRow(rec, region->chrom, numBuf, dyAlt, dyFilter, dyInfo, dyGt, row);
        if (asFilterOnRow(filter, row))
            {
            /* if we're looking for identifiers, check if this matches */
            if ((idHash != NULL) && (hashLookup(idHash, row[idFieldNum]) == NULL))
                continue;
            // All fields output: after asFilter'ing, preserve original VCF chrom
            if (allFields && !sameString(rec->chrom, region->chrom))
                row[0] = rec->chrom;
            int i;
            fprintf(f, "%s", row[columnArray[0]]);
            for (i = 1; i < fieldCount; ++i)
                fprintf(f, "\t%s", row[columnArray[i]]);
            fprintf(f, "\n");
            maxOut--;
            }
        }
    vcfFileFree(&vcff);
    freeMem(fileName);
    }
if (maxOut == 0)
    warn("Reached output limit of %d data values, please make region smaller,\n"
         "\tor set a higher output line limit with the filter settings.", bigFileMaxOutput());
/* Clean up and exit. */
dyStringFree(&dyAlt);
dyStringFree(&dyFilter);
dyStringFree(&dyInfo);
dyStringFree(&dyGt);
hashFree(&fieldHash);
freeMem(fieldArray);
freeMem(columnArray);
}