struct snoRNAs *snoRNAsLoad(char **row)
/* Load a snoRNAs from row fetched with select * from snoRNAs
 * from database.  Dispose of this with snoRNAsFree().
 * row must supply at least 18 columns in snoRNAs column order. */
{
struct snoRNAs *ret;

AllocVar(ret);
ret->chrom = cloneString(row[0]);
ret->chromStart = sqlUnsigned(row[1]);
ret->chromEnd = sqlUnsigned(row[2]);
ret->name = cloneString(row[3]);
ret->score = sqlUnsigned(row[4]);
/* Bounded copy instead of strcpy: strand is a small fixed-size char
 * array, and a malformed database value must not overflow it.  This
 * matches the sibling loaders (e.g. encodePeakLoad) in this file. */
safecpy(ret->strand, sizeof(ret->strand), row[5]);
ret->snoScore = sqlFloat(row[6]);
ret->targetList = cloneString(row[7]);
ret->orthologs = cloneString(row[8]);
ret->guideLen = cloneString(row[9]);
ret->guideStr = cloneString(row[10]);
ret->guideScore = cloneString(row[11]);
ret->cBox = cloneString(row[12]);
ret->dBox = cloneString(row[13]);
ret->cpBox = cloneString(row[14]);
ret->dpBox = cloneString(row[15]);
ret->hmmScore = sqlFloat(row[16]);
ret->snoscanOutput = cloneString(row[17]);
return ret;
}
struct encodePeak *encodePeakLoad(char **row)
/* Build a freshly allocated encodePeak from a full 13-column result row.
 * Release the result with encodePeakFree(). */
{
struct encodePeak *peak;
int count;

AllocVar(peak);
peak->chrom = cloneString(row[0]);
peak->chromStart = sqlUnsigned(row[1]);
peak->chromEnd = sqlUnsigned(row[2]);
peak->name = cloneString(row[3]);
peak->score = sqlUnsigned(row[4]);
safecpy(peak->strand, sizeof(peak->strand), row[5]);
peak->signalValue = sqlFloat(row[6]);
peak->pValue = sqlFloat(row[7]);
peak->qValue = sqlFloat(row[8]);
peak->peak = sqlSigned(row[9]);
/* blockCount must be parsed before the comma-separated lists so the
 * element counts can be cross-checked against it. */
peak->blockCount = sqlUnsigned(row[10]);
sqlUnsignedDynamicArray(row[11], &peak->blockSizes, &count);
assert(count == peak->blockCount);
sqlUnsignedDynamicArray(row[12], &peak->blockStarts, &count);
assert(count == peak->blockCount);
return peak;
}
void lowelabArkinOperonScoreStaticLoad(char **row, struct lowelabArkinOperonScore *ret)
/* Fill ret in place from one lowelabArkinOperonScore result row.  String
 * members alias the row buffers, so they are only valid until the next
 * fetch overwrites them. */
{
ret->name = row[0];
ret->gene1 = row[1];
ret->gene2 = row[2];
ret->prob = sqlFloat(row[3]);
ret->gnMinus = sqlFloat(row[4]);
}
void transMapSrcStaticLoad(char **row, struct transMapSrc *ret)
/* Populate ret from a single transMapSrc row.  Only db is copied into
 * the struct; the other string members point straight into row and are
 * clobbered by the next fetch. */
{
safecpy(ret->db, sizeof(ret->db), row[0]);
ret->id = row[1];
ret->chrom = row[2];
ret->chromStart = sqlUnsigned(row[3]);
ret->chromEnd = sqlUnsigned(row[4]);
ret->strand = row[5][0];
ret->ident = sqlFloat(row[6]);
ret->aligned = sqlFloat(row[7]);
}
struct lowelabArkinOperonScore *lowelabArkinOperonScoreLoad(char **row)
/* Allocate a lowelabArkinOperonScore and fill it from a result row;
 * every string column is cloned.  Free the result with
 * lowelabArkinOperonScoreFree(). */
{
struct lowelabArkinOperonScore *item;

AllocVar(item);
item->name = cloneString(row[0]);
item->gene1 = cloneString(row[1]);
item->gene2 = cloneString(row[2]);
item->prob = sqlFloat(row[3]);
item->gnMinus = sqlFloat(row[4]);
return item;
}
void polyGenotypeStaticLoad(char **row, struct polyGenotype *ret)
/* Fill ret in place from one polyGenotype result row.  name and
 * ethnicGroup alias the row buffers and are overwritten by the next
 * fetch. */
{
ret->name = row[0];
ret->ethnicGroup = row[1];
ret->plusPlus = sqlSigned(row[2]);
ret->plusMinus = sqlSigned(row[3]);
ret->minusMinus = sqlSigned(row[4]);
ret->sampleSize = sqlSigned(row[5]);
ret->alleleFrequency = sqlFloat(row[6]);
ret->unbiasedHeterozygosity = sqlFloat(row[7]);
}
struct transMapSrc *transMapSrcLoad(char **row)
/* Build a freshly allocated transMapSrc from a result row; string
 * columns are cloned so the struct outlives the query.  Release with
 * transMapSrcFree(). */
{
struct transMapSrc *item;

AllocVar(item);
safecpy(item->db, sizeof(item->db), row[0]);
item->id = cloneString(row[1]);
item->chrom = cloneString(row[2]);
item->chromStart = sqlUnsigned(row[3]);
item->chromEnd = sqlUnsigned(row[4]);
item->strand = row[5][0];
item->ident = sqlFloat(row[6]);
item->aligned = sqlFloat(row[7]);
return item;
}
struct polyGenotype *polyGenotypeLoad(char **row)
/* Allocate a polyGenotype and fill it from a result row; string columns
 * are cloned.  Free the result with polyGenotypeFree(). */
{
struct polyGenotype *item;

AllocVar(item);
item->name = cloneString(row[0]);
item->ethnicGroup = cloneString(row[1]);
item->plusPlus = sqlSigned(row[2]);
item->plusMinus = sqlSigned(row[3]);
item->minusMinus = sqlSigned(row[4]);
item->sampleSize = sqlSigned(row[5]);
item->alleleFrequency = sqlFloat(row[6]);
item->unbiasedHeterozygosity = sqlFloat(row[7]);
return item;
}
void bed5PvalStaticLoad(char **row, struct bed5Pval *ret)
/* Fill ret in place from one bed5Pval result row.  chrom and name alias
 * the row buffers and are only valid until the next fetch. */
{
ret->chrom = row[0];
ret->chromStart = sqlUnsigned(row[1]);
ret->chromEnd = sqlUnsigned(row[2]);
ret->name = row[3];
ret->score = sqlSigned(row[4]);
ret->pValue = sqlFloat(row[5]);
}
struct autoTest *autoTestLoad(char **row)
/* Load a autoTest from row fetched with select * from autoTest
 * from database.  Dispose of this with autoTestFree(). */
{
struct autoTest *ret;

AllocVar(ret);
/* The array-count columns are parsed first so the dynamic-array parses
 * below can be cross-checked against them with assert. */
ret->ptCount = sqlSigned(row[5]);
ret->difCount = sqlSigned(row[7]);
ret->valCount = sqlSigned(row[10]);
ret->id = sqlUnsigned(row[0]);
safecpy(ret->shortName, sizeof(ret->shortName), row[1]);
ret->longName = cloneString(row[2]);
/* aliases is a fixed array of 3 strings; the cloned buffer s stays
 * allocated because sqlStringArray leaves the pointers aiming into it. */
{
char *s = cloneString(row[3]);
sqlStringArray(s, ret->aliases, 3);
}
/* threeD is optional: only parsed when the column is non-empty. */
{
char *s = row[4];
if(s != NULL && differentString(s, ""))
   ret->threeD = pointCommaIn(&s, NULL);
}
{
int sizeOne;
sqlShortDynamicArray(row[6], &ret->pts, &sizeOne);
assert(sizeOne == ret->ptCount);
}
{
int sizeOne;
sqlUbyteDynamicArray(row[8], &ret->difs, &sizeOne);
assert(sizeOne == ret->difCount);
}
sqlSignedArray(row[9], ret->xy, 2);
{
int sizeOne;
sqlStringDynamicArray(row[11], &ret->vals, &sizeOne);
assert(sizeOne == ret->valCount);
}
ret->dblVal = sqlDouble(row[12]);
ret->fltVal = sqlFloat(row[13]);
/* dblArray and fltArray share valCount with vals. */
{
int sizeOne;
sqlDoubleDynamicArray(row[14], &ret->dblArray, &sizeOne);
assert(sizeOne == ret->valCount);
}
{
int sizeOne;
sqlFloatDynamicArray(row[15], &ret->fltArray, &sizeOne);
assert(sizeOne == ret->valCount);
}
return ret;
}
void bToBeCfgStaticLoad(char **row, struct bToBeCfg *ret)
/* Fill ret in place from one bToBeCfg result row.  String members alias
 * the row buffers, so the next fetch invalidates them. */
{
ret->factor = row[0];
ret->source = row[1];
ret->sourceId = row[2];
ret->dataSource = row[3];
ret->scoreCol = sqlSigned(row[4]);
ret->multiplier = sqlFloat(row[5]);
ret->dataTable = row[6];
}
void hgGenericMicroarray(char *file) /* Load the simple file into two tables.*/ { struct expRecord *exps = NULL; struct expData *data = NULL; struct lineFile *lf = lineFileOpen(file,TRUE); char *strings[1000]; int ncols, i, n; /* First line has experiment info */ ncols = lineFileChopTab(lf,strings); for (i = 1; i < ncols; i++) { struct expRecord *oneRec = NULL; AllocVar(oneRec); oneRec->id = i - 1; oneRec->name = cloneString(strings[i]); oneRec->description = cloneString(strings[i]); oneRec->url = CSNA; oneRec->ref = CSNA; oneRec->credit = CSNA; oneRec->numExtras = 3; AllocArray(oneRec->extras, oneRec->numExtras); oneRec->extras[0] = cloneString("n/a"); oneRec->extras[1] = cloneString("n/a"); oneRec->extras[2] = cloneString(strings[i]); slAddHead(&exps, oneRec); } slReverse(&exps); /* Other lines have the data. */ while ((n = lineFileChopTab(lf,strings)) > 0) { struct expData *oneGene = NULL; AllocVar(oneGene); oneGene->name = cloneString(strings[0]); oneGene->expCount = ncols - 1; AllocArray(oneGene->expScores, oneGene->expCount); for (i = 1; i < ncols; i++) oneGene->expScores[i-1] = sqlFloat(strings[i]); slAddHead(&data, oneGene); } if ((n != ncols) && (n > 0)) errAbort("Wrong number of columns in line %d of %s. Got %d, want %d", lf->lineIx, file, n, ncols); slReverse(&data); saveExpsTable(exps); saveDataTable(data); expRecordFreeList(&exps); expDataFreeList(&data); lineFileClose(&lf); }
void snoRNAsStaticLoad(char **row, struct snoRNAs *ret)
/* Load a row from snoRNAs table into ret.  The contents of ret will
 * be replaced at the next call to this function.  String members alias
 * the row buffers (except strand, which is copied). */
{
ret->chrom = row[0];
ret->chromStart = sqlUnsigned(row[1]);
ret->chromEnd = sqlUnsigned(row[2]);
ret->name = row[3];
ret->score = sqlUnsigned(row[4]);
/* Bounded copy instead of strcpy: strand is a small fixed-size char
 * array and a malformed value must not overflow it. */
safecpy(ret->strand, sizeof(ret->strand), row[5]);
ret->snoScore = sqlFloat(row[6]);
ret->targetList = row[7];
ret->orthologs = row[8];
ret->guideLen = row[9];
ret->guideStr = row[10];
ret->guideScore = row[11];
ret->cBox = row[12];
ret->dBox = row[13];
ret->cpBox = row[14];
ret->dpBox = row[15];
ret->hmmScore = sqlFloat(row[16]);
ret->snoscanOutput = row[17];
}
struct bed5Pval *bed5PvalLoad(char **row)
/* Allocate a bed5Pval and fill it from a result row; string columns are
 * cloned.  Free the result with bed5PvalFree(). */
{
struct bed5Pval *item;

AllocVar(item);
item->chrom = cloneString(row[0]);
item->chromStart = sqlUnsigned(row[1]);
item->chromEnd = sqlUnsigned(row[2]);
item->name = cloneString(row[3]);
item->score = sqlSigned(row[4]);
item->pValue = sqlFloat(row[5]);
return item;
}
struct bToBeCfg *bToBeCfgLoad(char **row)
/* Allocate a bToBeCfg and fill it from a result row; string columns are
 * cloned.  Free the result with bToBeCfgFree(). */
{
struct bToBeCfg *item;

AllocVar(item);
item->factor = cloneString(row[0]);
item->source = cloneString(row[1]);
item->sourceId = cloneString(row[2]);
item->dataSource = cloneString(row[3]);
item->scoreCol = sqlSigned(row[4]);
item->multiplier = sqlFloat(row[5]);
item->dataTable = cloneString(row[6]);
return item;
}
void tRNAsStaticLoad(char **row, struct tRNAs *ret)
/* Fill ret in place from one tRNAs result row.  String members alias
 * the row buffers (except strand, which is copied) and are overwritten
 * by the next fetch. */
{
ret->chrom = row[0];
ret->chromStart = sqlUnsigned(row[1]);
ret->chromEnd = sqlUnsigned(row[2]);
ret->name = row[3];
ret->score = sqlUnsigned(row[4]);
safecpy(ret->strand, sizeof(ret->strand), row[5]);
ret->aa = row[6];
ret->ac = row[7];
ret->intron = row[8];
ret->trnaScore = sqlFloat(row[9]);
ret->genomeUrl = row[10];
ret->trnaUrl = row[11];
}
void readFreq()
/* Read allele frequencies from the dbSNP SNPAlleleFreq table and emit one
 * bin/chrom/start/end/name/allele/freq line per SNP to snpFreq.tab.
 * Rows whose allele_id is missing from alleleHash, or whose rs ID is
 * missing from snpHash, are logged to snpFreq.log and skipped.
 * NOTE(review): assumes the file globals alleleHash and snpHash are
 * already populated by an earlier pass -- confirm with callers. */
{
FILE *outputFileHandle = mustOpen("snpFreq.tab", "w");
FILE *logFileHandle = mustOpen("snpFreq.log", "w");
char query[512];
struct sqlConnection *conn = hAllocConn();
struct sqlResult *sr;
char **row;
struct hashEl *alleleHashEl = NULL;
struct hashEl *snpHashEl = NULL;
struct coords *coordsInstance = NULL;
char snpName[32];
int bin = 0;

safef(query, sizeof(query), "select snp_id, allele_id, freq from SNPAlleleFreq");
sr = sqlGetResult(conn, query);
while ((row = sqlNextRow(sr)) != NULL)
    {
    /* Translate allele_id to its string form; skip unknown ids. */
    alleleHashEl = hashLookup(alleleHash, row[1]);
    if (alleleHashEl == NULL)
        {
        fprintf(logFileHandle, "couldn't find allele_id %s\n", row[1]);
        continue;
        }
    /* dbSNP stores the bare number; coordinates are keyed by "rsNNN". */
    safef(snpName, sizeof(snpName), "rs%s", row[0]);
    snpHashEl = hashLookup(snpHash, snpName);
    if (snpHashEl == NULL)
        {
        fprintf(logFileHandle, "skipping snp_id %s\n", row[0]);
        continue;
        }
    coordsInstance = (struct coords *)snpHashEl->val;
    /* could add bin here */
    bin = hFindBin(coordsInstance->start, coordsInstance->end);
    fprintf(outputFileHandle, "%d\t%s\t%d\t%d\t%s\t%s\t%f\n",
        bin, coordsInstance->chrom, coordsInstance->start, coordsInstance->end,
        snpName, (char *)alleleHashEl->val, sqlFloat(row[2]));
    }
carefulClose(&outputFileHandle);
carefulClose(&logFileHandle);
sqlFreeResult(&sr);
hFreeConn(&conn);
}
struct bedNamedScore *bedNamedScoreLoadNext(struct lineFile *lf)
/* Read the next line of lf as a 6-column bedNamedScore record.
 * Returns NULL at end of file.  Aborts with a file/line message on a
 * line with the wrong column count (the original dereferenced
 * uninitialized row slots on short lines). */
{
char *row[6];
int rowSize = lineFileChopNext(lf, row, ArraySize(row));
if (rowSize == 0)
    return NULL;
/* Guard against short/ragged lines before touching row[1..5]. */
lineFileExpectWords(lf, ArraySize(row), rowSize);
struct bedNamedScore *bg;
AllocVar(bg);
bg->chrom = cloneString(row[0]);
bg->chromStart = sqlUnsigned(row[1]);
bg->chromEnd = sqlUnsigned(row[2]);
bg->name = cloneString(row[3]);
bg->score = sqlFloat(row[4]);
bg->strand = row[5][0];
return bg;
}
struct tRNAs *tRNAsLoad(char **row)
/* Build a freshly allocated tRNAs from a result row; every string
 * column is cloned so the struct outlives the query.  Release with
 * tRNAsFree(). */
{
struct tRNAs *item;

AllocVar(item);
item->chrom = cloneString(row[0]);
item->chromStart = sqlUnsigned(row[1]);
item->chromEnd = sqlUnsigned(row[2]);
item->name = cloneString(row[3]);
item->score = sqlUnsigned(row[4]);
safecpy(item->strand, sizeof(item->strand), row[5]);
item->aa = cloneString(row[6]);
item->ac = cloneString(row[7]);
item->intron = cloneString(row[8]);
item->trnaScore = sqlFloat(row[9]);
item->genomeUrl = cloneString(row[10]);
item->trnaUrl = cloneString(row[11]);
return item;
}
struct encodePeak *encodePeakGeneralLoad(char **row, enum encodePeakType pt) /* Make a new encodePeak and return it. */ { struct encodePeak *peak; if (!pt) return NULL; if (pt == encodePeak) return encodePeakLoad(row); AllocVar(peak); peak->chrom = cloneString(row[0]); peak->chromStart = sqlUnsigned(row[1]); peak->chromEnd = sqlUnsigned(row[2]); peak->name = cloneString(row[3]); peak->score = sqlUnsigned(row[4]); peak->peak = -1; safecpy(peak->strand, sizeof(peak->strand), row[5]); if ((pt == broadPeak) || (pt == narrowPeak)) { peak->signalValue = sqlFloat(row[6]); peak->pValue = sqlFloat(row[7]); peak->qValue = sqlFloat(row[8]); if (pt == narrowPeak) peak->peak = sqlSigned(row[9]); } else /* must be gappedPeak */ { int sizeOne; peak->blockCount = sqlUnsigned(row[9]); sqlUnsignedDynamicArray(row[10], &peak->blockSizes, &sizeOne); assert(sizeOne == peak->blockCount); sqlUnsignedDynamicArray(row[11], &peak->blockStarts, &sizeOne); assert(sizeOne == peak->blockCount); peak->signalValue = sqlFloat(row[12]); peak->pValue = sqlFloat(row[13]); peak->qValue = sqlFloat(row[14]); } return peak; }
struct bbiSummary *bedGraphWriteReducedOnceReturnReducedTwice(struct bbiChromUsage *usageList,
	int fieldCount, struct lineFile *lf, bits32 initialReduction, bits32 initialReductionCount,
	int zoomIncrement, int blockSize, int itemsPerSlot, boolean doCompress, struct lm *lm,
	FILE *f, bits64 *retDataStart, bits64 *retIndexStart, struct bbiSummaryElement *totalSum)
/* Write out data reduced by factor of initialReduction.  Also calculate and keep in memory
 * next reduction level.  This is more work than some ways, but it keeps us from having to
 * keep the first reduction entirely in memory.
 * On return *retDataStart/*retIndexStart hold the file offsets of the zoom data and its
 * index, and totalSum holds whole-file summary stats.
 * NOTE(review): assumes lf rows appear in the same chromosome order as usageList, with
 * each chromosome's items sorted by start -- confirm with callers. */
{
struct bbiSummary *twiceReducedList = NULL;
bits32 doubleReductionSize = initialReduction * zoomIncrement;
struct bbiChromUsage *usage = usageList;
struct bbiSummary oneSummary, *sum = NULL;
struct bbiBoundsArray *boundsArray, *boundsPt, *boundsEnd;
boundsPt = AllocArray(boundsArray, initialReductionCount);
boundsEnd = boundsPt + initialReductionCount;
*retDataStart = ftell(f);
writeOne(f, initialReductionCount);
boolean firstRow = TRUE;

struct bbiSumOutStream *stream = bbiSumOutStreamOpen(itemsPerSlot, f, doCompress);

/* remove initial browser and track lines */
lineFileRemoveInitialCustomTrackLines(lf);

for (;;)
    {
    /* Get next line of input if any. */
    char *row[5];
    int rowSize = lineFileChopNext(lf, row, ArraySize(row));

    /* Output last section and break if at end of file. */
    if (rowSize == 0 && sum != NULL)
	{
	bbiOutputOneSummaryFurtherReduce(sum, &twiceReducedList, doubleReductionSize,
		&boundsPt, boundsEnd, lm, stream);
	break;
	}

    /* Parse out row. */
    char *chrom = row[0];
    bits32 start = sqlUnsigned(row[1]);
    bits32 end = sqlUnsigned(row[2]);
    float val = sqlFloat(row[3]);

    /* Update total summary stuff. */
    bits32 size = end-start;
    if (firstRow)
	{
	totalSum->validCount = size;
	totalSum->minVal = totalSum->maxVal = val;
	totalSum->sumData = val*size;
	totalSum->sumSquares = val*val*size;
	firstRow = FALSE;
	}
    else
	{
	totalSum->validCount += size;
	if (val < totalSum->minVal) totalSum->minVal = val;
	if (val > totalSum->maxVal) totalSum->maxVal = val;
	totalSum->sumData += val*size;
	totalSum->sumSquares += val*val*size;
	}

    /* If new chromosome output existing block. */
    if (differentString(chrom, usage->name))
	{
	usage = usage->next;
	bbiOutputOneSummaryFurtherReduce(sum, &twiceReducedList, doubleReductionSize,
		&boundsPt, boundsEnd, lm, stream);
	sum = NULL;
	}
    /* If start past existing block then output it. */
    else if (sum != NULL && sum->end <= start)
	{
	bbiOutputOneSummaryFurtherReduce(sum, &twiceReducedList, doubleReductionSize,
		&boundsPt, boundsEnd, lm, stream);
	sum = NULL;
	}

    /* If don't have a summary we're working on now, make one.
     * oneSummary is a stack buffer reused for every open summary; its
     * end is clipped to the chromosome size. */
    if (sum == NULL)
	{
	oneSummary.chromId = usage->id;
	oneSummary.start = start;
	oneSummary.end = start + initialReduction;
	if (oneSummary.end > usage->size) oneSummary.end = usage->size;
	oneSummary.minVal = oneSummary.maxVal = val;
	oneSummary.sumData = oneSummary.sumSquares = 0.0;
	oneSummary.validCount = 0;
	sum = &oneSummary;
	}

    /* Deal with case where might have to split an item between multiple summaries.  This
     * loop handles all but the final affected summary in that case. */
    while (end > sum->end)
	{
	verbose(3, "Splitting start %d, end %d, sum->start %d, sum->end %d\n", start, end, sum->start, sum->end);
	/* Fold in bits that overlap with existing summary and output. */
	bits32 overlap = rangeIntersection(start, end, sum->start, sum->end);
	sum->validCount += overlap;
	if (sum->minVal > val) sum->minVal = val;
	if (sum->maxVal < val) sum->maxVal = val;
	sum->sumData += val * overlap;
	sum->sumSquares += val*val * overlap;
	bbiOutputOneSummaryFurtherReduce(sum, &twiceReducedList, doubleReductionSize,
		&boundsPt, boundsEnd, lm, stream);
	size -= overlap;

	/* Move summary to next part. */
	sum->start = start = sum->end;
	sum->end = start + initialReduction;
	if (sum->end > usage->size) sum->end = usage->size;
	sum->minVal = sum->maxVal = val;
	sum->sumData = sum->sumSquares = 0.0;
	sum->validCount = 0;
	}

    /* Add to summary. */
    sum->validCount += size;
    if (sum->minVal > val) sum->minVal = val;
    if (sum->maxVal < val) sum->maxVal = val;
    sum->sumData += val * size;
    sum->sumSquares += val*val * size;
    }
bbiSumOutStreamClose(&stream);

/* Write out 1st zoom index.  boundsPt must have advanced exactly to
 * boundsEnd, i.e. we emitted initialReductionCount summaries. */
int indexOffset = *retIndexStart = ftell(f);
assert(boundsPt == boundsEnd);
cirTreeFileBulkIndexToOpenFile(boundsArray, sizeof(boundsArray[0]), initialReductionCount,
	blockSize, itemsPerSlot, NULL, bbiBoundsArrayFetchKey, bbiBoundsArrayFetchOffset,
	indexOffset, f);

freez(&boundsArray);
slReverse(&twiceReducedList);
return twiceReducedList;
}
void hgGnfMicroarray(char *expTable, char *dataTable, char *atlasFile)
/** Main function that does all the work for new-style*/
/* Parse a GNF atlas file: the header line defines the experiments
 * (loaded into expTable), each following line is one probe id plus
 * expCount data values (written as a tab file and optionally loaded
 * into dataTable).
 * NOTE(review): relies on the file-scope option globals tabDir,
 * chopName, limit, doLoad and database -- confirm they are set by
 * the command-line parser before this runs. */
{
struct lineFile *lf = lineFileOpen(atlasFile, TRUE);
char *line;
int i, wordCount, expCount;
char **row;
float *data;
char *affyId;
struct hash *hash = newHash(17);
FILE *f = NULL;
int dataCount = 0;

/* Open Atlas file and use first line to create experiment table. */
if (!lineFileNextReal(lf, &line))
    errAbort("%s is empty", lf->fileName);
/* Skip the optional "Affy"/"Gene Name" header prefixes before the
 * tab-separated experiment names. */
if (startsWith("Affy", line))
    line += 4;
if (startsWith("Gene Name", line))
    line += 9;
if (line[0] != '\t')
    errAbort("%s doesn't seem to be a new format atlas file", lf->fileName);
expCount = lineToExpTable(line+1, expTable);
if (expCount <= 0)
    errAbort("No experiments in %s it seems", lf->fileName);
warn("%d experiments\n", expCount);

f = hgCreateTabFile(tabDir, dataTable);
AllocArray(row, expCount);
AllocArray(data, expCount);
while (lineFileNextReal(lf, &line))
    {
    /* First word is the probe id, the rest are one value per experiment. */
    affyId = nextWord(&line);
    wordCount = chopByWhite(line, row, expCount);
    if (wordCount != expCount)
	errAbort("Expecting %d data points, got %d line %d of %s",
	    expCount, wordCount, lf->lineIx, lf->fileName);
    /* Optionally trim a suffix (e.g. chip-name decoration) off the id. */
    if (chopName != NULL)
	{
	char *e = stringIn(chopName, affyId);
	if (e != NULL)
	    *e = 0;
	}
    if (hashLookup(hash, affyId))
	{
	warn("Duplicate %s, skipping all but first.", affyId);
	continue;
	}
    for (i=0; i<expCount; ++i)
	{
	data[i] = sqlFloat(row[i]);
	}
    shortDataOut(f, affyId, expCount, data);
    ++dataCount;
    /* limit == 0 means no cap on rows processed. */
    if (limit != 0 && dataCount >= limit)
	break;
    }
lineFileClose(&lf);
if (doLoad)
    {
    struct sqlConnection *conn = sqlConnect(database);
    expDataCreateTable(conn, dataTable);
    hgLoadTabFile(conn, tabDir, dataTable, &f);
    hgRemoveTabFile(tabDir, dataTable);
    sqlDisconnect(&conn);
    }
}
/* bedGraphLoadItems - an ordinary bed load, but we are interested
 * in only the chrom, start, end, and the graphColumn */
static void bedGraphLoadItems(struct track *tg)
/* Query the track's table over the current window and build the
 * tg->items list of bedGraphItem, taking the data value from the
 * cart-selected graph column. */
{
struct sqlConnection *conn;
struct sqlResult *sr = (struct sqlResult *) NULL;
char **row = (char **)NULL;
int rowOffset = 0;
struct bedGraphItem *bgList = NULL;
int itemsLoaded = 0;
int colCount = 0;
struct wigCartOptions *wigCart = (struct wigCartOptions *) tg->wigCartData;
int graphColumn = 5;
char *tableName;

/* Special case: affyTranscription colors itself off a sibling track. */
if(sameString(tg->table, "affyTranscription"))
    wigCart->colorTrack = "affyTransfrags";
graphColumn = wigCart->graphColumn;

#ifndef GBROWSE
/* Custom tracks live in the trash database under their own table name. */
if (isCustomTrack(tg->table) && tg->customPt)
    {
    struct customTrack *ct = (struct customTrack *) tg->customPt;
    tableName = ct->dbTableName;
    conn = hAllocConn(CUSTOM_TRASH);
    }
else
#endif /* GBROWSE */
    {
    tableName = tg->table;
    conn = hAllocConnTrack(database, tg->tdb);
    }

sr = hRangeQuery(conn, tableName, chromName, winStart, winEnd, NULL, &rowOffset);

colCount = sqlCountColumns(sr) - rowOffset;
/* Must have at least four good columns */
if (colCount < 4)
    errAbort("bedGraphLoadItems: table %s only has %d data columns, must be at least 4", tableName, colCount);
if (colCount < graphColumn)
    errAbort("bedGraphLoadItems: table %s only has %d data columns, specified graph column %d does not exist", tableName, colCount, graphColumn);

/* before loop, determine actual row[graphColumn] index */
graphColumn += (rowOffset - 1);

while ((row = sqlNextRow(sr)) != NULL)
    {
    struct bedGraphItem *bg;
    struct bed *bed;

    ++itemsLoaded;
    /* load chrom, start, end */
    bed = bedLoadN(row+rowOffset, 3);

    AllocVar(bg);
    bg->start = bed->chromStart;
    bg->end = bed->chromEnd;
    /* Use the table's name column when present and not itself the graph
     * column; otherwise synthesize "chrom.N" names. */
    if ((colCount > 4) && ((graphColumn + rowOffset) != 4))
	bg->name = cloneString(row[3+rowOffset]);
    else
	{
	char name[128];
	safef(name,ArraySize(name),"%s.%d", bed->chrom, itemsLoaded);
	bg->name = cloneString(name);
	}
    bg->dataValue = sqlFloat(row[graphColumn]);
    /* filled in by DrawItems */
    bg->graphUpperLimit = wigEncodeStartingUpperLimit;
    bg->graphLowerLimit = wigEncodeStartingLowerLimit;
    slAddHead(&bgList, bg);
    bedFree(&bed);
    }

sqlFreeResult(&sr);
hFreeConn(&conn);
slReverse(&bgList);
tg->items = bgList;
}	/* bedGraphLoadItems() */
static void loadDatabase(char *database, char *track, int bedSize, struct bedStub *bedList) /* Load database from bedList. */ { struct sqlConnection *conn; struct dyString *dy = newDyString(1024); char *tab = (char *)NULL; int loadOptions = (optionExists("onServer") ? SQL_TAB_FILE_ON_SERVER : 0); if ( ! noLoad ) conn = sqlConnect(database); if ((char *)NULL != tmpDir) tab = cloneString(rTempName(tmpDir,"loadBed",".tab")); else tab = cloneString("bed.tab"); if (bedDetail && sqlTable == NULL) errAbort("bedDetail format requires sqlTable option"); if (bedDetail && !strictTab) errAbort("bedDetail format must be tab separated"); if (bedDetail && !noBin) noBin = TRUE; /* First make table definition. */ if (sqlTable != NULL && !oldTable) { /* Read from file. */ char *sql, *s; readInGulp(sqlTable, &sql, NULL); /* Chop off end-of-statement semicolon if need be. */ s = strchr(sql, ';'); if (s != NULL) *s = 0; if ( !noLoad ) { if (renameSqlTable) { char *pos = stringIn("CREATE TABLE ", sql); if (pos == NULL) errAbort("Can't find CREATE TABLE in %s\n", sqlTable); char *oldSql = cloneString(sql); nextWord(&pos); nextWord(&pos); char *tableName = nextWord(&pos); sql = replaceChars(oldSql, tableName, track); } verbose(1, "Creating table definition for %s\n", track); sqlRemakeTable(conn, track, sql); if (!noBin) addBinToEmptyTable(conn, track); adjustSqlTableColumns(conn, track, bedSize); } freez(&sql); } else if (!oldTable) { int minLength; if (noLoad) minLength=6; else if (maxChromNameLength) minLength = maxChromNameLength; else minLength = hGetMinIndexLength(database); verbose(2, "INDEX chrom length: %d\n", minLength); /* Create definition statement. 
*/ verbose(1, "Creating table definition for %s\n", track); dyStringPrintf(dy, "CREATE TABLE %s (\n", track); if (!noBin) dyStringAppend(dy, " bin smallint unsigned not null,\n"); dyStringAppend(dy, " chrom varchar(255) not null,\n"); dyStringAppend(dy, " chromStart int unsigned not null,\n"); dyStringAppend(dy, " chromEnd int unsigned not null,\n"); if (bedSize >= 4) maybeBedGraph(4, dy, " name varchar(255) not null,\n"); if (bedSize >= 5) { if (allowNegativeScores) maybeBedGraph(5, dy, " score int not null,\n"); else maybeBedGraph(5, dy, " score int unsigned not null,\n"); } if (bedSize >= 6) maybeBedGraph(6, dy, " strand char(1) not null,\n"); if (bedSize >= 7) maybeBedGraph(7, dy, " thickStart int unsigned not null,\n"); if (bedSize >= 8) maybeBedGraph(8, dy, " thickEnd int unsigned not null,\n"); /* As of 2004-11-22 the reserved field is used as itemRgb in code */ if (bedSize >= 9) maybeBedGraph(9, dy, " reserved int unsigned not null,\n"); if (bedSize >= 10) maybeBedGraph(10, dy, " blockCount int unsigned not null,\n"); if (bedSize >= 11) maybeBedGraph(11, dy, " blockSizes longblob not null,\n"); if (bedSize >= 12) maybeBedGraph(12, dy, " chromStarts longblob not null,\n"); if (bedSize >= 13) maybeBedGraph(13, dy, " expCount int unsigned not null,\n"); if (bedSize >= 14) maybeBedGraph(14, dy, " expIds longblob not null,\n"); if (bedSize >= 15) maybeBedGraph(15, dy, " expScores longblob not null,\n"); dyStringAppend(dy, "#Indices\n"); if (nameIx && (bedSize >= 4) && (0 == bedGraph)) dyStringAppend(dy, " INDEX(name(16)),\n"); if (noBin) { dyStringPrintf(dy, " INDEX(chrom(%d),chromStart)\n", minLength); } else { dyStringPrintf(dy, " INDEX(chrom(%d),bin)\n", minLength); } dyStringAppend(dy, ")\n"); if (noLoad) verbose(2,"%s", dy->string); else sqlRemakeTable(conn, track, dy->string); } verbose(1, "Saving %s\n", tab); writeBedTab(tab, bedList, bedSize); if ( ! 
noLoad ) { verbose(1, "Loading %s\n", database); if (customTrackLoader) sqlLoadTabFile(conn, tab, track, loadOptions|SQL_TAB_FILE_WARN_ON_WARN); else sqlLoadTabFile(conn, tab, track, loadOptions); if (! noHistory) { char comment[256]; /* add a comment to the history table and finish up connection */ safef(comment, sizeof(comment), "Add %d element(s) from bed list to %s table", slCount(bedList), track); hgHistoryComment(conn, comment); } if(fillInScoreColumn != NULL) { char query[500]; char buf[500]; struct sqlResult *sr; safef(query, sizeof(query), "select sum(score) from %s", track); if(sqlQuickQuery(conn, query, buf, sizeof(buf))) { unsigned sum = sqlUnsigned(buf); if (!sum) { safef(query, sizeof(query), "select min(%s), max(%s) from %s", fillInScoreColumn, fillInScoreColumn, track); if ((sr = sqlGetResult(conn, query)) != NULL) { char **row = sqlNextRow(sr); if(row != NULL) { float min = sqlFloat(row[0]); float max = sqlFloat(row[1]); if ( !(max == -1 && min == -1)) // if score is -1 then ignore, as if it werent present { if (max == min || sameString(row[0],row[1])) // this will lead to 'inf' score value in SQL update causing an error errAbort("Could not set score in table %s max(%s)=min(%s)=%s\n", track, fillInScoreColumn, fillInScoreColumn, row[0]); sqlFreeResult(&sr); // Calculate a, b s/t f(x) = ax + b maps min-max => minScore-1000 float a = (1000-minScore) / (max - min); float b = 1000 - ((1000-minScore) * max) / (max - min); safef(query, sizeof(query), "update %s set score = round((%f * %s) + %f)", track, a, fillInScoreColumn, b); int changed = sqlUpdateRows(conn, query, NULL); verbose(2, "update query: %s; changed: %d\n", query, changed); } else { sqlFreeResult(&sr); verbose(2, "score not updated; all values for column %s are -1\n", fillInScoreColumn); } } } } } } sqlDisconnect(&conn); /* if temp dir specified, unlink file to make it disappear */ if ((char *)NULL != tmpDir) unlink(tab); } else verbose(1, "No load option selected, see file: %s\n", tab); } 
/* static void loadDatabase() */
void chainLoadItems(struct track *tg)
/* Load up all of the chains from correct table into tg->items
 * item list.  At this stage to conserve memory for other tracks
 * we don't load the links into the components list until draw time. */
{
char *table = tg->table;
struct chain chain;
int rowOffset;
char **row;
struct sqlConnection *conn = hAllocConn(database);
struct sqlResult *sr = NULL;
struct linkedFeatures *list = NULL, *lf;
int qs;
char *optionChrStr;
char extraWhere[128] ;
struct cartOptions *chainCart;

chainCart = (struct cartOptions *) tg->extraUiData;

/* Build the range query, optionally restricted by the cart's
 * query-chromosome filter and/or minimum-score filter. */
optionChrStr = cartUsualStringClosestToHome(cart, tg->tdb, FALSE,
	"chromFilter", "All");
if (startsWith("chr",optionChrStr))
    {
    snprintf(extraWhere, sizeof(extraWhere),
	    "qName = \"%s\" and score > %d",optionChrStr,
	    chainCart->scoreFilter);
    sr = hRangeQuery(conn, table, chromName, winStart, winEnd,
	    extraWhere, &rowOffset);
    }
else
    {
    if (chainCart->scoreFilter > 0)
	{
	snprintf(extraWhere, sizeof(extraWhere),
		"score > \"%d\"",chainCart->scoreFilter);
	sr = hRangeQuery(conn, table, chromName, winStart, winEnd,
		extraWhere, &rowOffset);
	}
    else
	{
	snprintf(extraWhere, sizeof(extraWhere), " ");
	sr = hRangeQuery(conn, table, chromName, winStart, winEnd,
		NULL, &rowOffset);
	}
    }
while ((row = sqlNextRow(sr)) != NULL)
    {
    char buf[16];
    chainHeadStaticLoad(row + rowOffset, &chain);
    AllocVar(lf);
    lf->start = lf->tallStart = chain.tStart;
    lf->end = lf->tallEnd = chain.tEnd;
    lf->grayIx = maxShade;
    if (chainCart->chainColor == chainColorScoreColors)
	{
	/* NOTE(review): column 11 past rowOffset is assumed to hold the
	 * 0-100 normalized score -- confirm against the table schema. */
	float normScore = sqlFloat((row+rowOffset)[11]);
	lf->grayIx = hGrayInRange(normScore, 0, 100, maxShade+1);
	lf->score = normScore;
	}
    else
	lf->score = chain.score;

    lf->filterColor = -1;

    /* For minus-strand queries report distance from the query end. */
    if (chain.qStrand == '-')
	{
	lf->orientation = -1;
	qs = chain.qSize - chain.qEnd;
	}
    else
	{
	lf->orientation = 1;
	qs = chain.qStart;
	}
    int len = strlen(chain.qName) + 32;
    lf->name = needMem(len);
    safef(lf->name, len, "%s %c %dk", chain.qName, chain.qStrand, qs/1000);
    /* Stash the chain id as a string for later link loading at draw time. */
    safef(buf, sizeof(buf), "%d", chain.id);
    lf->extra = cloneString(buf);
    slAddHead(&list, lf);
    }

/* Make sure this is sorted if in full mode. Sort by score when
 * coloring by score and in dense */
if (tg->visibility != tvDense)
    slSort(&list, linkedFeaturesCmpStart);
else if ((tg->visibility == tvDense) &&
	(chainCart->chainColor == chainColorScoreColors))
    slSort(&list, chainCmpScore);
else
    slReverse(&list);
tg->items = list;

/* Clean up. */
sqlFreeResult(&sr);
hFreeConn(&conn);
}	/* chainLoadItems() */