void tagStormQueryMain(char *query) /* tagStormQuery - Find stanzas in tag storm based on SQL-like query.. */ { /* Get parsed out query */ struct lineFile *lf = lineFileOnString("query", TRUE, cloneString(query)); struct rqlStatement *rql = rqlStatementParse(lf); int stormCount = slCount(rql->tableList); if (stormCount != 1) errAbort("Can only handle one tag storm file in query, got %d", stormCount); char *tagsFileName = rql->tableList->name; /* Read in tags */ struct tagStorm *tags = tagStormFromFile(tagsFileName); /* Expand any field names with wildcards. */ struct slName *allFieldList = tagStormFieldList(tags); rql->fieldList = wildExpandList(allFieldList, rql->fieldList, TRUE); /* Traverse tree applying query */ struct lm *lm = lmInit(0); doSelect = sameWord(rql->command, "select"); traverse(tags, tags->forest, rql, lm); tagStormFree(&tags); if (sameWord(rql->command, "count")) printf("%d\n", matchCount); }
static void addExtras(char *extraFile, struct trackHubCheckOptions *checkOptions) /* Add settings from extra file (e.g. for specific hub display site) */ { verbose(2, "Accepting extra settings in '%s'\n", extraFile); checkOptions->extraFile = extraFile; checkOptions->extra = hashNew(0); struct lineFile *lf = NULL; if (startsWith("http", extraFile)) { struct dyString *ds = netSlurpUrl(extraFile); char *s = dyStringCannibalize(&ds); lf = lineFileOnString(extraFile, TRUE, s); } else { lf = lineFileOpen(extraFile, TRUE); } char *line; while (lineFileNextReal(lf, &line)) { hashAdd(checkOptions->extra, line, NULL); } lineFileClose(&lf); verbose(3, "Found %d extra settings\n", hashNumEntries(checkOptions->extra)); }
char *quotedPrintableDecode(char *input)
/* Use Quoted-Printable standard to decode a string. Return decoded
 * string which will be freeMem'd. */
{
/* Decoding only ever shrinks the text, so the input length bounds the output. */
size_t inLen = strlen(input);
char *decoded = needMem(inLen + 1);
size_t outPos = 0;
char *line = NULL;
int lineSize = 0;

/* Walk the input line by line; quotedPCollapse() decodes each line in place
 * and reports whether a hard newline should follow it. */
struct lineFile *lf = lineFileOnString("", TRUE, cloneString(input));
while (lineFileNext(lf, &line, &lineSize))
    {
    boolean addNewline = quotedPCollapse(line);
    lineSize = strlen(line);
    memcpy(decoded + outPos, line, lineSize);
    outPos += lineSize;
    if (addNewline)
        decoded[outPos++] = '\n';
    }
lineFileClose(&lf);  /* also frees the cloned input string */

decoded[outPos] = 0; /* terminate text string */
return decoded;
}
struct gapCalc *gapCalcFromString(char *s)
/* Return gapCalc from description string. */
{
/* Read the description through an in-memory lineFile over a private copy;
 * lineFileClose frees that copy along with the lineFile itself. */
struct lineFile *descLf = lineFileOnString("string", TRUE, cloneString(s));
struct gapCalc *calc = gapCalcRead(descLf);
lineFileClose(&descLf);
return calc;
}
struct rqlStatement *rqlStatementParseString(char *string)
/* Return a parsed-out RQL statement based on string */
{
/* Parse from an in-memory lineFile over a clone of the input; closing the
 * lineFile frees the clone too, so the returned statement is self-contained. */
struct lineFile *queryLf = lineFileOnString("query", TRUE, cloneString(string));
struct rqlStatement *statement = rqlStatementParse(queryLf);
lineFileClose(&queryLf);
return statement;
}
struct asObject *asParseText(char *text)
/* Parse autoSql from text (as opposed to file). */
{
/* Parse via an in-memory lineFile wrapped around a clone of the text. */
char *dupe = cloneString(text);
struct lineFile *lf = lineFileOnString("text", TRUE, dupe);
struct asObject *objList = asParseLineFile(lf);
/* Fix: the lineFile struct was previously leaked (only the buffer was freed
 * via freez(&dupe), leaving lf->buf dangling).  lineFileClose frees both the
 * lineFile and the cloned buffer, matching the pattern used elsewhere in
 * this codebase (e.g. gapCalcFromString, rqlStatementParseString). */
lineFileClose(&lf);
return objList;
}
void trySubmitUpload(struct sqlConnection *conn, char *rawText) /* Called when they've submitted from uploads page */ { struct lineFile *lf = lineFileOnString("uploaded data", TRUE, rawText); struct customPp *cpp = customPpNew(lf); struct hash *settings = hashNew(8); addIfNonempty(settings, hggMinVal, "minVal"); addIfNonempty(settings, hggMaxVal, "maxVal"); addIfNonempty(settings, hggMaxGapToFill, "maxGapToFill"); addIfNonempty(settings, hggLabelVals, "linesAt"); struct customTrack *trackList = chromGraphParser(database, cpp, cartUsualString(cart, hggFormatType, formatNames[0]), cartUsualString(cart, hggMarkerType, cgfMarkerGenomic), cartUsualString(cart, hggColumnLabels, colLabelNames[0]), nullIfAllSpace(cartUsualString(cart, hggDataSetName, NULL)), nullIfAllSpace(cartUsualString(cart, hggDataSetDescription, NULL)), settings, TRUE); updateCustomTracks(trackList); }
static char *limitText(char *text) /* read text string and limit to 1000 actual data lines */ { struct dyString *limitedText = dyStringNew(0); /* yes, opening with FALSE so as not to destroy the original string */ struct lineFile *lf = lineFileOnString("limitText", FALSE, text); char *lineStart = NULL; int lineLength = 0; int legitimateLineCount = 0; while (legitimateLineCount < 1000 && lineFileNext(lf, &lineStart, &lineLength)) { char *s, c; s = skipLeadingSpaces(lineStart); c = s[0]; if (c != 0 && c != '#') ++legitimateLineCount; dyStringAppendN(limitedText, lineStart, lineLength); } if ((legitimateLineCount == 1000) && lineFileNext(lf, &lineStart, &lineLength)) warn("WARNING: defined regions limit of 1000 definitions reached at line %d<BR>\n", lf->lineIx-1); lineFileClose(&lf); return (dyStringCannibalize(&limitedText)); }
static struct bed4 *parseRegionInput(char *db, char *inputString, int maxRegions, int maxErrs, struct dyString *dyWarn)
/* scan the user region definition, turn into a bed list */
{
int regionCount = 0;
int errCount = 0;
struct bed4 *bedList = NULL;
/* lineFile takes the input string directly (zTerm TRUE); freed on close. */
struct lineFile *lf = lineFileOnString("userData", TRUE, inputString);
char *line = NULL;
/* One region definition per non-blank, non-comment line. */
while (lineFileNextReal(lf, &line))
    {
    char *chromName = NULL;
    int chromStart = 0;
    int chromEnd = 0;
    char *regionName = NULL;
    // Chop a copy of line so we can display line if there's an error.
    // (VLA sized by the line just read.)
    char copy[strlen(line)+1];
    safecpy(copy, sizeof(copy), line);
    char *words[5];
    int wordCount = chopByWhite(copy, words, ArraySize(words));
    boolean badFormat = FALSE;  /* set when the line can't be parsed at all */
    boolean gotError = FALSE;   /* set for any error (format, number, chrom, coords) */
    /* might be something of the form: chrom:start-end optionalRegionName */
    if (((1 == wordCount) || (2 == wordCount)) &&
        hgParseChromRange(NULL, words[0], &chromName, &chromStart, &chromEnd))
        {
        if (2 == wordCount)
            regionName = cloneString(words[1]);
        }
    else if (!((3 == wordCount) || (4 == wordCount)))
        {
        /* Neither a position range nor a 3/4-column BED line. */
        dyStringPrintf(dyWarn, "line %d: '%s': "
                       "unrecognized format. Please enter 3- or 4-column BED or "
                       "a chr:start-end position range optionally followed by a name.\n",
                       lf->lineIx, line);
        badFormat = TRUE;
        gotError = TRUE;
        }
    else
        {
        /* BED-style line: chrom start end [name]. */
        chromName = words[0];
        // Make sure chromStart and chromEnd are numbers
        if (!isNumericString(words[1]))
            {
            dyStringPrintf(dyWarn, "line %d: '%s': chromStart must be a number but is '%s'\n",
                           lf->lineIx, line, words[1]);
            gotError = TRUE;
            }
        if (!isNumericString(words[2]))
            {
            dyStringPrintf(dyWarn, "line %d: '%s': chromEnd must be a number but is '%s'\n",
                           lf->lineIx, line, words[2]);
            gotError = TRUE;
            }
        if (! gotError)
            {
            /* atoi is safe here: both words passed isNumericString above. */
            chromStart = atoi(words[1]);
            chromEnd = atoi(words[2]);
            if (wordCount > 3)
                regionName = cloneString(words[3]);
            }
        }
    /* Validate the chrom name against the assembly (NULL when unparsed above). */
    char *officialChromName = chromName ? hgOfficialChromName(db, chromName) : NULL;
    if (! badFormat)
        {
        if (NULL == officialChromName)
            {
            dyStringPrintf(dyWarn, "line %d: '%s': chrom name '%s' not recognized in this assembly\n",
                           lf->lineIx, line, chromName ? chromName : words[0]);
            gotError = TRUE;
            }
        else if (illegalCoordinate(db, officialChromName, chromStart, chromEnd, line, lf->lineIx, dyWarn))
            {
            gotError = TRUE;
            }
        }
    if (gotError)
        {
        /* Accumulate errors; maxErrs <= 0 means unlimited. */
        errCount++;
        if (errCount > maxErrs && maxErrs > 0)
            {
            dyStringPrintf(dyWarn, "Exceeded maximum number of errors (%d), quitting\n",
                           maxErrs);
            break;
            }
        else
            continue;
        }
    /* Enforce the region cap; maxRegions <= 0 means unlimited. */
    ++regionCount;
    if (regionCount > maxRegions && maxRegions > 0)
        {
        dyStringPrintf(dyWarn, "line %d: limit of %d region definitions exceeded, skipping the rest\n",
                       lf->lineIx, maxRegions);
        break;
        }
    struct bed4 *bedEl = bed4New(officialChromName, chromStart, chromEnd, regionName);
    slAddHead(&bedList, bedEl);
    }
lineFileClose(&lf);
// Keep regions in same order as user entered them:
slReverse(&bedList);
return (bedList);
}
void doPastedIdentifiers(struct sqlConnection *conn) /* Process submit in paste identifiers page. */ { char *idText = trimSpaces(cartString(cart, hgtaPastedIdentifiers)); htmlOpen("Table Browser (Input Identifiers)"); if (isNotEmpty(idText)) { /* Write terms to temp file, checking whether they have matches, and * save temp file name. */ boolean saveIdText = (strlen(idText) < MAX_IDTEXT); char *idTextForLf = saveIdText ? cloneString(idText) : idText; struct lineFile *lf = lineFileOnString("idText", TRUE, idTextForLf); char *line, *word; struct tempName tn; FILE *f; int totalTerms = 0, foundTerms = 0; struct slName* missingTerms = NULL; struct dyString *exampleMissingIds = dyStringNew(256); char *actualDb = database; if (sameWord(curTable, WIKI_TRACK_TABLE)) actualDb = wikiDbName(); struct hTableInfo *hti = maybeGetHti(actualDb, curTable, conn); char *idField = getIdField(actualDb, curTrack, curTable, hti); if (idField == NULL) { warn("Sorry, I can't tell which field of table %s to treat as the " "identifier field.", curTable); webNewSection("Table Browser"); cartRemove(cart, hgtaIdentifierDb); cartRemove(cart, hgtaIdentifierTable); cartRemove(cart, hgtaIdentifierFile); mainPageAfterOpen(conn); htmlClose(); return; } struct slName *allTerms = NULL, *term; while (lineFileNext(lf, &line, NULL)) { while ((word = nextWord(&line)) != NULL) { term = slNameNew(word); slAddHead(&allTerms, term); totalTerms++; } } slReverse(&allTerms); lineFileClose(&lf); char *extraWhere = NULL; int maxIdsInWhere = cartUsualInt(cart, "hgt_maxIdsInWhere", DEFAULT_MAX_IDS_IN_WHERE); if (totalTerms > 0 && totalTerms <= maxIdsInWhere) extraWhere = slNameToInExpression(idField, allTerms); struct lm *lm = lmInit(0); struct hash *matchHash = getAllPossibleIds(conn, lm, idField, extraWhere); trashDirFile(&tn, "hgtData", "identifiers", ".key"); f = mustOpen(tn.forCgi, "w"); for (term = allTerms; term != NULL; term = term->next) { struct slName *matchList = NULL, *match; if (matchHash == NULL) { 
matchList = slNameNew(term->name); } else { /* Support multiple alias->id mappings: */ char upcased[1024]; safecpy(upcased, sizeof(upcased), term->name); touppers(upcased); struct hashEl *hel = hashLookup(matchHash, upcased); if (hel != NULL) { matchList = slNameNew((char *)hel->val); while ((hel = hashLookupNext(hel)) != NULL) { match = slNameNew((char *)hel->val); slAddHead(&matchList, match); } } } if (matchList != NULL) { foundTerms++; for (match = matchList; match != NULL; match = match->next) { mustWrite(f, match->name, strlen(match->name)); mustWrite(f, "\n", 1); } } else { slAddHead(&missingTerms, slNameNew(term->name)); } } slReverse(&missingTerms); carefulClose(&f); cartSetString(cart, hgtaIdentifierDb, database); cartSetString(cart, hgtaIdentifierTable, curTable); cartSetString(cart, hgtaIdentifierFile, tn.forCgi); if (saveIdText) freez(&idTextForLf); else cartRemove(cart, hgtaPastedIdentifiers); int missingCount = totalTerms - foundTerms; if (missingCount > 0) { char *xrefTable, *aliasField; getXrefInfo(conn, &xrefTable, NULL, &aliasField); boolean xrefIsSame = xrefTable && sameString(curTable, xrefTable); struct tempName tn; trashDirFile(&tn, "hgt/missingIds", cartSessionId(cart), ".tmp"); FILE *f = mustOpen(tn.forCgi, "w"); int exampleCount = 0; for (term = missingTerms; term != NULL; term = term->next) { if (exampleCount < 10) { ++exampleCount; dyStringPrintf(exampleMissingIds, "%s\n", term->name); } fprintf(f, "%s\n", term->name); } carefulClose(&f); dyStringPrintf(exampleMissingIds, "\n<a href=%s>Complete list of missing identifiers<a>\n", tn.forHtml); warn("Note: %d of the %d given identifiers have no match in " "table %s, field %s%s%s%s%s. " "Try the \"describe table schema\" button for more " "information about the table and field.\n" "%d %smissing identifier(s):\n" "%s\n", (totalTerms - foundTerms), totalTerms, curTable, idField, (xrefTable ? (xrefIsSame ? "" : " or in alias table ") : ""), (xrefTable ? (xrefIsSame ? 
"" : xrefTable) : ""), (xrefTable ? (xrefIsSame ? " or in field " : ", field ") : ""), (xrefTable ? aliasField : ""), exampleCount, exampleCount < missingCount ? "example " : "", exampleMissingIds->string ); webNewSection("Table Browser"); } lmCleanup(&lm); hashFree(&matchHash); } else { cartRemove(cart, hgtaIdentifierFile); } mainPageAfterOpen(conn); htmlClose(); }
static void readPartHeaderMB(struct mimeBuf *b, struct mimePart *p, char *altHeader)
/* Reads the header lines of the mimePart, saves the header settings in a hash.
 * Input comes either from altHeader (wrapped in a lineFile) when non-NULL,
 * or from the mimeBuf b via getLineMB. Keys are stored lower-cased. */
{
struct dyString *fullLine = dyStringNew(0);
char *key=NULL, *val=NULL;
struct lineFile *lf = NULL;
char *line = NULL;
char *lineAhead = NULL;  /* one-line lookahead, used to detect folded lines */
int size = 0;
p->hdr = newHash(3);
//debug
//fprintf(stderr,"headers dumpMB: ");
//dumpMB(b);
//debug
if (altHeader)
    {
    lf = lineFileOnString("MIME Header", TRUE, altHeader);
    }
/* read ahead one line, skipping any leading blanks lines */
/* NOTE(review): lineFileNext's return value is not checked here; if the
 * header text ends prematurely, lineAhead may be left stale — confirm
 * callers always supply a blank-line-terminated header. */
do
    {
    if (altHeader)
        lineFileNext(lf, &lineAhead, &size);
    else
        lineAhead = getLineMB(b);
    }
while (sameString(lineAhead,""));
do
    {
    /* accumulate a full header line - some emailers split into mpl lines */
    /* (RFC folding: a continuation line starts with whitespace, so keep
     * appending while the lookahead line begins with a space/tab) */
    dyStringClear(fullLine);
    do
        {
        line = lineAhead;
        if (altHeader)
            lineFileNext(lf, &lineAhead, &size);
        else
            lineAhead = getLineMB(b);
        dyStringAppend(fullLine,line);
        if (!altHeader)
            freez(&line);  /* getLineMB lines are owned by us; lineFile lines are not */
        }
    while (isspace(lineAhead[0]));
    line = fullLine->string;
    //fprintf(stderr,"found a line! [%s]\n",line);  //debug
    /* Split "Key: value" at the first colon. */
    key = line;
    val = strchr(line,':');
    if (!val)
        errAbort("readPartHeaderMB error - header-line colon not found, line=[%s]",line);
    *val = 0;
    val++;
    key=trimSpaces(key);
    // since the hash is case-sensitive, convert to lower case for ease of matching
    tolowers(key);
    val=trimSpaces(val);
    hashAdd(p->hdr,key,cloneString(val));
    //debug
    //fprintf(stderr,"MIME header: key=[%s], val=[%s]\n",key,val);
    //fflush(stderr);
    }
while (!sameString(lineAhead,""));  /* blank line ends the header block */
if (altHeader)
    {
    /* Remember the newline style of the header for later body parsing;
     * nlType is file-scope state. */
    if (nlType == nlt_undet)
        nlType = lf->nlType;
    lineFileClose(&lf);
    }
else
    {
    freez(&lineAhead);
    }
dyStringFree(&fullLine);
}
static struct bed *parseRegionInput(char *inputString) /* scan the user region definition, turn into a bed list */ { int itemCount = 0; struct bed *bedList = NULL; struct bed *bedEl; int wordCount; char *words[5]; struct lineFile *lf; lf = lineFileOnString("userData", TRUE, inputString); while (0 != (wordCount = lineFileChopNext(lf, words, ArraySize(words)))) { char *chromName = NULL; int chromStart = 0; int chromEnd = 0; char *regionName = NULL; /* might be something of the form: chrom:start-end optionalRegionName */ if (((1 == wordCount) || (2 == wordCount)) && hgParseChromRange(NULL, words[0], &chromName, &chromStart, &chromEnd)) { if (2 == wordCount) regionName = cloneString(words[1]); } else if (!((3 == wordCount) || (4 == wordCount))) { int i; struct dyString *errMessage = dyStringNew(0); for (i = 0; i < wordCount; ++i) dyStringPrintf(errMessage, "%s ", words[i]); errAbort("line %d: '%s'<BR>\n" "illegal bed size, expected 3 or 4 fields, found %d\n", lf->lineIx, dyStringCannibalize(&errMessage), wordCount); } else { chromName = hgOfficialChromName(database, words[0]); chromStart = sqlSigned(words[1]); chromEnd = sqlSigned(words[2]); if (wordCount > 3) regionName = cloneString(words[3]); } ++itemCount; if (itemCount > 1000) { warn("limit 1000 region definitions reached at line %d<BR>\n", lf->lineIx); break; } AllocVar(bedEl); bedEl->chrom = chromName; if (NULL == bedEl->chrom) errAbort("at line %d, chrom name '%s' %s %s not recognized in this assembly %d", lf->lineIx, words[0], words[1], words[2], wordCount); bedEl->chromStart = chromStart; bedEl->chromEnd = chromEnd; if (illegalCoordinate(bedEl->chrom, bedEl->chromStart, bedEl->chromEnd)) errAbort("illegal input at line %d: %s %d %d", lf->lineIx, bedEl->chrom, bedEl->chromStart, bedEl->chromEnd); if (wordCount > 3) bedEl->name = regionName; else bedEl->name = NULL; /* if we wanted to give artifical names to each item */ #ifdef NOT { char name[128]; safef(name, ArraySize(name), "item_%04d", itemCount); 
bedEl->name = cloneString(name); } #endif slAddHead(&bedList, bedEl); } lineFileClose(&lf); // slSort(&bedList, bedCmp); /* this would do chrom,chromStart order */ slReverse(&bedList); /* with no sort, it is in order as user entered */ return (bedList); }