/// Logs this map ID (file name, optional source, checksum, size, timestamp)
/// at Normal level, prepending @a prefix (may be NULL) when given.
AREXPORT void ArMapId::log(const char *prefix) const
{
  time_t idTime = getTimestamp();
  char timeBuf[500];
  // Pre-terminate: strftime leaves the buffer contents indeterminate when it
  // returns 0 (formatted string did not fit), and the original code would
  // then pass garbage to ArLog::log.
  timeBuf[0] = '\0';
  struct tm *idTm = NULL;

  if (idTime != -1) {
    idTm = localtime(&idTime);
  }
  // Fall back to the literal "NULL" when there is no valid time or when
  // strftime fails to produce any output.
  if ((idTm == NULL) ||
      (strftime(timeBuf, sizeof(timeBuf), "%c", idTm) == 0)) {
    snprintf(timeBuf, sizeof(timeBuf), "NULL");
  }

  ArLog::log(ArLog::Normal,
             "%s%smap %s %s%s checksum = \"%s\" size = %i time = %s",
             ((prefix != NULL) ? prefix : ""),
             ((prefix != NULL) ? " " : ""),
             getFileName(),
             (!ArUtil::isStrEmpty(getSourceName()) ? "source " : ""),
             (!ArUtil::isStrEmpty(getSourceName()) ? getSourceName() : ""),
             getDisplayChecksum(),
             getSize(),
             timeBuf);
}
void DocumentSourceSort::serializeToArray(vector<Value>& array, bool explain) const {
    if (explain) {
        // Explain mode: emit one combined entry describing both the sort key
        // and the coalesced limit (missing Value when there is no limit).
        const Value limitValue = limitSrc ? Value(limitSrc->getLimit()) : Value();
        array.push_back(Value(
            DOC(getSourceName() << DOC("sortKey" << serializeSortKey()
                                                 << "limit" << limitValue))));
        return;
    }

    // Normal mode: $sort gets its own entry, followed by a separate $limit
    // entry when a limit source has been coalesced into this stage.
    array.push_back(Value(DOC(getSourceName() << serializeSortKey())));
    if (limitSrc)
        limitSrc->serializeToArray(array);
}
// Winamp output-plugin Open(): start a new AAC encode session for a PCM
// stream with the given sample rate, channel count and bits per sample.
// Returns 0 on success; takes the ERROR_O(0) path on encoder-init failure.
// bufferlenms/prebufferms are part of the Winamp Open() contract but are
// unused by this implementation.
int Open(int lSamprate, int wChannels, int wBitsPerSample, int bufferlenms, int prebufferms)
{
    CMyEncCfg cfg;
    char OutFilename[MAX_PATH], *srcFilename=NULL; // buf[MAX_PATH],
    // *tsrcFilename;

    // Reset output-position bookkeeping for the new stream.
    w_offset = writtentime = 0;
    numchan = wChannels;
    srate = lSamprate;
    bps = wBitsPerSample;

    // NOTE(review): unbounded strcpy — assumes cfg.OutDir always fits in
    // config_AACoutdir; confirm both buffer sizes match.
    strcpy(config_AACoutdir,cfg.OutDir);
    GetNewFileName(OutFilename);

    Cpcmaac=new Cfaac();
#ifdef USE_IMPORT_TAG
/*	GetWindowText(out.hMainWindow,buf,sizeof(buf));
	tsrcFilename=getWASourceName(buf);
	srcFilename=Cpcmaac->getSourceFilename(cfg.TagSrcPath,tsrcFilename,cfg.TagSrcExt);
	FREE_ARRAY(tsrcFilename);*/
    // Derive the tag-source filename from the main window handle.
    srcFilename=getSourceName(out.hMainWindow);
#endif
    // NOTE(review): on this failure path srcFilename and Cpcmaac are not
    // released here — verify ERROR_O performs that cleanup, else this leaks.
    if(!Cpcmaac->Init(srcFilename,OutFilename,lSamprate,wBitsPerSample,wChannels,-1))
        ERROR_O(0);
    FREE_ARRAY(srcFilename);
    return 0;
}
Value DocumentSourceGeoNear::serialize(bool explain) const {
    MutableDocument out;

    // "near" keeps whichever shape it was given: legacy coordinate array or
    // a point document.
    out.setField("near", coordsIsArray ? Value(BSONArray(coords)) : Value(coords));

    // not in buildGeoNearCmd
    out.setField("distanceField", Value(distanceField->getPath(false)));

    out.setField("limit", Value(limit));
    if (maxDistance > 0)
        out.setField("maxDistance", Value(maxDistance));
    out.setField("query", Value(query));
    out.setField("spherical", Value(spherical));
    out.setField("distanceMultiplier", Value(distanceMultiplier));
    if (includeLocs)
        out.setField("includeLocs", Value(includeLocs->getPath(false)));
    out.setField("uniqueDocs", Value(uniqueDocs));

    return Value(DOC(getSourceName() << out.freeze()));
}
Value DocumentSourceCursor::serialize(bool explain) const {
    // we never parse a documentSourceCursor, so we only serialize for explain
    if (!explain)
        return Value();

    // Re-acquire the DB read lock and a client context so the pinned
    // cursor's runner can be restored while we read its query details.
    Lock::DBRead lk(_ns);
    Client::Context ctx(_ns, storageGlobalParams.dbpath, /*doVersion=*/false);

    // Pin the client cursor while we access it so it cannot go away
    // underneath us.
    ClientCursorPin pin(_cursorId);
    ClientCursor* cursor = pin.c();

    uassert(17135, "Cursor deleted. Was the collection or database dropped?",
            cursor);

    Runner* runner = cursor->getRunner();
    runner->restoreState();

    // Optional clauses fall back to an empty Value() when unset.
    return Value(DOC(getSourceName() <<
        DOC("query" << Value(_query)
         << "sort" << (!_sort.isEmpty() ? Value(_sort) : Value())
         << "limit" << (_limit ? Value(_limit->getLimit()) : Value())
         << "fields" << (!_projection.isEmpty() ? Value(_projection) : Value())
    // << "indexOnly" << canUseCoveredIndex(cursor)
    // << "cursorType" << cursor->c()->toString()
        ))); // TODO get more plan information
}
Value DocumentSourceInternalSplitPipeline::serialize(
    boost::optional<ExplainOptions::Verbosity> explain) const {
    // Translate the merge-host requirement into its serialized name.
    // Requirements with no dedicated name (kNone and anything unhandled)
    // leave the string empty.
    std::string mergeName;
    switch (_mergeType) {
        case HostTypeRequirement::kAnyShard:
            mergeName = "anyShard";
            break;
        case HostTypeRequirement::kPrimaryShard:
            mergeName = "primaryShard";
            break;
        case HostTypeRequirement::kLocalOnly:
            mergeName = "localOnly";
            break;
        case HostTypeRequirement::kMongoS:
            mergeName = "mongos";
            break;
        case HostTypeRequirement::kNone:
        default:
            break;
    }

    // An empty name serializes as a missing value.
    const Value mergeTypeValue = mergeName.empty() ? Value() : Value(mergeName);
    return Value(
        Document{{getSourceName(), Value{Document{{"mergeType", mergeTypeValue}}}}});
}
Value DocumentSourceGroup::serialize(bool explain) const {
    MutableDocument out;

    // Serialize the group key. A single expression serializes directly; a
    // decomposed key is rebuilt as a sub-document of its named parts.
    if (_idFieldNames.empty()) {
        invariant(_idExpressions.size() == 1);
        out["_id"] = _idExpressions[0]->serialize(explain);
    } else {
        invariant(_idExpressions.size() == _idFieldNames.size());
        MutableDocument idDoc;
        for (size_t idx = 0; idx < _idExpressions.size(); idx++) {
            idDoc[_idFieldNames[idx]] = _idExpressions[idx]->serialize(explain);
        }
        out["_id"] = idDoc.freezeToValue();
    }

    // Each accumulated field serializes as {<accumulator op>: <expression>}.
    for (size_t idx = 0; idx < vFieldName.size(); ++idx) {
        intrusive_ptr<Accumulator> accum = vpAccumulatorFactory[idx]();
        out[vFieldName[idx]] =
            Value(DOC(accum->getOpName() << vpExpression[idx]->serialize(explain)));
    }

    if (_doingMerge) {
        // This makes the output unparsable (with error) on pre 2.6 shards, but it will never
        // be sent to old shards when this flag is true since they can't do a merge anyway.
        out["$doingMerge"] = Value(true);
    }

    return Value(DOC(getSourceName() << out.freeze()));
}
/* Charge `amount` to the profiling counters for the source on top of the
   source sequence identified by sourceSeqIndex. */
void incForProfiling (GC_state s, size_t amount, GC_sourceSeqIndex sourceSeqIndex) {
  uint32_t *sourceSeq;
  GC_sourceIndex topSourceIndex;

  if (DEBUG_PROFILE)
    fprintf (stderr, "incForProfiling (%"PRIuMAX", "FMTSSI")\n",
             (uintmax_t)amount, sourceSeqIndex);
  assert (sourceSeqIndex < s->sourceMaps.sourceSeqsLength);
  sourceSeq = s->sourceMaps.sourceSeqs[sourceSeqIndex];
  /* sourceSeq[0] holds the sequence length; the top source is the last
     entry, or SOURCES_INDEX_UNKNOWN for an empty sequence. */
  topSourceIndex = sourceSeq[0] > 0
    ? sourceSeq[sourceSeq[0]]
    : SOURCES_INDEX_UNKNOWN;
  if (DEBUG_PROFILE) {
    profileIndent ();
    fprintf (stderr, "bumping %s by %"PRIuMAX"\n",
             getSourceName (s, topSourceIndex), (uintmax_t)amount);
  }
  /* Bump both the per-source top counter and the corresponding master-index
     counter. */
  s->profiling.data->countTop[topSourceIndex] += amount;
  s->profiling.data->countTop[sourceIndexToProfileMasterIndex (s, topSourceIndex)] += amount;
  /* When stack profiling is on, the total updates below are bracketed by
     matching enter/leave calls for the whole sequence — order matters here. */
  if (s->profiling.stack)
    enterForProfiling (s, sourceSeqIndex);
  if (SOURCES_INDEX_GC == topSourceIndex)
    s->profiling.data->totalGC += amount;
  else
    s->profiling.data->total += amount;
  if (s->profiling.stack)
    leaveForProfiling (s, sourceSeqIndex);
}
/* Resolve a source index to its display name. In-range indices go through
   the regular source table; indices at or past sourcesLength are master
   indices resolved via the source-name table. */
char* profileIndexSourceName (GC_state s, GC_sourceIndex i) {
  return (i < s->sourceMaps.sourcesLength)
    ? getSourceName (s, i)
    : s->sourceMaps.sourceNames[profileMasterIndexToSourceNameIndex (s, i)];
}
// Append this flow edge's textual form to buf: the base-edge prefix, then
// the source name, the flow expression wrapped in the MIN_STR / MINGT_STR
// delimiters, and finally the target name.
MM::VOID MM::FlowEdge::toString(MM::String * buf, MM::UINT32 indent)
{
  MM::Edge::toString(buf, indent);  // base-class prefix (indentation etc.)
  getSourceName()->toString(buf);
  buf->space();
  // The arrow is emitted in two halves with the flow expression in between
  // (see MIN_STR / MINGT_STR definitions for the exact delimiter text).
  buf->append((MM::CHAR*)MM::FlowEdge::MIN_STR, MM::FlowEdge::MIN_LEN);
  getExp()->toString(buf);
  buf->append((MM::CHAR*)MM::FlowEdge::MINGT_STR, MM::FlowEdge::MINGT_LEN);
  buf->space();
  getTargetName()->toString(buf);
}
/* Record the current running totals in the stack slot for master index i,
   so a later stack update can compute the delta accrued while on stack. */
void addToStackForProfiling (GC_state s, GC_profileMasterIndex i) {
  GC_profileData profileData = s->profiling.data;
  GC_profileStack stackSlot = getProfileStackInfo (s, i);

  if (DEBUG_PROFILE)
    fprintf (stderr, "adding %s to stack lastTotal = %"PRIuMAX" lastTotalGC = %"PRIuMAX"\n",
             getSourceName (s, i),
             (uintmax_t)profileData->total,
             (uintmax_t)profileData->totalGC);
  stackSlot->lastTotal = profileData->total;
  stackSlot->lastTotalGC = profileData->totalGC;
}
Value DocumentSourceCursor::serialize(bool explain) const {
    // we never parse a documentSourceCursor, so we only serialize for explain
    if (!explain)
        return Value();

    // Get planner-level explain info from the underlying PlanExecutor.
    BSONObjBuilder explainBuilder;
    Status explainStatus(ErrorCodes::InternalError, "");
    {
        const NamespaceString nss(_ns);
        // Hold a collection read lock only for this scope; the executor is
        // restored, explained, then saved again before the lock drops.
        AutoGetCollectionForRead autoColl(pExpCtx->opCtx, nss);

        massert(17392, "No _exec. Were we disposed before explained?",
                _exec);
        _exec->restoreState(pExpCtx->opCtx);
        explainStatus = Explain::explainStages(pExpCtx->opCtx,
                                               _exec.get(),
                                               ExplainCommon::QUERY_PLANNER,
                                               &explainBuilder);
        _exec->saveState();
    }

    MutableDocument out;
    out["query"] = Value(_query);

    // Optional clauses are emitted only when they were actually set.
    if (!_sort.isEmpty())
        out["sort"] = Value(_sort);

    if (_limit)
        out["limit"] = Value(_limit->getLimit());

    if (!_projection.isEmpty())
        out["fields"] = Value(_projection);

    // Add explain results from the query system into the agg explain output.
    if (explainStatus.isOK()) {
        BSONObj explainObj = explainBuilder.obj();
        invariant(explainObj.hasField("queryPlanner"));
        out["queryPlanner"] = Value(explainObj["queryPlanner"]);
    } else {
        // Surface the failure in the output rather than failing the explain.
        out["planError"] = Value(explainStatus.toString());
    }

    return Value(DOC(getSourceName() << out.freezeToValue()));
}
Value DocumentSourceCursor::serialize(bool explain) const {
    // we never parse a documentSourceCursor, so we only serialize for explain
    if (!explain)
        return Value();

    Status explainStatus(ErrorCodes::InternalError, "");
    scoped_ptr<TypeExplain> plan;
    {
        // Hold the DB read lock only while the runner is restored, queried
        // for explain info, and saved again.
        Lock::DBRead lk(pExpCtx->opCtx->lockState(), _ns);
        Client::Context ctx(pExpCtx->opCtx, _ns, /*doVersion=*/ false);

        massert(17392, "No _runner. Were we disposed before explained?",
                _runner);
        _runner->restoreState(pExpCtx->opCtx);

        TypeExplain* explainRaw;
        explainStatus = _runner->getInfo(&explainRaw, NULL);
        // Take ownership of the raw explain object only on success.
        if (explainStatus.isOK())
            plan.reset(explainRaw);

        _runner->saveState();
    }

    MutableDocument out;
    out["query"] = Value(_query);

    // Optional clauses are emitted only when they were actually set.
    if (!_sort.isEmpty())
        out["sort"] = Value(_sort);

    if (_limit)
        out["limit"] = Value(_limit->getLimit());

    if (!_projection.isEmpty())
        out["fields"] = Value(_projection);

    if (explainStatus.isOK()) {
        out["plan"] = Value(extractInfo(plan));
    } else {
        // Surface the failure in the output rather than failing the explain.
        out["planError"] = Value(explainStatus.toString());
    }

    return Value(DOC(getSourceName() << out.freezeToValue()));
}
/* Record an "enter" event for every source in the sequence identified by
   sourceSeqIndex (stack profiling must be enabled). */
void enterForProfiling (GC_state s, GC_sourceSeqIndex sourceSeqIndex) {
  uint32_t *seq;
  uint32_t slot;
  GC_sourceIndex srcIndex;

  if (DEBUG_PROFILE)
    fprintf (stderr, "enterForProfiling ("FMTSSI")\n", sourceSeqIndex);
  assert (s->profiling.stack);
  assert (sourceSeqIndex < s->sourceMaps.sourceSeqsLength);
  seq = s->sourceMaps.sourceSeqs[sourceSeqIndex];
  /* seq[0] holds the element count; entries occupy indices 1..seq[0]. */
  for (slot = 1; slot <= seq[0]; slot++) {
    srcIndex = seq[slot];
    if (DEBUG_ENTER_LEAVE or DEBUG_PROFILE) {
      profileIndent ();
      fprintf (stderr, "(entering %s\n", getSourceName (s, srcIndex));
      profileDepth++;
    }
    /* Record the enter for both the per-source index and its master index. */
    enterSourceForProfiling (s, (GC_profileMasterIndex)srcIndex);
    enterSourceForProfiling (s, sourceIndexToProfileMasterIndex (s, srcIndex));
  }
}
Value DocumentSourceCollStats::serialize(boost::optional<ExplainOptions::Verbosity> explain) const {
    // This stage round-trips the spec it was created with, unchanged.
    const auto stageName = getSourceName();
    return Value(Document{{stageName, _collStatsSpec}});
}
Value DocumentSourceExchange::serialize(boost::optional<ExplainOptions::Verbosity> explain) const {
    // The stage serializes as its full exchange specification in BSON form.
    const BSONObj exchangeSpec = _exchange->getSpec().toBSON();
    return Value(DOC(getSourceName() << exchangeSpec));
}
Value DocumentSourceOut::serialize(boost::optional<ExplainOptions::Verbosity> explain) const {
    // $out may only write into the same database as its input pipeline.
    massert(17000,
            "$out shouldn't have different db than input",
            _outputNs.db() == pExpCtx->ns.db());

    // Only the target collection name is serialized; the db is implicit.
    const auto targetColl = _outputNs.coll();
    return Value(DOC(getSourceName() << targetColl));
}
Value DocumentSourceLimit::serialize(bool explain) const {
    // Serializes to a single-field document: stage name -> limit count.
    const auto stageName = getSourceName();
    return Value(DOC(stageName << limit));
}
Value DocumentSourceSkip::serialize(bool explain) const {
    // Serializes to a single-field document: stage name -> skip count.
    const auto stageName = getSourceName();
    return Value(DOC(stageName << _skip));
}
Value DocumentSourceRedact::serialize(bool explain) const {
    // Delegate to the redact expression's own serialization.
    const Value expressionSpec = _expression.get()->serialize();
    return Value(DOC(getSourceName() << expressionSpec));
}
Value DocumentSourceProject::serialize(bool explain) const {
    // The projection's expression object produces its own serialized form.
    const Value projectionSpec = pEO->serialize();
    return Value(DOC(getSourceName() << projectionSpec));
}
Value DocumentSourceUnwind::serialize(bool explain) const {
    // The unwind path must have been resolved before serialization.
    verify(_unwindPath);
    // getPath(true) requests the prefixed form of the field path.
    const auto pathSpec = _unwindPath->getPath(true);
    return Value(DOC(getSourceName() << pathSpec));
}