intrusive_ptr<Document> DocumentSourceProject::getCurrent() {
    /* fetch the document we are about to project */
    intrusive_ptr<Document> pInput(pSource->getCurrent());

    /*
      Size the result document: the expression object's own estimate,
      plus one extra slot for _id unless it is being excluded.
    */
    size_t nFields = pEO->getSizeHint(pInput);
    if (!excludeId)
        ++nFields;
    intrusive_ptr<Document> pResult(Document::create(nFields));

    if (!excludeId) {
        intrusive_ptr<const Value> pIdValue(
            pInput->getField(Document::idName));

        /*
          An upstream projection could have removed _id (or declined to
          generate it), so it may legitimately be absent; copy it into
          the result only when it was actually found.
        */
        if (pIdValue.get())
            pResult->addField(Document::idName, pIdValue);
    }

    /*
      Use the ExpressionObject to build the rest of the result.  The
      trailing flag asks it to leave out a top-level _id if one turns
      up, because it was already handled above.
    */
    pEO->addToDocument(pResult, pInput, true);

    return pResult;
}
void DocumentSourceGroup::populate() { for(bool hasNext = !pSource->eof(); hasNext; hasNext = pSource->advance()) { intrusive_ptr<Document> pDocument(pSource->getCurrent()); /* get the _id document */ intrusive_ptr<const Value> pId(pIdExpression->evaluate(pDocument)); /* treat Undefined the same as NULL SERVER-4674 */ if (pId->getType() == Undefined) pId = Value::getNull(); /* Look for the _id value in the map; if it's not there, add a new entry with a blank accumulator. */ vector<intrusive_ptr<Accumulator> > *pGroup; GroupsType::iterator it(groups.find(pId)); if (it != groups.end()) { /* point at the existing accumulators */ pGroup = &it->second; } else { /* insert a new group into the map */ groups.insert(it, pair<intrusive_ptr<const Value>, vector<intrusive_ptr<Accumulator> > >( pId, vector<intrusive_ptr<Accumulator> >())); /* find the accumulator vector (the map value) */ it = groups.find(pId); pGroup = &it->second; /* add the accumulators */ const size_t n = vpAccumulatorFactory.size(); pGroup->reserve(n); for(size_t i = 0; i < n; ++i) { intrusive_ptr<Accumulator> pAccumulator( (*vpAccumulatorFactory[i])(pExpCtx)); pAccumulator->addOperand(vpExpression[i]); pGroup->push_back(pAccumulator); } } /* point at the existing key */ // unneeded atm // pId = it.first; /* tickle all the accumulators for the group we found */ const size_t n = pGroup->size(); for(size_t i = 0; i < n; ++i) (*pGroup)[i]->evaluate(pDocument); } /* start the group iterator */ groupsIterator = groups.begin(); if (groupsIterator != groups.end()) pCurrent = makeDocument(groupsIterator); populated = true; }
// --------------------------------------------------------------------------- bool CVcfReader::xAssignFeatureLocation( const CVcfData& data, unsigned int index, CRef<CSeq_feat> pFeature ) // --------------------------------------------------------------------------- { CRef<CSeq_id> pId(CReadUtil::AsSeqId(data.m_strChrom, m_iFlags)); if (data.IsSnv(index)) { pFeature->SetLocation().SetPnt().SetPoint(data.m_iPos-1); pFeature->SetLocation().SetPnt().SetId(*pId); return true; } if (data.IsDel(index)) { if (data.m_strRef.size()==2) { pFeature->SetLocation().SetPnt().SetPoint(data.m_iPos); pFeature->SetLocation().SetPnt().SetId(*pId); return true; } else { pFeature->SetLocation().SetInt().SetFrom(data.m_iPos); pFeature->SetLocation().SetInt().SetTo( data.m_iPos + data.m_strRef.length()-2); pFeature->SetLocation().SetInt().SetId(*pId); return true; } } if (data.IsIns(index)) { pFeature->SetLocation().SetInt().SetFrom(data.m_iPos-1); pFeature->SetLocation().SetInt().SetTo( data.m_iPos); pFeature->SetLocation().SetInt().SetId(*pId); return true; } if (data.IsDelins(index)) { pFeature->SetLocation().SetInt().SetFrom(data.m_iPos); pFeature->SetLocation().SetInt().SetTo( data.m_iPos+1); pFeature->SetLocation().SetInt().SetId(*pId); return true; } pFeature->SetLocation().SetInt().SetId( *pId ); pFeature->SetLocation().SetInt().SetFrom( data.m_iPos - 1 ); pFeature->SetLocation().SetInt().SetTo( data.m_iPos + data.m_strRef.length() - 2 ); return true; }
// --------------------------------------------------------------------------- bool CVcfReader::x_AssignFeatureLocation( const CVcfData& data, CRef<CSeq_feat> pFeature ) // --------------------------------------------------------------------------- { CRef<CSeq_id> pId(CReadUtil::AsSeqId(data.m_strChrom, m_iFlags)); pFeature->SetLocation().SetInt().SetId( *pId ); pFeature->SetLocation().SetInt().SetFrom( data.m_iPos - 1 ); pFeature->SetLocation().SetInt().SetTo( data.m_iPos + data.m_strRef.length() - 2 ); return true; }
TFileName CDiscussion::GetFileName(RFs& aSession) {
    // Build "<discussion-id>.xml", sanitize it, and complete it with the
    // application's path.
    TFileName aFileName;
    TPtr pDiscussionId(iDiscussionId->Des());

    // Truncate the id so that the ".xml" suffix and the path prefix added
    // below still fit within the TFileName's capacity.
    aFileName.Append(pDiscussionId.Left(aFileName.MaxLength() - 24));
    aFileName.Append(_L(".xml"));

    // Replace every '/' in the id with '~' so it cannot be read as a
    // path separator.
    for(TInt aPos = aFileName.Locate('/'); aPos != KErrNotFound;
            aPos = aFileName.Locate('/')) {
        aFileName[aPos] = TChar('~');
    }

    CFileUtilities::CompleteWithApplicationPath(aSession, aFileName);

    return aFileName;
}
intrusive_ptr<Document> DocumentSourceProject::getCurrent() {
    intrusive_ptr<Document> pInDocument(pSource->getCurrent());

    /* create the result document */
    const size_t sizeHint =
        pEO->getSizeHint(pInDocument) + (excludeId ? 0 : 1);
    intrusive_ptr<Document> pResultDocument(Document::create(sizeHint));

    if (!excludeId) {
        intrusive_ptr<const Value> pId(
            pInDocument->getField(Document::idName));

        /*
          BUGFIX: a previous projection could have removed _id (or
          declined to generate it), so it might not exist; the old code
          passed the resulting null Value straight to addField.  Only
          add _id when it was actually found.
        */
        if (pId.get())
            pResultDocument->addField(Document::idName, pId);
    }

    /* use the ExpressionObject to create the base result */
    pEO->addToDocument(pResultDocument, pInDocument);

    return pResultDocument;
}
/*
  Parse a {$group: {_id: ..., <field>: {<op>: <expr>}, ...}} specification
  into a DocumentSourceGroup.  User errors are reported via uassert.
*/
intrusive_ptr<DocumentSource> DocumentSourceGroup::createFromBson(
    BSONElement *pBsonElement,
    const intrusive_ptr<ExpressionContext> &pCtx) {
    uassert(15947, "a group's fields must be specified in an object",
            pBsonElement->type() == Object);

    intrusive_ptr<DocumentSourceGroup> pGroup(
        DocumentSourceGroup::create(pCtx));
    bool idSet = false;

    BSONObj groupObj(pBsonElement->Obj());
    BSONObjIterator groupIterator(groupObj);
    while(groupIterator.more()) {
        BSONElement groupField(groupIterator.next());
        const char *pFieldName = groupField.fieldName();

        if (strcmp(pFieldName, Document::idName.c_str()) == 0) {
            uassert(15948, "a group's _id may only be specified once",
                    !idSet);

            BSONType groupType = groupField.type();

            if (groupType == Object) {
                /*
                  Use the projection-like set of field paths to create the
                  group-by key.
                */
                Expression::ObjectCtx oCtx(
                    Expression::ObjectCtx::DOCUMENT_OK);
                intrusive_ptr<Expression> pId(
                    Expression::parseObject(&groupField, &oCtx));

                pGroup->setIdExpression(pId);
                idSet = true;
            }
            else if (groupType == String) {
                /* a "$path" string is a field path; anything else falls
                   through to the constant case below */
                string groupString(groupField.String());
                const char *pGroupString = groupString.c_str();
                if ((groupString.length() == 0) ||
                    (pGroupString[0] != '$'))
                    goto StringConstantId;

                string pathString(
                    Expression::removeFieldPrefix(groupString));
                intrusive_ptr<ExpressionFieldPath> pFieldPath(
                    ExpressionFieldPath::create(pathString));
                pGroup->setIdExpression(pFieldPath);
                idSet = true;
            }
            else {
                /* pick out the constant types that are allowed */
                switch(groupType) {
                case NumberDouble:
                case String:
                case Object:
                case Array:
                case jstOID:
                case Bool:
                case Date:
                case NumberInt:
                case Timestamp:
                case NumberLong:
                case jstNULL:
                StringConstantId: // from string case above
                {
                    intrusive_ptr<const Value> pValue(
                        Value::createFromBsonElement(&groupField));
                    intrusive_ptr<ExpressionConstant> pConstant(
                        ExpressionConstant::create(pValue));
                    pGroup->setIdExpression(pConstant);
                    idSet = true;
                    break;
                }

                default:
                    uassert(15949, str::stream() <<
                            "a group's _id may not include fields of BSON type " <<
                            groupType, false);
                }
            }
        }
        else {
            /*
              Treat as a projection field with the additional ability to
              add aggregation operators.
            */
            /*
              BUGFIX: pFieldName is a C string; the old messages streamed
              *pFieldName, which printed only its first character.  Stream
              the whole name, and restore the missing space before "must".
              (The 15950 condition correctly checks only the first char.)
            */
            uassert(15950, str::stream() <<
                    "the group aggregate field name " << pFieldName <<
                    " cannot be an operator name",
                    *pFieldName != '$');

            uassert(15951, str::stream() <<
                    "the group aggregate field " << pFieldName <<
                    " must be defined as an expression inside an object",
                    groupField.type() == Object);

            BSONObj subField(groupField.Obj());
            BSONObjIterator subIterator(subField);
            size_t subCount = 0;
            for(; subIterator.more(); ++subCount) {
                BSONElement subElement(subIterator.next());

                /* look for the specified operator */
                GroupOpDesc key;
                key.pName = subElement.fieldName();
                const GroupOpDesc *pOp =
                    (const GroupOpDesc *)bsearch(
                        &key, GroupOpTable, NGroupOp, sizeof(GroupOpDesc),
                        GroupOpDescCmp);

                uassert(15952, str::stream() <<
                        "unknown group operator \"" << key.pName << "\"",
                        pOp);

                intrusive_ptr<Expression> pGroupExpr;

                BSONType elementType = subElement.type();
                if (elementType == Object) {
                    Expression::ObjectCtx oCtx(
                        Expression::ObjectCtx::DOCUMENT_OK);
                    pGroupExpr = Expression::parseObject(
                        &subElement, &oCtx);
                }
                else if (elementType == Array) {
                    uassert(15953, str::stream() <<
                            "aggregating group operators are unary (" <<
                            key.pName << ")", false);
                }
                else { /* assume its an atomic single operand */
                    pGroupExpr = Expression::parseOperand(&subElement);
                }

                pGroup->addAccumulator(
                    pFieldName, pOp->pFactory, pGroupExpr);
            }

            uassert(15954, str::stream() <<
                    "the computed aggregate \"" << pFieldName <<
                    "\" must specify exactly one operator",
                    subCount == 1);
        }
    }

    uassert(15955, "a group specification must include an _id", idSet);

    return pGroup;
}
//  ---------------------------------------------------------------------------
//  Set a single common location on pFeat for ALL alleles of this VCF line.
//  m_iPos appears to be the 1-based VCF POS (note the -1 conversions
//  below) -- TODO confirm in CVcfData.
bool CVcfReader::xAssignFeatureLocationSet(
    const CVcfData& data,
    CRef<CSeq_feat> pFeat )
//  ---------------------------------------------------------------------------
{
    CRef<CSeq_id> pId(CReadUtil::AsSeqId(data.m_strChrom, m_iFlags));

    //context:
    // we are trying to package all the alleles of this feature into a single
    // variation_ref, hence, they all need a common location.
    // Referenced location differ between the different types of variations,
    // so we need to find the most specific variation type that describes them
    // all. Once the actual variation type has been found we can set the location
    // accordingly.
    // in practice, we will choose the common variation type if it is indeed
    // common for all the alleles. Otherwise, we just make it a MNV.

    if (data.m_SetType == CVcfData::ST_ALL_SNV) {
        //set location for SNVs: a single point at the variant base.
        pFeat->SetLocation().SetPnt().SetPoint(data.m_iPos-1);
        pFeat->SetLocation().SetPnt().SetId(*pId);
        return true;
    }
    if (data.m_SetType == CVcfData::ST_ALL_MNV) {
        //set location for MNV. This will be the location of the reference
        //(0-based start, inclusive end point).
        pFeat->SetLocation().SetInt().SetFrom(data.m_iPos-1);
        pFeat->SetLocation().SetInt().SetTo(data.m_iPos + data.m_strRef.size() - 2);
        pFeat->SetLocation().SetInt().SetId(*pId);
        return true;
    }
    if (data.m_SetType == CVcfData::ST_ALL_INS) {
        //set location for INSs. Will always be a point!
        //m_iPos points to the 1-based position of the first
        //nt that is unique between alt and ref
        pFeat->SetLocation().SetPnt().SetPoint(data.m_iPos-1);
        pFeat->SetLocation().SetPnt().SetId(*pId);
        return true;
    }
    if (data.m_SetType == CVcfData::ST_ALL_DEL) {
        if (data.m_strRef.size() == 1) {
            //deletion of a single base: a point location suffices.
            pFeat->SetLocation().SetPnt().SetPoint(data.m_iPos-1);
            pFeat->SetLocation().SetPnt().SetId(*pId);
        }
        else {
            //multi-base deletion: interval over the reference allele.
            pFeat->SetLocation().SetInt().SetFrom(data.m_iPos-1);
            //-1 for 0-based,
            //another -1 for inclusive end-point ( i.e. [], not [) )
            pFeat->SetLocation().SetInt().SetTo(
                data.m_iPos -1 + data.m_strRef.length() - 1);
            pFeat->SetLocation().SetInt().SetId(*pId);
        }
        return true;
    }

    //default: mixed allele types are treated as an MNV over the reference.
    //NB: For references of size >=2, this location will not
    //match the reference allele. Future Variation-ref
    //normalization code will address these issues,
    //and obviate the need for this code altogether.
    if (data.m_strRef.size() == 1) {
        //single-base reference: use a point location.
        pFeat->SetLocation().SetPnt().SetPoint(data.m_iPos-1);
        pFeat->SetLocation().SetPnt().SetId(*pId);
    }
    else {
        //interval over the reference allele (0-based, inclusive end).
        pFeat->SetLocation().SetInt().SetFrom(data.m_iPos-1);
        pFeat->SetLocation().SetInt().SetTo(
            data.m_iPos -1 + data.m_strRef.length() - 1);
        pFeat->SetLocation().SetInt().SetId(*pId);
    }
    return true;
}
// Serialize the in-memory discussion (entries plus their comments) to its
// XML cache file.  Returns ETrue-equivalent (true) on success, false if the
// discussion is not in memory, has no id, or the file could not be created.
TBool CDiscussion::WriteDiscussionToFileL() {
	TPtr pId(iDiscussionId->Des());

	// Only write when the discussion is actually cached in memory and has
	// an id from which a file name can be derived.
	if(iDiscussionInMemory && pId.Length() > 0) {
		RFs aSession = CCoeEnv::Static()->FsSession();
		TFileName aFilePath = GetFileName(aSession);

		RFileWriteStream aFile;
		TBuf8<128> aBuf;

#ifdef _DEBUG
		if(iDiscussionReadObserver) {
			aBuf.Format(_L8("DISC Start: Save discussion %d to file"), iItemId);
			iDiscussionReadObserver->DiscussionDebug(aBuf);
		}
#endif

		// Replace (create or truncate) the cache file.
		if(aFile.Replace(aSession, aFilePath, EFileStreamText|EFileWrite) == KErrNone) {
			CleanupClosePushL(aFile);

			CXmppAtomEntryParser* aAtomEntryParser = CXmppAtomEntryParser::NewLC();

			aFile.WriteL(_L8("<?xml version='1.0' encoding='UTF-8'?>\r\n"));
			aBuf.Format(_L8("\t<discussion notify='%d'>\r\n"), iNotify);
			aFile.WriteL(aBuf);

			// One <entry> per thread, followed by its comments (if any)
			// wrapped in a <comments> element.
			for(TInt i = 0; i < iEntries.Count(); i++) {
				CThreadedEntry* aThread = iEntries[i];
				CAtomEntryData* aEntry = aThread->GetEntry();

				aFile.WriteL(_L8("\t\t"));
				aFile.WriteL(aAtomEntryParser->AtomEntryToXmlL(aEntry, KNullDesC8, true));
				aFile.WriteL(_L8("\r\n"));

				if(aThread->CommentCount() > 0) {
					aFile.WriteL(_L8("\t\t<comments>\r\n"));

					// Comments reference their parent entry's id.
					for(TInt x = 0; x < aThread->CommentCount(); x++) {
						CAtomEntryData* aComment = aThread->GetCommentByIndex(x);

						aFile.WriteL(_L8("\t\t\t"));
						aFile.WriteL(aAtomEntryParser->AtomEntryToXmlL(aComment, aEntry->GetId(), true));
						aFile.WriteL(_L8("\r\n"));
					}

					aFile.WriteL(_L8("\t\t</comments>\r\n"));
				}
			}

			// NOTE(review): "</?xml?>" is not well-formed XML; it appears
			// harmless because the file is only read back via CXmlParser
			// (see ReadDiscussionToMemoryL) -- confirm before changing.
			aFile.WriteL(_L8("\t</discussion>\r\n</?xml?>"));

			CleanupStack::PopAndDestroy(); // CXmppAtomEntryParser
			CleanupStack::PopAndDestroy(&aFile);

#ifdef _DEBUG
			if(iDiscussionReadObserver) {
				aBuf.Format(_L8("DISC End: Save discussion %d to file"), iItemId);
				iDiscussionReadObserver->DiscussionDebug(aBuf);
			}
#endif

			return true;
		}
#ifdef _DEBUG
		else {
			if(iDiscussionReadObserver) {
				aBuf.Format(_L8("DISC Fail: Save discussion %d to file"), iItemId);
				iDiscussionReadObserver->DiscussionDebug(aBuf);
			}
		}
#endif
	}

	return false;
}
// Load the discussion's XML cache file into memory, rebuilding the entry
// and comment structures.  No-op if the discussion is already in memory or
// has no id; a missing file simply leaves the discussion empty.
void CDiscussion::ReadDiscussionToMemoryL() {
	TPtr pId(iDiscussionId->Des());

	if(!iDiscussionInMemory && pId.Length() > 0) {
		RFs aSession = CCoeEnv::Static()->FsSession();
		TFileName aFilePath = GetFileName(aSession);

		RFile aFile;
		TInt aFileSize;
		TBuf8<128> aBuf;

		// Reset unread counters before repopulating from file.
		if(iUnreadData) {
			iUnreadData->iEntries = 0;
			iUnreadData->iReplies = 0;
		}

		// Mark in-memory up front, even if the file turns out not to exist.
		iDiscussionInMemory = true;

#ifdef _DEBUG
		if(iDiscussionReadObserver) {
			aBuf.Format(_L8("DISC Start: Cache discussion %d to memory"), iItemId);
			iDiscussionReadObserver->DiscussionDebug(aBuf);
		}
#endif

		if(aFile.Open(aSession, aFilePath, EFileStreamText|EFileRead) == KErrNone) {
			CleanupClosePushL(aFile);
			aFile.Size(aFileSize);

			// Create buffer & read file
			HBufC8* aFileData = HBufC8::NewLC(aFileSize);
			TPtr8 pFileData(aFileData->Des());
			aFile.Read(pFileData);

			CXmlParser* aXmlParser = CXmlParser::NewLC(pFileData);

			if(aXmlParser->MoveToElement(_L8("discussion"))) {
				// Buzz
				iNotify = aXmlParser->GetBoolAttribute(_L8("notify"));

				CXmppAtomEntryParser* aAtomEntryParser = CXmppAtomEntryParser::NewLC();
				TBuf8<32> aReferenceId;

				// Each <entry> is either a top-level entry or a comment;
				// aReferenceId (filled by the parser) is the parent entry's
				// id, used by AddEntryOrCommentLD to thread it correctly.
				while(aXmlParser->MoveToElement(_L8("entry"))) {
					CAtomEntryData* aAtomEntry = aAtomEntryParser->XmlToAtomEntryLC(aXmlParser->GetStringData(), aReferenceId, true);

					AddEntryOrCommentLD(aAtomEntry, aReferenceId);
				}

				CleanupStack::PopAndDestroy(); // CXmppAtomEntryParser
			}

			CleanupStack::PopAndDestroy(2); // aXmlParser, aFileData
			CleanupStack::PopAndDestroy(&aFile);

#ifdef _DEBUG
			if(iDiscussionReadObserver) {
				aBuf.Format(_L8("DISC End: Cache discussion %d to memory (%d bytes)"), iItemId, aFileSize);
				iDiscussionReadObserver->DiscussionDebug(aBuf);
			}
#endif
		}
#ifdef _DEBUG
		else {
			if(iDiscussionReadObserver) {
				aBuf.Format(_L8("DISC Fail: Cache discussion %d to memory"), iItemId);
				iDiscussionReadObserver->DiscussionDebug(aBuf);
			}
		}
#endif
	}
}
/*
  Parse a {$group: {_id: ..., <field>: {<op>: <expr>}, ...}} specification
  into a DocumentSourceGroup.  This draft still reports user errors with
  assert() (the "CW TODO" notes mark where real error reporting belongs).
*/
intrusive_ptr<DocumentSource> DocumentSourceGroup::createFromBson(
    BSONElement *pBsonElement,
    const intrusive_ptr<ExpressionContext> &pCtx) {
    assert(pBsonElement->type() == Object); // CW TODO must be an object

    intrusive_ptr<DocumentSourceGroup> pGroup(
        DocumentSourceGroup::create(pCtx));
    bool idSet = false;

    BSONObj groupObj(pBsonElement->Obj());
    BSONObjIterator groupIterator(groupObj);
    while(groupIterator.more()) {
        BSONElement groupField(groupIterator.next());
        const char *pFieldName = groupField.fieldName();

        if (strcmp(pFieldName, Document::idName.c_str()) == 0) {
            assert(!idSet); // CW TODO _id specified multiple times

            BSONType groupType = groupField.type();

            if (groupType == Object) {
                /*
                  Use the projection-like set of field paths to create the
                  group-by key.
                */
                Expression::ObjectCtx oCtx(
                    Expression::ObjectCtx::DOCUMENT_OK);
                intrusive_ptr<Expression> pId(
                    Expression::parseObject(&groupField, &oCtx));

                pGroup->setIdExpression(pId);
                idSet = true;
            }
            else if (groupType == String) {
                /* a "$path" string is a field path; any other string
                   falls through (goto) to the constant case below */
                string groupString(groupField.String());
                const char *pGroupString = groupString.c_str();
                if ((groupString.length() == 0) ||
                    (pGroupString[0] != '$'))
                    goto StringConstantId;

                string pathString(
                    Expression::removeFieldPrefix(groupString));
                intrusive_ptr<ExpressionFieldPath> pFieldPath(
                    ExpressionFieldPath::create(pathString));
                pGroup->setIdExpression(pFieldPath);
                idSet = true;
            }
            else {
                /* pick out the constant types that are allowed */
                switch(groupType) {
                case NumberDouble:
                case String:
                case Object:
                case Array:
                case jstOID:
                case Bool:
                case Date:
                case NumberInt:
                case Timestamp:
                case NumberLong:
                case jstNULL:
                StringConstantId: // from string case above
                {
                    /* constant _id: every document lands in one group */
                    intrusive_ptr<const Value> pValue(
                        Value::createFromBsonElement(&groupField));
                    intrusive_ptr<ExpressionConstant> pConstant(
                        ExpressionConstant::create(pValue));
                    pGroup->setIdExpression(pConstant);
                    idSet = true;
                    break;
                }

                default:
                    assert(false); // CW TODO disallowed constant group key
                }
            }
        }
        else {
            /*
              Treat as a projection field with the additional ability to
              add aggregation operators.
            */
            assert(*pFieldName != '$'); // CW TODO error: field name can't be an operator
            assert(groupField.type() == Object); // CW TODO error: must be an operator expression

            BSONObj subField(groupField.Obj());
            BSONObjIterator subIterator(subField);
            size_t subCount = 0;
            for(; subIterator.more(); ++subCount) {
                BSONElement subElement(subIterator.next());

                /* look for the specified operator */
                GroupOpDesc key;
                key.pName = subElement.fieldName();
                const GroupOpDesc *pOp =
                    (const GroupOpDesc *)bsearch(
                        &key, GroupOpTable, NGroupOp, sizeof(GroupOpDesc),
                        GroupOpDescCmp);

                assert(pOp); // CW TODO error: operator not found

                intrusive_ptr<Expression> pGroupExpr;

                BSONType elementType = subElement.type();
                if (elementType == Object) {
                    Expression::ObjectCtx oCtx(
                        Expression::ObjectCtx::DOCUMENT_OK);
                    pGroupExpr = Expression::parseObject(
                        &subElement, &oCtx);
                }
                else if (elementType == Array) {
                    assert(false); // CW TODO group operators are unary
                }
                else { /* assume its an atomic single operand */
                    pGroupExpr = Expression::parseOperand(&subElement);
                }

                pGroup->addAccumulator(
                    pFieldName, pOp->pFactory, pGroupExpr);
            }

            assert(subCount == 1); // CW TODO error: only one operator allowed
        }
    }

    assert(idSet); // CW TODO error: missing _id specification

    return pGroup;
}
/*
  Parse a {$group: {_id: ..., <field>: {<op>: <expr>}, ...}} specification
  into a DocumentSourceGroup.  User errors are reported via uassert.
*/
intrusive_ptr<DocumentSource> DocumentSourceGroup::createFromBson(
        BSONElement *pBsonElement,
        const intrusive_ptr<ExpressionContext> &pExpCtx) {
    uassert(15947, "a group's fields must be specified in an object",
            pBsonElement->type() == Object);

    intrusive_ptr<DocumentSourceGroup> pGroup(
        DocumentSourceGroup::create(pExpCtx));
    bool idSet = false;

    BSONObj groupObj(pBsonElement->Obj());
    BSONObjIterator groupIterator(groupObj);
    while(groupIterator.more()) {
        BSONElement groupField(groupIterator.next());
        const char *pFieldName = groupField.fieldName();

        if (str::equals(pFieldName, "_id")) {
            uassert(15948, "a group's _id may only be specified once",
                    !idSet);

            BSONType groupType = groupField.type();
            if (groupType == Object) {
                /*
                  Use the projection-like set of field paths to create the
                  group-by key.
                */
                Expression::ObjectCtx oCtx(Expression::ObjectCtx::DOCUMENT_OK);
                intrusive_ptr<Expression> pId(
                    Expression::parseObject(&groupField, &oCtx));

                pGroup->setIdExpression(pId);
                idSet = true;
            }
            else if (groupType == String) {
                /* a "$path" string is a field path; anything else is
                   handled by the constant-id fallback below */
                const string groupString = groupField.str();
                if (!groupString.empty() && groupString[0] == '$') {
                    pGroup->setIdExpression(ExpressionFieldPath::parse(groupString));
                    idSet = true;
                }
            }

            if (!idSet) {
                // constant id - single group
                pGroup->setIdExpression(ExpressionConstant::create(Value(groupField)));
                idSet = true;
            }
        }
        else {
            /*
              Treat as a projection field with the additional ability to
              add aggregation operators.
            */
            uassert(16414, str::stream() <<
                    "the group aggregate field name '" << pFieldName <<
                    "' cannot be used because $group's field names cannot contain '.'",
                    !str::contains(pFieldName, '.') );

            uassert(15950, str::stream() <<
                    "the group aggregate field name '" << pFieldName <<
                    "' cannot be an operator name",
                    pFieldName[0] != '$');

            uassert(15951, str::stream() <<
                    "the group aggregate field '" << pFieldName <<
                    "' must be defined as an expression inside an object",
                    groupField.type() == Object);

            BSONObj subField(groupField.Obj());
            BSONObjIterator subIterator(subField);
            size_t subCount = 0;
            for(; subIterator.more(); ++subCount) {
                BSONElement subElement(subIterator.next());

                /* look for the specified operator */
                GroupOpDesc key;
                key.name = subElement.fieldName();
                const GroupOpDesc *pOp =
                    (const GroupOpDesc *)bsearch(
                        &key, GroupOpTable, NGroupOp, sizeof(GroupOpDesc),
                        GroupOpDescCmp);

                uassert(15952, str::stream() <<
                        "unknown group operator '" << key.name << "'",
                        pOp);

                intrusive_ptr<Expression> pGroupExpr;

                BSONType elementType = subElement.type();
                if (elementType == Object) {
                    Expression::ObjectCtx oCtx(
                        Expression::ObjectCtx::DOCUMENT_OK);
                    pGroupExpr = Expression::parseObject(
                        &subElement, &oCtx);
                }
                else if (elementType == Array) {
                    uasserted(15953, str::stream() <<
                              "aggregating group operators are unary (" <<
                              key.name << ")");
                }
                else { /* assume its an atomic single operand */
                    pGroupExpr = Expression::parseOperand(&subElement);
                }

                pGroup->addAccumulator(pFieldName, pOp->factory, pGroupExpr);
            }

            uassert(15954, str::stream() <<
                    "the computed aggregate '" << pFieldName <<
                    "' must specify exactly one operator",
                    subCount == 1);
        }
    }

    uassert(15955, "a group specification must include an _id", idSet);

    return pGroup;
}