Example #1
File: geoparser.cpp Project: Andiry/mongo
static Status isLoopClosed(const vector<S2Point>& loop, const BSONElement loopElt) {
    if (loop.empty()) {
        return BAD_VALUE("Loop has no vertices: " << loopElt.toString(false));
    }

    if (loop[0] != loop[loop.size() - 1]) {
        return BAD_VALUE("Loop is not closed: " << loopElt.toString(false));
    }

    return Status::OK();
}
Example #2
    // Gets the string representation of a BSON object that can be correctly written to a CSV file
    string csvString (const BSONElement& object) {
        const char* binData; // Only used with BinData type

        switch (object.type()) {
        case MinKey:
            return "$MinKey";
        case MaxKey:
            return "$MaxKey";
        case NumberInt:
        case NumberDouble:
        case NumberLong:
        case Bool:
            return object.toString(false);
        case String:
        case Symbol:
            return csvEscape(object.toString(false), true);
        case Object:
            return csvEscape(object.jsonString(Strict, false));
        case Array:
            return csvEscape(object.jsonString(Strict, false));
        case BinData:
            int len;
            binData = object.binDataClean(len);
            return toHex(binData, len);
        case jstOID:
            return "ObjectID(" + object.OID().toString() + ")"; // OIDs are always 24 bytes
        case Date:
            return timeToISOString(object.Date() / 1000);
        case Timestamp:
            return csvEscape(object.jsonString(Strict, false));
        case RegEx:
            return csvEscape("/" + string(object.regex()) + "/" + string(object.regexFlags()));
        case Code:
            return csvEscape(object.toString(false));
        case CodeWScope:
            if (string(object.codeWScopeScopeDataUnsafe()) == "") {
                return csvEscape(object.toString(false));
            } else {
                return csvEscape(object.jsonString(Strict, false));
            }
        case EOO:
        case Undefined:
        case DBRef:
        case jstNULL:
            cerr << "Invalid BSON object type for CSV output: " << object.type() << endl;
            return "";
        }
        // Can never get here
        verify(false);
        return "";
    }
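A minimal usage sketch, not from the original project: assuming the driver headers and the csvString/csvEscape helpers above are in scope, it joins one document's fields into a single CSV row. The document contents and the exact escaped output shown are hypothetical.

// Hypothetical caller: turn a BSONObj into one CSV row using csvString() above.
BSONObj doc = fromjson("{_id: 1, name: \"Alice, Bob\", tags: [1, 2]}");
string row;
BSONObjIterator it(doc);
while (it.more()) {
    BSONElement e = it.next();
    if (!row.empty())
        row += ",";
    row += csvString(e);   // each value is escaped/quoted as needed
}
cout << row << endl;       // e.g. 1,"Alice, Bob","[ 1, 2 ]"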
Example #3
    // Parse "coordinates" field of GeoJSON LineString
    // e.g. "coordinates": [ [100.0, 0.0], [101.0, 1.0] ]
    // Or a line in "coordinates" field of GeoJSON MultiLineString
    static Status parseGeoJSONLineCoordinates(const BSONElement& elem, S2Polyline* out) {
        vector<S2Point> vertices;
        Status status = parseArrayOfCoodinates(elem, &vertices);
        if (!status.isOK()) return status;

        eraseDuplicatePoints(&vertices);
        if (vertices.size() < 2)
            return BAD_VALUE("GeoJSON LineString must have at least 2 vertices: " << elem.toString(false));

        string err;
        if (!S2Polyline::IsValid(vertices, &err))
            return BAD_VALUE("GeoJSON LineString is not valid: " << err << " " << elem.toString(false));
        out->Init(vertices);
        return Status::OK();
    }
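A minimal test-style sketch, not part of the original file: it assumes it sits in the same translation unit as the static parser above and feeds a two-point LineString's "coordinates" element through it.

// Hypothetical caller; "coordinates" holds a GeoJSON LineString coordinate array.
BSONObj lineString = fromjson("{coordinates: [[100.0, 0.0], [101.0, 1.0]]}");
S2Polyline line;
Status status = parseGeoJSONLineCoordinates(lineString["coordinates"], &line);
// status.isOK() is expected here; a degenerate input such as
// [[100.0, 0.0], [100.0, 0.0]] collapses to one vertex after
// eraseDuplicatePoints() and is rejected with the "at least 2 vertices" error.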
Example #4
TEST(generator, test1)
{
//   const CHAR *js = "{a:[{c:[{d:{e:[1,2]}}, {d:\"abc\"}]}, {c:[{d:{e:[5,6]}}, {d:{e:[7,8]}}]}], b:10}" ;
   {
   const CHAR *js = "{no:1,name:\"A\",age:2,array1:[{array2:[{array3:[{array4:[\"array5\",\"temp4\"]},\"temp3\"]},\"temp2\"]},\"temp1\"]}"; 
   CHAR *raw = NULL ;
   getBSONRaw( js, &raw ) ;
   ASSERT_TRUE( NULL != raw ) ;
   BSONObj obj( raw ) ;
   BSONObj keyDef = BSON("array1.array2.array3.array4.1" << 1 ) ;
   _ixmIndexKeyGen gen( keyDef ) ;
   Ordering order(Ordering::make(keyDef)) ;
   BSONObjSet keySet( keyDef ) ;
   BSONElement arr ;
   INT32 rc = SDB_OK ;
   rc = gen.getKeys( obj, keySet, &arr ) ;
   ASSERT_TRUE( SDB_OK == rc ) ;
   for ( BSONObjSet::const_iterator itr = keySet.begin() ;
         itr != keySet.end() ;
         itr++ )
   {
      cout << itr->toString() << endl ;
   }

   cout << "arr:" << arr.toString( true, true )  << endl ;
   }
}
Example #5
intrusive_ptr<DocumentSource> DocumentSourceFacet::createFromBson(
    BSONElement elem, const intrusive_ptr<ExpressionContext>& expCtx) {

    std::vector<FacetPipeline> facetPipelines;
    for (auto&& rawFacet : extractRawPipelines(elem)) {
        const auto facetName = rawFacet.first;

        auto pipeline = uassertStatusOK(Pipeline::parse(rawFacet.second, expCtx));

        uassert(40172,
                str::stream() << "sub-pipeline in $facet stage cannot be empty: " << facetName,
                !pipeline->getSources().empty());

        // Disallow any stages that need to be the first stage in the pipeline.
        for (auto&& stage : pipeline->getSources()) {
            if (stage->isValidInitialSource()) {
                uasserted(40173,
                          str::stream() << stage->getSourceName()
                                        << " is not allowed to be used within a $facet stage: "
                                        << elem.toString());
            }
        }

        facetPipelines.emplace_back(facetName, std::move(pipeline));
    }

    return new DocumentSourceFacet(std::move(facetPipelines), expCtx);
}
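A minimal sketch of input this parser accepts, not from the original file; the spec below (stage names, field names) is hypothetical and built with fromjson for brevity.

// Hypothetical $facet spec: two named, non-empty sub-pipelines.
BSONObj facetSpec = fromjson(
    "{$facet: {"
    "    byCategory: [{$sortByCount: '$category'}],"
    "    firstFive:  [{$limit: 5}]"
    "}}");
// createFromBson(facetSpec.firstElement(), expCtx) would accept this; an empty
// sub-pipeline trips uassert 40172, and a sub-pipeline starting with a stage
// that must be first in a pipeline trips uassert 40173.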
Example #6
    S2AccessMethod::S2AccessMethod(IndexCatalogEntry* btreeState, RecordStore* rs)
        : BtreeBasedAccessMethod(btreeState, rs) {

        const IndexDescriptor* descriptor = btreeState->descriptor();

        ExpressionParams::parse2dsphereParams(descriptor->infoObj(),
                                              &_params);

        int geoFields = 0;

        // Categorize the fields we're indexing and make sure we have a geo field.
        BSONObjIterator i(descriptor->keyPattern());
        while (i.more()) {
            BSONElement e = i.next();
            if (e.type() == String && IndexNames::GEO_2DSPHERE == e.String() ) {
                ++geoFields;
            }
            else {
                // We check for numeric in 2d, so that's the check here
                uassert( 16823, (string)"Cannot use " + IndexNames::GEO_2DSPHERE +
                                    " index with other special index types: " + e.toString(),
                         e.isNumber() );
            }
        }

        uassert(16750, "Expect at least one geo field, spec=" + descriptor->keyPattern().toString(),
                geoFields >= 1);

        if (descriptor->isSparse()) {
            warning() << "Sparse option ignored for index spec "
                      << descriptor->keyPattern().toString() << "\n";
        }
    }
Example #7
 string DBClientWithCommands::getLastError() { 
     BSONObj info = getLastErrorDetailed();
     BSONElement e = info["err"];
     if( e.eoo() ) return "";
     if( e.type() == Object ) return e.toString();
     return e.str();
 }
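A minimal sketch, not from the original file, assuming the legacy C++ driver's DBClientConnection API used elsewhere in these examples (connect with an error string, an insert, then getLastError()); the function name and namespace "test.people" are hypothetical.

// Hypothetical caller of getLastError() after a write (legacy driver API).
void exampleLastError() {
    DBClientConnection c;
    string err;
    if (!c.connect("localhost", err)) {
        out() << "can't connect to server " << err << endl;
        return;
    }
    c.insert("test.people", BSON("name" << "Alice"));
    string lastErr = c.getLastError();   // empty string means the write succeeded
    if (!lastErr.empty())
        out() << "write failed: " << lastErr << endl;
}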
Example #8
   const CHAR* _omAgentNodeMgr::_getSvcNameFromArg( const CHAR * arg )
   {
      const CHAR *pSvcName = NULL ;

      try
      {
         BSONObj objArg1( arg ) ;
         BSONElement e = objArg1.getField( PMD_OPTION_SVCNAME ) ;
         if ( e.type() != String )
         {
            PD_LOG( PDERROR, "Param[%s] type[%s] error: %s",
                    PMD_OPTION_SVCNAME, e.type(), e.toString().c_str() ) ;
            goto error ;
         }
         pSvcName = e.valuestrsafe() ;
      }
      catch( std::exception &e )
      {
         PD_LOG( PDERROR, "Ocuur exception: %s", e.what() ) ;
         goto error ;
      }

   done:
      return pSvcName ;
   error:
      goto done ;
   }
Example #9
Status getStatusFromCommandResult(const BSONObj& result) {
    BSONElement okElement = result["ok"];
    BSONElement codeElement = result["code"];
    BSONElement errmsgElement = result["errmsg"];

    // StaleConfigException doesn't pass "ok" in legacy servers
    BSONElement dollarErrElement = result["$err"];

    if (okElement.eoo() && dollarErrElement.eoo()) {
        return Status(ErrorCodes::CommandResultSchemaViolation,
                      mongoutils::str::stream() << "No \"ok\" field in command result " << result);
    }
    if (okElement.trueValue()) {
        return Status::OK();
    }
    int code = codeElement.numberInt();
    if (0 == code) {
        code = ErrorCodes::UnknownError;
    }
    std::string errmsg;
    if (errmsgElement.type() == String) {
        errmsg = errmsgElement.String();
    } else if (!errmsgElement.eoo()) {
        errmsg = errmsgElement.toString();
    }

    // we can't use startsWith(errmsg, "no such")
    // as we have errors such as "no such collection"
    if (code == ErrorCodes::UnknownError &&
        (str::startsWith(errmsg, "no such cmd") || str::startsWith(errmsg, "no such command"))) {
        code = ErrorCodes::CommandNotFound;
    }

    return Status(ErrorCodes::Error(code), errmsg, result);
}
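A minimal sketch, not from the original file: a hand-built failed command reply run through getStatusFromCommandResult to show the CommandNotFound remapping; the reply contents are hypothetical.

// Hypothetical reply: "ok" is 0, no numeric "code", errmsg names a missing command.
BSONObj reply = BSON("ok" << 0 << "errmsg" << "no such command: fooBar");
Status status = getStatusFromCommandResult(reply);
// codeElement is missing, so code starts as UnknownError; the
// "no such command" prefix then remaps it to ErrorCodes::CommandNotFound,
// and status.reason() carries the errmsg text.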
Example #10
File: jsobj.cpp Project: agiamas/mongo
    string BSONObj::toString() const {
        if ( isEmpty() ) return "{}";

        stringstream s;
        s << "{ ";
        BSONObjIterator i(*this);
        bool first = true;
        while ( 1 ) {
            massert( "Object does not end with EOO", i.more() );
            BSONElement e = i.next( true );
            massert( "Invalid element size", e.size() > 0 );
            massert( "Element too large", e.size() < ( 1 << 30 ) );
            int offset = e.rawdata() - this->objdata();
            massert( "Element extends past end of object",
                    e.size() + offset <= this->objsize() );
            e.validate();
            bool end = ( e.size() + offset == this->objsize() );
            if ( e.eoo() ) {
                massert( "EOO Before end of object", end );
                break;
            }
            if ( first )
                first = false;
            else
                s << ", ";
            s << e.toString();
        }
        s << " }";
        return s.str();
    }
Example #11
File: dbclient.cpp Project: tanfulai/mongo
 string DBClientWithCommands::getLastError() { 
     BSONObj info;
     runCommand("admin", getlasterrorcmdobj, info);
     BSONElement e = info["err"];
     if( e.eoo() ) return "";
     if( e.type() == Object ) return e.toString();
     return e.str();
 }
Example #12
File: geoparser.cpp Project: Andiry/mongo
static Status parseBigSimplePolygonCoordinates(const BSONElement& elem, BigSimplePolygon* out) {
    if (Array != elem.type())
        return BAD_VALUE("Coordinates of polygon must be an array");


    const vector<BSONElement>& coordinates = elem.Array();
    // Only one loop is allowed in a BigSimplePolygon
    if (coordinates.size() != 1) {
        return BAD_VALUE(
            "Only one simple loop is allowed in a big polygon: " << elem.toString(false));
    }

    vector<S2Point> exteriorVertices;
    Status status = Status::OK();
    string err;

    status = parseArrayOfCoordinates(coordinates.front(), &exteriorVertices);
    if (!status.isOK())
        return status;

    status = isLoopClosed(exteriorVertices, coordinates.front());
    if (!status.isOK())
        return status;

    eraseDuplicatePoints(&exteriorVertices);

    // The last point is duplicated.  We drop it, since S2Loop expects no
    // duplicate points
    exteriorVertices.resize(exteriorVertices.size() - 1);

    // At least 3 vertices.
    if (exteriorVertices.size() < 3) {
        return BAD_VALUE("Loop must have at least 3 different vertices: " << elem.toString(false));
    }

    unique_ptr<S2Loop> loop(new S2Loop(exteriorVertices));
    // Check whether this loop is valid.
    if (!loop->IsValid(&err)) {
        return BAD_VALUE("Loop is not valid: " << elem.toString(false) << " " << err);
    }

    out->Init(loop.release());
    return Status::OK();
}
Example #13
File: commands.cpp Project: wjin/mongo
 string Command::parseNsFullyQualified(const string& dbname, const BSONObj& cmdObj) const {
     BSONElement first = cmdObj.firstElement();
     uassert(17005,
             mongoutils::str::stream() << "Main argument to " << first.fieldNameStringData() <<
                     " must be a fully qualified namespace string.  Found: " <<
                     first.toString(false),
             first.type() == mongo::String &&
             NamespaceString::validCollectionComponent(first.valuestr()));
     return first.String();
 }
Example #14
   static INT32 _rtnParseQueryMeta( const BSONObj &meta, const CHAR *&scanType,
                                    const CHAR *&indexName, INT32 &indexLID,
                                    INT32 &direction, BSONObj &blockObj )
   {
      INT32 rc = SDB_OK ;
      BSONElement ele ;

      rc = rtnGetStringElement( meta, FIELD_NAME_SCANTYPE, &scanType ) ;
      PD_RC_CHECK( rc, PDERROR, "Failed to get field[%s], rc: %d",
                   FIELD_NAME_SCANTYPE, rc ) ;

      if ( 0 == ossStrcmp( scanType, VALUE_NAME_IXSCAN ) )
      {
         ele = meta.getField( FIELD_NAME_INDEXBLOCKS ) ;

         rc = rtnGetStringElement( meta, FIELD_NAME_INDEXNAME, &indexName ) ;
         PD_RC_CHECK( rc, PDERROR, "Failed to get field[%s], rc: %d",
                      FIELD_NAME_INDEXNAME, rc ) ;

         rc = rtnGetIntElement( meta, FIELD_NAME_INDEXLID, indexLID ) ;
         PD_RC_CHECK( rc, PDERROR, "Failed to get field[%s], rc: %d",
                      FIELD_NAME_INDEXLID, rc ) ;

         rc = rtnGetIntElement( meta, FIELD_NAME_DIRECTION, direction ) ;
         PD_RC_CHECK( rc, PDERROR, "Failed to get field[%s], rc: %d",
                      FIELD_NAME_DIRECTION, rc ) ;
      }
      else if ( 0 == ossStrcmp( scanType, VALUE_NAME_TBSCAN ) )
      {
         ele = meta.getField( FIELD_NAME_DATABLOCKS ) ;
      }
      else
      {
         PD_LOG( PDERROR, "Query meta[%s] scan type error",
                 meta.toString().c_str() ) ;
         rc = SDB_INVALIDARG ;
         goto error ;
      }

      if ( Array != ele.type() )
      {
         PD_LOG( PDERROR, "Block field[%s] type error",
                 ele.toString().c_str() ) ;
         rc = SDB_INVALIDARG ;
         goto error ;
      }
      blockObj = ele.embeddedObject() ;

   done:
      return rc ;
   error:
      goto done ;
   }
Example #15
Status ModifierObjectReplace::apply() const {
    dassert(!_preparedState->noOp);

    // Remove the contents of the provided doc.
    mutablebson::Document& doc = _preparedState->doc;
    mutablebson::Element current = doc.root().leftChild();
    mutablebson::Element srcIdElement = doc.end();
    while (current.ok()) {
        mutablebson::Element toRemove = current;
        current = current.rightSibling();

        // Skip _id field element -- it should not change
        if (toRemove.getFieldName() == idFieldName) {
            srcIdElement = toRemove;
            continue;
        }

        Status status = toRemove.remove();
        if (!status.isOK()) {
            return status;
        }
    }

    // Insert the provided contents instead.
    BSONElement dstIdElement;
    BSONObjIterator it(_val);
    while (it.more()) {
        BSONElement elem = it.next();
        if (elem.fieldNameStringData() == idFieldName) {
            dstIdElement = elem;

            // Do not duplicate _id field
            if (srcIdElement.ok()) {
                if (srcIdElement.compareWithBSONElement(dstIdElement, nullptr, true) != 0) {
                    return Status(ErrorCodes::ImmutableField,
                                  str::stream() << "The _id field cannot be changed from {"
                                                << srcIdElement.toString()
                                                << "} to {"
                                                << dstIdElement.toString()
                                                << "}.");
                }
                continue;
            }
        }

        Status status = doc.root().appendElement(elem);
        if (!status.isOK()) {
            return status;
        }
    }

    return Status::OK();
}
Example #16
File: type_chunk.cpp Project: i80and/mongo
std::string ChunkType::genID(const NamespaceString& nss, const BSONObj& o) {
    StringBuilder buf;
    buf << nss.ns() << "-";

    BSONObjIterator i(o);
    while (i.more()) {
        BSONElement e = i.next();
        buf << e.fieldName() << "_" << e.toString(false, true);
    }

    return buf.str();
}
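A minimal sketch, not from the original file, of the ID format this produces; the namespace and shard-key value are hypothetical, and the exact value rendering comes from BSONElement::toString(false, true).

// Hypothetical call: chunk of "test.foo" whose minimum shard-key value is {x: 5}.
NamespaceString nss("test.foo");
BSONObj minKey = BSON("x" << 5);
std::string chunkId = ChunkType::genID(nss, minKey);
// Expected shape: "test.foo-x_5" — namespace, then one fieldName_value pair
// per shard-key field.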
Example #17
File: bsonobj.cpp Project: kingfs/tokumxse
void BSONObj::_assertInvalid() const {
    StringBuilder ss;
    int os = objsize();
    ss << "BSONObj size: " << os << " (0x" << integerToHex( os ) << ") is invalid. "
       << "Size must be between 0 and " << BSONObjMaxInternalSize
       << "(" << ( BSONObjMaxInternalSize/(1024*1024) ) << "MB)";
    try {
        BSONElement e = firstElement();
        ss << " First element: " << e.toString();
    }
    catch ( ... ) { }
    massert( 10334 , ss.str() , 0 );
}
Example #18
int gridfs_getxattr(const char* path, const char* name, char* value, size_t size)
{
  if(strcmp(path, "/") == 0) {
    return -ENOATTR;
  }

  path = fuse_to_mongo_path(path);
  const char* attr_name = unnamespace_xattr(name);
  if(!attr_name) {
    return -ENOATTR;
  }

  if(open_files.find(path) != open_files.end()) {
    return -ENOATTR;
  }

  ScopedDbConnection sdc(*gridfs_options.conn_string);
  bool digest = true;
  string err = "";
  sdc.conn().DBClientWithCommands::auth(gridfs_options.db, gridfs_options.username, gridfs_options.password, err, digest);
  fprintf(stderr, "DEBUG: %s\n", err.c_str());
  GridFS gf(sdc.conn(), gridfs_options.db);
  GridFile file = gf.findFile(path);
  sdc.done();

  if(!file.exists()) {
    return -ENOENT;
  }

  BSONObj metadata = file.getMetadata();
  if(metadata.isEmpty()) {
    return -ENOATTR;
  }

  BSONElement field = metadata[attr_name];
  if(field.eoo()) {
    return -ENOATTR;
  }

  string field_str = field.toString();
  int len = field_str.size() + 1;
  if(size == 0) {
    return len;
  } else if(size < len) {
    return -ERANGE;
  }

  memcpy(value, field_str.c_str(), len);

  return len;
}
Example #19
	int gridfs_getxattr(const char* path, const char* name, char* value, size_t size)
#endif /* __APPLE__ */
{
  if(strcmp(path, "/") == 0) {
    return -ENOATTR;
  }

  path = fuse_to_mongo_path(path);
  const char* attr_name = unnamespace_xattr(name);
  if(!attr_name) {
    return -ENOATTR;
  }

  if(open_files.find(path) != open_files.end()) {
    return -ENOATTR;
  }

  ScopedDbConnection sdc(*gridfs_options.conn_string);
  ScopedDbConnection_init(sdc);
  GridFS gf(sdc.conn(), gridfs_options.db, gridfs_options.prefix);
  GridFile file = gf.findFile(path);
  sdc.done();

  if(!file.exists()) {
    return -ENOENT;
  }

  BSONObj metadata = file.getMetadata();
  if(metadata.isEmpty()) {
    return -ENOATTR;
  }

  BSONElement field = metadata[attr_name];
  if(field.eoo()) {
    return -ENOATTR;
  }

  string field_str = field.toString();
  int len = field_str.size() + 1;
  if(size == 0) {
    return len;
  } else if(size < len) {
    return -ERANGE;
  }

  memcpy(value, field_str.c_str(), len);

  return len;
}
Example #20
File: MongoDB.cpp Project: chenmusun/Email
long CMongoDB::SaveFileToMongoDB(string& remotename, string& strPath, string& strRtr)
{
	/*
	A GridFS object stores files; its constructor takes a DBClientConnection
	instance and the name of the database to use.
	storeFile uploads the file at the given path: the first parameter is the
	file path, the second is the name it is stored under in the database.
	write downloads a file; its parameter is the path to save the file to.
	*/
	if (m_nUseDB != 1)
	{
		strRtr = "No use Database!";
		return 0;
	}
	if (remotename.length() <= 0)
	{
		strRtr = "RemoteName is Empty!";
		return -1;
	}
	if (strPath.length() <= 0)
	{
		strRtr = "Path is Empty!";
		return -1;
	}
	try
	{
		string strerr;
		if (connect.isStillConnected() && m_bConnect)
		{
			GridFS fs(connect, m_strDBName);
			//fs.storeFile(strPath, remotename);
			BSONObj obj = fs.storeFile(strPath, remotename);
			if (!obj.isEmpty())
			{
				BSONElement em = obj.getField("md5");
				strRtr = em.toString();
			}
			else return -1;
		}
		else return -1;
		return 0;
	}
	catch (...)
	{
		return -1;
	}
}
Example #21
    /**
     * Find and parse all geometry elements on the appropriate field path from the document.
     */
    static void extractGeometries(const BSONObj& doc,
                                  const string& path,
                                  vector<StoredGeometry*>* geometries) {

        BSONElementSet geomElements;
        // NOTE: Annoyingly, we cannot just expand arrays b/c single 2d points are arrays, we need
        // to manually expand all results to check if they are geometries
        doc.getFieldsDotted(path, geomElements, false /* expand arrays */);

        for (BSONElementSet::iterator it = geomElements.begin(); it != geomElements.end(); ++it) {

            const BSONElement& el = *it;
            auto_ptr<StoredGeometry> stored(StoredGeometry::parseFrom(el));

            if (stored.get()) {
                // Valid geometry element
                geometries->push_back(stored.release());
            }
            else if (el.type() == Array) {

                // Many geometries may be in an array
                BSONObjIterator arrIt(el.Obj());
                while (arrIt.more()) {

                    const BSONElement nextEl = arrIt.next();
                    stored.reset(StoredGeometry::parseFrom(nextEl));

                    if (stored.get()) {
                        // Valid geometry element
                        geometries->push_back(stored.release());
                    }
                    else {
                        warning() << "geoNear stage read non-geometry element " << nextEl.toString()
                                  << " in array " << el.toString();
                    }
                }
            }
            else {
                warning() << "geoNear stage read non-geometry element " << el.toString();
            }
        }
    }
Example #22
    S2AccessMethod::S2AccessMethod(IndexCatalogEntry* btreeState)
        : BtreeBasedAccessMethod(btreeState) {

        const IndexDescriptor* descriptor = btreeState->descriptor();

        // Set up basic params.
        _params.maxKeysPerInsert = 200;
        // This is advisory.
        _params.maxCellsInCovering = 50;
        // Near distances are specified in meters...sometimes.
        _params.radius = kRadiusOfEarthInMeters;
        // These are not advisory.
        _params.finestIndexedLevel = configValueWithDefault(descriptor, "finestIndexedLevel",
            S2::kAvgEdge.GetClosestLevel(500.0 / _params.radius));
        _params.coarsestIndexedLevel = configValueWithDefault(descriptor, "coarsestIndexedLevel",
            S2::kAvgEdge.GetClosestLevel(100 * 1000.0 / _params.radius));
        uassert(16747, "coarsestIndexedLevel must be >= 0", _params.coarsestIndexedLevel >= 0);
        uassert(16748, "finestIndexedLevel must be <= 30", _params.finestIndexedLevel <= 30);
        uassert(16749, "finestIndexedLevel must be >= coarsestIndexedLevel",
                _params.finestIndexedLevel >= _params.coarsestIndexedLevel);

        int geoFields = 0;

        // Categorize the fields we're indexing and make sure we have a geo field.
        BSONObjIterator i(descriptor->keyPattern());
        while (i.more()) {
            BSONElement e = i.next();
            if (e.type() == String && IndexNames::GEO_2DSPHERE == e.String() ) {
                ++geoFields;
            }
            else {
                // We check for numeric in 2d, so that's the check here
                uassert( 16823, (string)"Cannot use " + IndexNames::GEO_2DSPHERE +
                                    " index with other special index types: " + e.toString(),
                         e.isNumber() );
            }
        }
        uassert(16750, "Expect at least one geo field, spec=" + descriptor->keyPattern().toString(),
                geoFields >= 1);
    }
Example #23
File: commands.cpp Project: jewkesy/mongo
Status Command::getStatusFromCommandResult(const BSONObj& result) {
    BSONElement okElement = result["ok"];
    BSONElement codeElement = result["code"];
    BSONElement errmsgElement = result["errmsg"];
    if (okElement.eoo()) {
        return Status(ErrorCodes::CommandResultSchemaViolation,
                      mongoutils::str::stream() << "No \"ok\" field in command result " <<
                      result);
    }
    if (okElement.trueValue()) {
        return Status::OK();
    }
    int code = codeElement.numberInt();
    if (0 == code)
        code = ErrorCodes::UnknownError;
    std::string errmsg;
    if (errmsgElement.type() == String) {
        errmsg = errmsgElement.String();
    }
    else if (!errmsgElement.eoo()) {
        errmsg = errmsgElement.toString();
    }
    return Status(ErrorCodes::Error(code), errmsg);
}
Example #24
File: dbclient.cpp Project: tanfulai/mongo
    /* TODO: unit tests should run this? */
    void testDbEval() {
        DBClientConnection c;
        string err;
        if ( !c.connect("localhost", err) ) {
            out() << "can't connect to server " << err << endl;
            return;
        }

        if( !c.auth("dwight", "u", "p", err) ) { 
            out() << "can't authenticate " << err << endl;
            return;
        }

        BSONObj info;
        BSONElement retValue;
        BSONObjBuilder b;
        b.append("0", 99);
        BSONObj args = b.done();
        bool ok = c.eval("dwight", "function() { return args[0]; }", info, retValue, &args);
        out() << "eval ok=" << ok << endl;
        out() << "retvalue=" << retValue.toString() << endl;
        out() << "info=" << info.toString() << endl;

        out() << endl;

        int x = 3;
        assert( c.eval("dwight", "function() { return 3; }", x) );

        out() << "***\n";

        BSONObj foo = fromjson("{\"x\":7}");
        out() << foo.toString() << endl;
        int res=0;
        ok = c.eval("dwight", "function(parm1) { return parm1.x; }", foo, res);
        out() << ok << " retval:" << res << endl;
    }
Example #25
   // PD_TRACE_DECLARE_FUNCTION ( SDB__CLSSPLIT_INIT, "_rtnSplit::init" )
   INT32 _rtnSplit::init ( INT32 flags, INT64 numToSkip, INT64 numToReturn,
                           const CHAR * pMatcherBuff,
                           const CHAR * pSelectBuff,
                           const CHAR * pOrderByBuff,
                           const CHAR * pHintBuff )
   {
      INT32 rc = SDB_OK ;
      PD_TRACE_ENTRY ( SDB__CLSSPLIT_INIT ) ;
      const CHAR *pCollectionName = NULL ;
      const CHAR *pTargetName     = NULL ;
      const CHAR *pSourceName     = NULL ;

      try
      {
         BSONObj boRequest ( pMatcherBuff ) ;
         BSONElement beName       = boRequest.getField ( CAT_COLLECTION_NAME ) ;
         BSONElement beTarget     = boRequest.getField ( CAT_TARGET_NAME ) ;
         BSONElement beSplitKey   = boRequest.getField ( CAT_SPLITVALUE_NAME ) ;
         BSONElement beSource     = boRequest.getField ( CAT_SOURCE_NAME ) ;
         BSONElement bePercent    = boRequest.getField ( CAT_SPLITPERCENT_NAME ) ;

         // validate collection name and read
         PD_CHECK ( !beName.eoo() && beName.type() == String,
                    SDB_INVALIDARG, error, PDERROR,
                    "Invalid collection name: %s", beName.toString().c_str() ) ;
         pCollectionName = beName.valuestr() ;
         PD_CHECK ( ossStrlen ( pCollectionName ) <
                       DMS_COLLECTION_SPACE_NAME_SZ +
                       DMS_COLLECTION_NAME_SZ + 1,
                    SDB_INVALIDARG, error, PDERROR,
                    "Collection name is too long: %s", pCollectionName ) ;
         ossStrncpy ( _szCollection, pCollectionName,
                         DMS_COLLECTION_SPACE_NAME_SZ +
                          DMS_COLLECTION_NAME_SZ + 1 ) ;
         // validate target name and read
         PD_CHECK ( !beTarget.eoo() && beTarget.type() == String,
                    SDB_INVALIDARG, error, PDERROR,
                    "Invalid target group name: %s",
                    beTarget.toString().c_str() ) ;
         pTargetName = beTarget.valuestr() ;
         PD_CHECK ( ossStrlen ( pTargetName ) < OP_MAXNAMELENGTH,
                    SDB_INVALIDARG, error, PDERROR,
                    "target group name is too long: %s",
                    pTargetName ) ;
         ossStrncpy ( _szTargetName, pTargetName, OP_MAXNAMELENGTH ) ;
         // validate source name and read
         PD_CHECK ( !beSource.eoo() && beSource.type() == String,
                    SDB_INVALIDARG, error, PDERROR,
                    "Invalid source group name: %s",
                    beSource.toString().c_str() ) ;
         pSourceName = beSource.valuestr() ;
         PD_CHECK ( ossStrlen ( pSourceName ) < OP_MAXNAMELENGTH,
                    SDB_INVALIDARG, error, PDERROR,
                    "source group name is too long: %s",
                    pSourceName ) ;
         ossStrncpy ( _szSourceName, pSourceName, OP_MAXNAMELENGTH ) ;
         // read split key
         PD_CHECK ( !beSplitKey.eoo() && beSplitKey.type() == Object,
                    SDB_INVALIDARG, error, PDERROR,
                    "Invalid split key: %s",
                    beSplitKey.toString().c_str() ) ;
         _splitKey = beSplitKey.embeddedObject () ;
         // percent
         _percent = bePercent.numberDouble() ;
      }
      catch ( std::exception &e )
      {
         PD_RC_CHECK ( SDB_SYS, PDERROR,
                       "Exception handled when parsing split request: %s",
                       e.what() ) ;
      }
      PD_TRACE4 ( SDB__CLSSPLIT_INIT,
                  PD_PACK_STRING ( pCollectionName ),
                  PD_PACK_STRING ( pTargetName ),
                  PD_PACK_STRING ( pSourceName ),
                  PD_PACK_STRING ( _splitKey.toString().c_str() ) ) ;

   done:
      PD_TRACE_EXITRC ( SDB__CLSSPLIT_INIT, rc ) ;
      return rc ;
   error:
      goto done ;
   }
Example #26
vector<intrusive_ptr<DocumentSource>> DocumentSourceBucket::createFromBson(
    BSONElement elem, const intrusive_ptr<ExpressionContext>& pExpCtx) {
    uassert(40201,
            str::stream() << "Argument to $bucket stage must be an object, but found type: "
                          << typeName(elem.type())
                          << ".",
            elem.type() == BSONType::Object);

    const BSONObj bucketObj = elem.embeddedObject();
    BSONObjBuilder groupObjBuilder;
    BSONObjBuilder switchObjBuilder;

    VariablesIdGenerator idGenerator;
    VariablesParseState vps(&idGenerator);

    vector<Value> boundaryValues;
    BSONElement groupByField;
    Value defaultValue;

    bool outputFieldSpecified = false;
    for (auto&& argument : bucketObj) {
        const auto argName = argument.fieldNameStringData();
        if ("groupBy" == argName) {
            groupByField = argument;

            const bool groupByIsExpressionInObject = groupByField.type() == BSONType::Object &&
                groupByField.embeddedObject().firstElementFieldName()[0] == '$';

            const bool groupByIsPrefixedPath =
                groupByField.type() == BSONType::String && groupByField.valueStringData()[0] == '$';
            uassert(40202,
                    str::stream() << "The $bucket 'groupBy' field must be defined as a $-prefixed "
                                     "path or an expression, but found: "
                                  << groupByField.toString(false, false)
                                  << ".",
                    groupByIsExpressionInObject || groupByIsPrefixedPath);
        } else if ("boundaries" == argName) {
            uassert(
                40200,
                str::stream() << "The $bucket 'boundaries' field must be an array, but found type: "
                              << typeName(argument.type())
                              << ".",
                argument.type() == BSONType::Array);

            for (auto&& boundaryElem : argument.embeddedObject()) {
                auto exprConst = getExpressionConstant(boundaryElem, vps);
                uassert(40191,
                        str::stream() << "The $bucket 'boundaries' field must be an array of "
                                         "constant values, but found value: "
                                      << boundaryElem.toString(false, false)
                                      << ".",
                        exprConst);
                boundaryValues.push_back(exprConst->getValue());
            }

            uassert(40192,
                    str::stream()
                        << "The $bucket 'boundaries' field must have at least 2 values, but found "
                        << boundaryValues.size()
                        << " value(s).",
                    boundaryValues.size() >= 2);

            // Make sure that the boundaries are unique, sorted in ascending order, and have the
            // same canonical type.
            for (size_t i = 1; i < boundaryValues.size(); ++i) {
                Value lower = boundaryValues[i - 1];
                Value upper = boundaryValues[i];
                int lowerCanonicalType = canonicalizeBSONType(lower.getType());
                int upperCanonicalType = canonicalizeBSONType(upper.getType());

                uassert(40193,
                        str::stream() << "All values in the the 'boundaries' option to $bucket "
                                         "must have the same type. Found conflicting types "
                                      << typeName(lower.getType())
                                      << " and "
                                      << typeName(upper.getType())
                                      << ".",
                        lowerCanonicalType == upperCanonicalType);
                uassert(40194,
                        str::stream()
                            << "The 'boundaries' option to $bucket must be sorted, but elements "
                            << i - 1
                            << " and "
                            << i
                            << " are not in ascending order ("
                            << lower.toString()
                            << " is not less than "
                            << upper.toString()
                            << ").",
                        pExpCtx->getValueComparator().evaluate(lower < upper));
            }
        } else if ("default" == argName) {
            // If there is a default, make sure that it parses to a constant expression then add
            // default to switch.
            auto exprConst = getExpressionConstant(argument, vps);
            uassert(40195,
                    str::stream()
                        << "The $bucket 'default' field must be a constant expression, but found: "
                        << argument.toString(false, false)
                        << ".",
                    exprConst);

            defaultValue = exprConst->getValue();
            defaultValue.addToBsonObj(&switchObjBuilder, "default");
        } else if ("output" == argName) {
            outputFieldSpecified = true;
            uassert(
                40196,
                str::stream() << "The $bucket 'output' field must be an object, but found type: "
                              << typeName(argument.type())
                              << ".",
                argument.type() == BSONType::Object);

            for (auto&& outputElem : argument.embeddedObject()) {
                groupObjBuilder.append(outputElem);
            }
        } else {
            uasserted(40197, str::stream() << "Unrecognized option to $bucket: " << argName << ".");
        }
    }

    const bool isMissingRequiredField = groupByField.eoo() || boundaryValues.empty();
    uassert(40198,
            "$bucket requires 'groupBy' and 'boundaries' to be specified.",
            !isMissingRequiredField);

    Value lowerValue = boundaryValues.front();
    Value upperValue = boundaryValues.back();
    if (canonicalizeBSONType(defaultValue.getType()) ==
        canonicalizeBSONType(lowerValue.getType())) {
        // If the default has the same canonical type as the bucket's boundaries, then make sure the
        // default is less than the lowest boundary or greater than or equal to the highest
        // boundary.
        const auto& valueCmp = pExpCtx->getValueComparator();
        const bool hasValidDefault = valueCmp.evaluate(defaultValue < lowerValue) ||
            valueCmp.evaluate(defaultValue >= upperValue);
        uassert(40199,
                "The $bucket 'default' field must be less than the lowest boundary or greater than "
                "or equal to the highest boundary.",
                hasValidDefault);
    }

    // Make the branches for the $switch expression.
    BSONArrayBuilder branchesBuilder;
    for (size_t i = 1; i < boundaryValues.size(); ++i) {
        Value lower = boundaryValues[i - 1];
        Value upper = boundaryValues[i];
        BSONObj caseExpr =
            BSON("$and" << BSON_ARRAY(BSON("$gte" << BSON_ARRAY(groupByField << lower))
                                      << BSON("$lt" << BSON_ARRAY(groupByField << upper))));
        branchesBuilder.append(BSON("case" << caseExpr << "then" << lower));
    }

    // Add the $switch expression to the group BSON object.
    switchObjBuilder.append("branches", branchesBuilder.arr());
    groupObjBuilder.append("_id", BSON("$switch" << switchObjBuilder.obj()));

    // If no output is specified, add a count field by default.
    if (!outputFieldSpecified) {
        groupObjBuilder.append("count", BSON("$sum" << 1));
    }

    BSONObj groupObj = BSON("$group" << groupObjBuilder.obj());
    BSONObj sortObj = BSON("$sort" << BSON("_id" << 1));

    auto groupSource = DocumentSourceGroup::createFromBson(groupObj.firstElement(), pExpCtx);
    auto sortSource = DocumentSourceSort::createFromBson(sortObj.firstElement(), pExpCtx);

    return {groupSource, sortSource};
}
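A minimal sketch of a spec this parser accepts, not from the original file; the field names and values below are hypothetical.

// Hypothetical $bucket spec: $-prefixed groupBy, sorted numeric boundaries,
// a default outside them, and an explicit output block.
BSONObj bucketSpec = fromjson(
    "{$bucket: {"
    "    groupBy: '$price',"
    "    boundaries: [0, 100, 500],"
    "    default: 'other',"
    "    output: {count: {$sum: 1}, avgPrice: {$avg: '$price'}}"
    "}}");
// createFromBson(bucketSpec.firstElement(), expCtx) desugars this into a
// $group stage whose _id is a $switch over the boundary ranges, followed by
// {$sort: {_id: 1}}.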
Example #27
    // static
    void IndexBoundsBuilder::translate(const MatchExpression* expr, const BSONElement& elt,
                                       OrderedIntervalList* oilOut, bool* exactOut) {
        int direction = (elt.numberInt() >= 0) ? 1 : -1;

        Interval interval;
        bool exact = false;
        oilOut->name = elt.fieldName();

        bool isHashed = false;
        if (mongoutils::str::equals("hashed", elt.valuestrsafe())) {
            isHashed = true;
        }

        if (isHashed) {
            verify(MatchExpression::EQ == expr->matchType()
                   || MatchExpression::MATCH_IN == expr->matchType());
        }

        if (MatchExpression::EQ == expr->matchType()) {
            const EqualityMatchExpression* node =
                static_cast<const EqualityMatchExpression*>(expr);

            // We have to copy the data out of the parse tree and stuff it into the index
            // bounds.  BSONValue will be useful here.
            BSONObj dataObj;

            if (isHashed) {
                dataObj = ExpressionMapping::hash(node->getData());
            }
            else {
                dataObj = objFromElement(node->getData());
            }

            // UNITTEST 11738048
            if (Array == dataObj.firstElement().type()) {
                // XXX: build better bounds
                warning() << "building lazy bounds for " << expr->toString() << endl;
                interval = allValues();
                exact = false;
            }
            else {
                verify(dataObj.isOwned());
                interval = makePointInterval(dataObj);
                // XXX: it's exact if the index isn't sparse
                if (dataObj.firstElement().isNull()) {
                    exact = false;
                }
                else if (isHashed) {
                    exact = false;
                }
                else {
                    exact = true;
                }
            }
        }
        else if (MatchExpression::LTE == expr->matchType()) {
            const LTEMatchExpression* node = static_cast<const LTEMatchExpression*>(expr);
            BSONElement dataElt = node->getData();
            BSONObjBuilder bob;
            bob.appendMinForType("", dataElt.type());
            bob.append(dataElt);
            BSONObj dataObj = bob.obj();
            verify(dataObj.isOwned());
            interval = makeRangeInterval(dataObj, true, true);
            // XXX: only exact if not (null or array)
            exact = true;
        }
        else if (MatchExpression::LT == expr->matchType()) {
            const LTMatchExpression* node = static_cast<const LTMatchExpression*>(expr);
            BSONElement dataElt = node->getData();
            BSONObjBuilder bob;
            bob.appendMinForType("", dataElt.type());
            bob.append(dataElt);
            BSONObj dataObj = bob.obj();
            verify(dataObj.isOwned());
            interval = makeRangeInterval(dataObj, true, false);
            // XXX: only exact if not (null or array)
            exact = true;
        }
        else if (MatchExpression::GT == expr->matchType()) {
            const GTMatchExpression* node = static_cast<const GTMatchExpression*>(expr);
            BSONElement dataElt = node->getData();
            BSONObjBuilder bob;
            bob.append(node->getData());
            bob.appendMaxForType("", dataElt.type());
            BSONObj dataObj = bob.obj();
            verify(dataObj.isOwned());
            interval = makeRangeInterval(dataObj, false, true);
            // XXX: only exact if not (null or array)
            exact = true;
        }
        else if (MatchExpression::GTE == expr->matchType()) {
            const GTEMatchExpression* node = static_cast<const GTEMatchExpression*>(expr);
            BSONElement dataElt = node->getData();

            BSONObjBuilder bob;
            bob.append(dataElt);
            bob.appendMaxForType("", dataElt.type());
            BSONObj dataObj = bob.obj();
            verify(dataObj.isOwned());
            interval = makeRangeInterval(dataObj, true, true);
            // XXX: only exact if not (null or array)
            exact = true;
        }
        else if (MatchExpression::REGEX == expr->matchType()) {
            warning() << "building lazy bounds for " << expr->toString() << endl;
            interval = allValues();
            exact = false;
        }
        else if (MatchExpression::MOD == expr->matchType()) {
            BSONObjBuilder bob;
            bob.appendMinForType("", NumberDouble);
            bob.appendMaxForType("", NumberDouble);
            BSONObj dataObj = bob.obj();
            verify(dataObj.isOwned());
            interval = makeRangeInterval(dataObj, true, true);
            exact = false;
        }
        else if (MatchExpression::MATCH_IN == expr->matchType()) {
            warning() << "building lazy bounds for " << expr->toString() << endl;
            interval = allValues();
            exact = false;
        }
        else if (MatchExpression::TYPE_OPERATOR == expr->matchType()) {
            const TypeMatchExpression* tme = static_cast<const TypeMatchExpression*>(expr);
            BSONObjBuilder bob;
            bob.appendMinForType("", tme->getData());
            bob.appendMaxForType("", tme->getData());
            BSONObj dataObj = bob.obj();
            verify(dataObj.isOwned());
            interval = makeRangeInterval(dataObj, true, true);
            exact = false;
        }
        else if (MatchExpression::MATCH_IN == expr->matchType()) {
            warning() << "building lazy bounds for " << expr->toString() << endl;
            interval = allValues();
            exact = false;
        }
        else if (MatchExpression::GEO == expr->matchType()) {
            const GeoMatchExpression* gme = static_cast<const GeoMatchExpression*>(expr);
            // Can only do this for 2dsphere.
            if (!mongoutils::str::equals("2dsphere", elt.valuestrsafe())) {
                warning() << "Planner error trying to build geo bounds for " << elt.toString()
                          << " index element.";
                verify(0);
            }

            const S2Region& region = gme->getGeoQuery().getRegion();
            ExpressionMapping::cover2dsphere(region, oilOut);
            *exactOut = false;
            // XXX: restructure this method
            return;
        }
        else {
            warning() << "Planner error, trying to build bounds for expr "
                      << expr->toString() << endl;
            verify(0);
        }

        if (-1 == direction) {
            reverseInterval(&interval);
        }

        oilOut->intervals.push_back(interval);
        *exactOut = exact;
    }
Example #28
    // static
    bool QueryPlannerIXSelect::compatible(const BSONElement& elt,
                                          const IndexEntry& index,
                                          MatchExpression* node) {
        // Historically one could create indices with any particular value for the index spec,
        // including values that now indicate a special index.  As such we have to make sure the
        // index type wasn't overridden before we pay attention to the string in the index key
        // pattern element.
        //
        // e.g. long ago we could have created an index {a: "2dsphere"} and it would
        // be treated as a btree index by an ancient version of MongoDB.  To try to run
        // 2dsphere queries over it would be folly.
        string indexedFieldType;
        if (String != elt.type() || (INDEX_BTREE == index.type)) {
            indexedFieldType = "";
        }
        else {
            indexedFieldType = elt.String();
        }

        // We know elt.fieldname() == node->path().
        MatchExpression::MatchType exprtype = node->matchType();

        if (indexedFieldType.empty()) {
            // Can't check for null w/a sparse index.
            if (exprtype == MatchExpression::EQ && index.sparse) {
                const EqualityMatchExpression* expr
                    = static_cast<const EqualityMatchExpression*>(node);
                if (expr->getData().isNull()) {
                    return false;
                }
            }

            // We can't use a btree-indexed field for geo expressions.
            if (exprtype == MatchExpression::GEO || exprtype == MatchExpression::GEO_NEAR) {
                return false;
            }

            // There are restrictions on when we can use the index if
            // the expression is a NOT.
            if (exprtype == MatchExpression::NOT) {
                // Prevent negated preds from using sparse or
                // multikey indices. We do so for sparse indices because
                // we will fail to return the documents which do not contain
                // the indexed fields.
                //
                // We avoid multikey indices because of the semantics of
                // negations on multikey fields. For example, with multikey
                // index {a:1}, the document {a: [1,2,3]} does *not* match
                // the query {a: {$ne: 3}}. We'd mess this up if we used
                // an index scan over [MinKey, 3) and (3, MaxKey] without
                // a filter.
                if (index.sparse || index.multikey) {
                    return false;
                }
                // Can't index negations of MOD or REGEX
                MatchExpression::MatchType childtype = node->getChild(0)->matchType();
                if (MatchExpression::REGEX == childtype ||
                    MatchExpression::MOD == childtype) {
                    return false;
                }
            }

            // We can only index EQ using text indices.  This is an artificial limitation imposed by
            // FTSSpec::getIndexPrefix() which will fail if there is not an EQ predicate on each
            // index prefix field of the text index.
            //
            // Example for key pattern {a: 1, b: "text"}:
            // - Allowed: node = {a: 7}
            // - Not allowed: node = {a: {$gt: 7}}

            if (INDEX_TEXT != index.type) {
                return true;
            }

            // If we're here we know it's a text index.  Equalities are OK anywhere in a text index.
            if (MatchExpression::EQ == exprtype) {
                return true;
            }

            // Not-equalities can only go in a suffix field of an index kp.  We look through the key
            // pattern to see if the field we're looking at now appears as a prefix.  If so, we
            // can't use this index for it.
            BSONObjIterator specIt(index.keyPattern);
            while (specIt.more()) {
                BSONElement elt = specIt.next();
                // We hit the dividing mark between prefix and suffix, so whatever field we're
                // looking at is a suffix, since it appears *after* the dividing mark between the
                // two.  As such, we can use the index.
                if (String == elt.type()) {
                    return true;
                }

                // If we're here, we're still looking at prefix elements.  We know that exprtype
                // isn't EQ so we can't use this index.
                if (node->path() == elt.fieldNameStringData()) {
                    return false;
                }
            }

            // NOTE: This shouldn't be reached.  Text index implies there is a separator implies we
            // will always hit the 'return true' above.
            invariant(0);
            return true;
        }
        else if (IndexNames::HASHED == indexedFieldType) {
            return exprtype == MatchExpression::MATCH_IN || exprtype == MatchExpression::EQ;
        }
        else if (IndexNames::GEO_2DSPHERE == indexedFieldType) {
            if (exprtype == MatchExpression::GEO) {
                // within or intersect.
                GeoMatchExpression* gme = static_cast<GeoMatchExpression*>(node);
                const GeoQuery& gq = gme->getGeoQuery();
                const GeometryContainer& gc = gq.getGeometry();
                return gc.hasS2Region();
            }
            else if (exprtype == MatchExpression::GEO_NEAR) {
                GeoNearMatchExpression* gnme = static_cast<GeoNearMatchExpression*>(node);
                // Make sure the near query is compatible with 2dsphere.
                if (gnme->getData().centroid.crs == SPHERE || gnme->getData().isNearSphere) {
                    return true;
                }
            }
            return false;
        }
        else if (IndexNames::GEO_2D == indexedFieldType) {
            if (exprtype == MatchExpression::GEO_NEAR) {
                GeoNearMatchExpression* gnme = static_cast<GeoNearMatchExpression*>(node);
                return gnme->getData().centroid.crs == FLAT;
            }
            else if (exprtype == MatchExpression::GEO) {
                // 2d only supports within.
                GeoMatchExpression* gme = static_cast<GeoMatchExpression*>(node);
                const GeoQuery& gq = gme->getGeoQuery();
                if (GeoQuery::WITHIN != gq.getPred()) {
                    return false;
                }

                const GeometryContainer& gc = gq.getGeometry();

                // 2d indices answer flat queries.
                if (gc.hasFlatRegion()) {
                    return true;
                }

                // 2d indices can answer centerSphere queries.
                if (NULL == gc._cap.get()) {
                    return false;
                }

                verify(SPHERE == gc._cap->crs);
                const Circle& circle = gc._cap->circle;

                // No wrapping around the edge of the world is allowed in 2d centerSphere.
                return twoDWontWrap(circle, index);
            }
            return false;
        }
        else if (IndexNames::TEXT == indexedFieldType) {
            return (exprtype == MatchExpression::TEXT);
        }
        else if (IndexNames::GEO_HAYSTACK == indexedFieldType) {
            return false;
        }
        else {
            warning() << "Unknown indexing for node " << node->toString()
                      << " and field " << elt.toString() << endl;
            verify(0);
        }
    }
Example #29
// static
Status ParsedProjection::make(const BSONObj& spec,
                              const MatchExpression* const query,
                              ParsedProjection** out,
                              const MatchExpressionParser::WhereCallback& whereCallback) {
    // Are we including or excluding fields?  Values:
    // -1 when we haven't initialized it.
    // 1 when we're including
    // 0 when we're excluding.
    int include_exclude = -1;

    // If any of these are 'true' the projection isn't covered.
    bool include = true;
    bool hasNonSimple = false;
    bool hasDottedField = false;

    bool includeID = true;

    bool hasIndexKeyProjection = false;

    bool wantGeoNearPoint = false;
    bool wantGeoNearDistance = false;

    // Until we see a positional or elemMatch operator we're normal.
    ArrayOpType arrayOpType = ARRAY_OP_NORMAL;

    BSONObjIterator it(spec);
    while (it.more()) {
        BSONElement e = it.next();

        if (!e.isNumber() && !e.isBoolean()) {
            hasNonSimple = true;
        }

        if (Object == e.type()) {
            BSONObj obj = e.embeddedObject();
            if (1 != obj.nFields()) {
                return Status(ErrorCodes::BadValue, ">1 field in obj: " + obj.toString());
            }

            BSONElement e2 = obj.firstElement();
            if (mongoutils::str::equals(e2.fieldName(), "$slice")) {
                if (e2.isNumber()) {
                    // This is A-OK.
                } else if (e2.type() == Array) {
                    BSONObj arr = e2.embeddedObject();
                    if (2 != arr.nFields()) {
                        return Status(ErrorCodes::BadValue, "$slice array wrong size");
                    }

                    BSONObjIterator it(arr);
                    // Skip over 'skip'.
                    it.next();
                    int limit = it.next().numberInt();
                    if (limit <= 0) {
                        return Status(ErrorCodes::BadValue, "$slice limit must be positive");
                    }
                } else {
                    return Status(ErrorCodes::BadValue,
                                  "$slice only supports numbers and [skip, limit] arrays");
                }
            } else if (mongoutils::str::equals(e2.fieldName(), "$elemMatch")) {
                // Validate $elemMatch arguments and dependencies.
                if (Object != e2.type()) {
                    return Status(ErrorCodes::BadValue,
                                  "elemMatch: Invalid argument, object required.");
                }

                if (ARRAY_OP_POSITIONAL == arrayOpType) {
                    return Status(ErrorCodes::BadValue,
                                  "Cannot specify positional operator and $elemMatch.");
                }

                if (mongoutils::str::contains(e.fieldName(), '.')) {
                    return Status(ErrorCodes::BadValue,
                                  "Cannot use $elemMatch projection on a nested field.");
                }

                arrayOpType = ARRAY_OP_ELEM_MATCH;

                // Create a MatchExpression for the elemMatch.
                BSONObj elemMatchObj = e.wrap();
                verify(elemMatchObj.isOwned());

                // TODO: Is there a faster way of validating the elemMatchObj?
                StatusWithMatchExpression swme =
                    MatchExpressionParser::parse(elemMatchObj, whereCallback);
                if (!swme.isOK()) {
                    return swme.getStatus();
                }
                delete swme.getValue();
            } else if (mongoutils::str::equals(e2.fieldName(), "$meta")) {
                // Field for meta must be top level.  We can relax this at some point.
                if (mongoutils::str::contains(e.fieldName(), '.')) {
                    return Status(ErrorCodes::BadValue, "field for $meta cannot be nested");
                }

                // Make sure the argument to $meta is something we recognize.
                // e.g. {x: {$meta: "textScore"}}
                if (String != e2.type()) {
                    return Status(ErrorCodes::BadValue, "unexpected argument to $meta in proj");
                }

                if (e2.valuestr() != LiteParsedQuery::metaTextScore &&
                    e2.valuestr() != LiteParsedQuery::metaRecordId &&
                    e2.valuestr() != LiteParsedQuery::metaIndexKey &&
                    e2.valuestr() != LiteParsedQuery::metaGeoNearDistance &&
                    e2.valuestr() != LiteParsedQuery::metaGeoNearPoint) {
                    return Status(ErrorCodes::BadValue, "unsupported $meta operator: " + e2.str());
                }

                // This clobbers everything else.
                if (e2.valuestr() == LiteParsedQuery::metaIndexKey) {
                    hasIndexKeyProjection = true;
                } else if (e2.valuestr() == LiteParsedQuery::metaGeoNearDistance) {
                    wantGeoNearDistance = true;
                } else if (e2.valuestr() == LiteParsedQuery::metaGeoNearPoint) {
                    wantGeoNearPoint = true;
                }
            } else {
                return Status(ErrorCodes::BadValue,
                              string("Unsupported projection option: ") + e.toString());
            }
        } else if (mongoutils::str::equals(e.fieldName(), "_id") && !e.trueValue()) {
            includeID = false;
        } else {
            // Projections of dotted fields aren't covered.
            if (mongoutils::str::contains(e.fieldName(), '.')) {
                hasDottedField = true;
            }

            // Validate input.
            if (include_exclude == -1) {
                // If we haven't specified an include/exclude, initialize include_exclude.
                // We expect further include/excludes to match it.
                include_exclude = e.trueValue();
                include = !e.trueValue();
            } else if (static_cast<bool>(include_exclude) != e.trueValue()) {
                // Make sure that the incl./excl. matches the previous.
                return Status(ErrorCodes::BadValue,
                              "Projection cannot have a mix of inclusion and exclusion.");
            }
        }


        if (_isPositionalOperator(e.fieldName())) {
            // Validate the positional op.
            if (!e.trueValue()) {
                return Status(ErrorCodes::BadValue,
                              "Cannot exclude array elements with the positional operator.");
            }

            if (ARRAY_OP_POSITIONAL == arrayOpType) {
                return Status(ErrorCodes::BadValue,
                              "Cannot specify more than one positional proj. per query.");
            }

            if (ARRAY_OP_ELEM_MATCH == arrayOpType) {
                return Status(ErrorCodes::BadValue,
                              "Cannot specify positional operator and $elemMatch.");
            }

            std::string after = mongoutils::str::after(e.fieldName(), ".$");
            if (mongoutils::str::contains(after, ".$")) {
                mongoutils::str::stream ss;
                ss << "Positional projection '" << e.fieldName() << "' contains "
                   << "the positional operator more than once.";
                return Status(ErrorCodes::BadValue, ss);
            }

            std::string matchfield = mongoutils::str::before(e.fieldName(), '.');
            if (!_hasPositionalOperatorMatch(query, matchfield)) {
                mongoutils::str::stream ss;
                ss << "Positional projection '" << e.fieldName() << "' does not "
                   << "match the query document.";
                return Status(ErrorCodes::BadValue, ss);
            }

            arrayOpType = ARRAY_OP_POSITIONAL;
        }
    }

    // Fill out the returned obj.
    unique_ptr<ParsedProjection> pp(new ParsedProjection());

    // The positional operator uses the MatchDetails from the query
    // expression to know which array element was matched.
    pp->_requiresMatchDetails = arrayOpType == ARRAY_OP_POSITIONAL;

    // Save the raw spec.  It should be owned by the LiteParsedQuery.
    verify(spec.isOwned());
    pp->_source = spec;
    pp->_returnKey = hasIndexKeyProjection;

    // Dotted fields aren't covered, non-simple require match details, and as for include, "if
    // we default to including then we can't use an index because we don't know what we're
    // missing."
    pp->_requiresDocument = include || hasNonSimple || hasDottedField;

    // Add geoNear projections.
    pp->_wantGeoNearPoint = wantGeoNearPoint;
    pp->_wantGeoNearDistance = wantGeoNearDistance;

    // If it's possible to compute the projection in a covered fashion, populate _requiredFields
    // so the planner can perform projection analysis.
    if (!pp->_requiresDocument) {
        if (includeID) {
            pp->_requiredFields.push_back("_id");
        }

        // The only way we could be here is if spec is only simple non-dotted-field projections.
        // Therefore we can iterate over spec to get the fields required.
        BSONObjIterator srcIt(spec);
        while (srcIt.more()) {
            BSONElement elt = srcIt.next();
            // We've already handled the _id field before entering this loop.
            if (includeID && mongoutils::str::equals(elt.fieldName(), "_id")) {
                continue;
            }
            if (elt.trueValue()) {
                pp->_requiredFields.push_back(elt.fieldName());
            }
        }
    }

    // returnKey clobbers everything.
    if (hasIndexKeyProjection) {
        pp->_requiresDocument = false;
    }

    *out = pp.release();
    return Status::OK();
}
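
The $slice branch above accepts either a single number or a two-element [skip, limit] array. Below is a minimal standalone restatement of that check, using only the BSONElement/BSONObjIterator calls already seen in the snippet; the helper name validateSliceArg is hypothetical and not part of the MongoDB tree.

static Status validateSliceArg(const BSONElement& e2) {
    if (e2.isNumber()) {
        // {field: {$slice: N}} -- a bare count is accepted as-is.
        return Status::OK();
    }
    if (Array != e2.type()) {
        return Status(ErrorCodes::BadValue,
                      "$slice only supports numbers and [skip, limit] arrays");
    }
    BSONObj arr = e2.embeddedObject();
    if (2 != arr.nFields()) {
        return Status(ErrorCodes::BadValue, "$slice array wrong size");
    }
    BSONObjIterator it(arr);
    it.next();                         // skip over 'skip'
    if (it.next().numberInt() <= 0) {  // 'limit' must be strictly positive
        return Status(ErrorCodes::BadValue, "$slice limit must be positive");
    }
    return Status::OK();
}
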
// Parses a rolesInfo command document of the form
// {rolesInfo: 1 | <role> | [<roles>], showPrivileges: ..., showAuthenticationRestrictions: ..., showBuiltinRoles: ...}.
Status parseRolesInfoCommand(const BSONObj& cmdObj, StringData dbname, RolesInfoArgs* parsedArgs) {
    unordered_set<std::string> validFieldNames;
    validFieldNames.insert("rolesInfo");
    validFieldNames.insert("showPrivileges");
    validFieldNames.insert("showAuthenticationRestrictions");
    validFieldNames.insert("showBuiltinRoles");

    Status status = _checkNoExtraFields(cmdObj, "rolesInfo", validFieldNames);
    if (!status.isOK()) {
        return status;
    }

    // 'rolesInfo' may be the number 1 (all roles on dbname), an array of role names,
    // or a single role name.
    if (cmdObj["rolesInfo"].numberInt() == 1) {
        parsedArgs->allForDB = true;
    } else if (cmdObj["rolesInfo"].type() == Array) {
        status = parseRoleNamesFromBSONArray(
            BSONArray(cmdObj["rolesInfo"].Obj()), dbname, &parsedArgs->roleNames);
        if (!status.isOK()) {
            return status;
        }
    } else {
        RoleName name;
        status = _parseNameFromBSONElement(cmdObj["rolesInfo"],
                                           dbname,
                                           AuthorizationManager::ROLE_NAME_FIELD_NAME,
                                           AuthorizationManager::ROLE_DB_FIELD_NAME,
                                           &name);
        if (!status.isOK()) {
            return status;
        }
        parsedArgs->roleNames.push_back(name);
    }

    // 'showPrivileges' may be omitted, a boolean/number, or the string "asUserFragment".
    BSONElement showPrivileges = cmdObj["showPrivileges"];
    if (showPrivileges.eoo()) {
        parsedArgs->privilegeFormat = PrivilegeFormat::kOmit;
    } else if (showPrivileges.isNumber() || showPrivileges.isBoolean()) {
        parsedArgs->privilegeFormat =
            showPrivileges.trueValue() ? PrivilegeFormat::kShowSeparate : PrivilegeFormat::kOmit;
    } else if (showPrivileges.type() == BSONType::String &&
               showPrivileges.String() == "asUserFragment") {
        parsedArgs->privilegeFormat = PrivilegeFormat::kShowAsUserFragment;
    } else {
        return Status(ErrorCodes::FailedToParse,
                      str::stream() << "Failed to parse 'showPrivileges'. 'showPrivileges' should "
                                       "either be a boolean or the string 'asUserFragment', given: "
                                    << showPrivileges.toString());
    }

    const auto showAuthenticationRestrictions = cmdObj["showAuthenticationRestrictions"];
    if (showAuthenticationRestrictions.eoo()) {
        parsedArgs->authenticationRestrictionsFormat = AuthenticationRestrictionsFormat::kOmit;
    } else if (parsedArgs->privilegeFormat == PrivilegeFormat::kShowAsUserFragment) {
        return Status(
            ErrorCodes::UnsupportedFormat,
            "showAuthenticationRestrictions may not be used with showPrivileges='asUserFragment'");
    } else {
        bool show;
        status = bsonExtractBooleanField(cmdObj, "showAuthenticationRestrictions", &show);
        if (!status.isOK()) {
            return status;
        }
        parsedArgs->authenticationRestrictionsFormat = show
            ? AuthenticationRestrictionsFormat::kShow
            : AuthenticationRestrictionsFormat::kOmit;
    }

    status = bsonExtractBooleanFieldWithDefault(
        cmdObj, "showBuiltinRoles", false, &parsedArgs->showBuiltinRoles);
    if (!status.isOK()) {
        return status;
    }

    return Status::OK();
}
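
A hedged usage sketch (not from the MongoDB tree) showing how the parser above might be exercised; the scaffolding and the expected values noted in the comments are assumptions based on the branches shown in the snippet.

BSONObj cmd = BSON("rolesInfo" << "readWrite"
                               << "showPrivileges" << true
                               << "showBuiltinRoles" << true);
RolesInfoArgs args;
Status s = parseRolesInfoCommand(cmd, "admin", &args);
// Expected on success, per the branches above:
//   args.roleNames                        -> one entry, role "readWrite" on db "admin"
//   args.privilegeFormat                  -> PrivilegeFormat::kShowSeparate
//   args.showBuiltinRoles                 -> true
//   args.authenticationRestrictionsFormat -> AuthenticationRestrictionsFormat::kOmit
//                                            (field omitted, so the kOmit default applies)
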