// Populates this MetaData from its serialized catalog document.
// Expected layout: { ns: <string>, options: <obj>,
//                    indexes: [ { spec, ready, head|head_a/head_b, multikey }, ... ] }.
void BSONCollectionCatalogEntry::MetaData::parse(const BSONObj& obj) {
    ns = obj["ns"].valuestrsafe();

    if (obj["options"].isABSONObj()) {
        options.parse(obj["options"].Obj());
    }

    BSONElement e = obj["indexes"];
    if (e.isABSONObj()) {
        std::vector<BSONElement> entries = e.Array();
        for (unsigned i = 0; i < entries.size(); i++) {
            BSONObj idx = entries[i].Obj();
            IndexMetaData imd;
            // getOwned(): copy the spec out of 'obj', whose buffer may not
            // outlive this MetaData.
            imd.spec = idx["spec"].Obj().getOwned();
            imd.ready = idx["ready"].trueValue();
            if (idx.hasField("head")) {
                // Current format: single 64-bit value for the index head.
                imd.head = RecordId(idx["head"].Long());
            } else {
                // Fallback format: (head_a, head_b) int pair — presumably
                // written by an older version; TODO confirm.
                imd.head = RecordId(idx["head_a"].Int(), idx["head_b"].Int());
            }
            imd.multikey = idx["multikey"].trueValue();
            indexes.push_back(imd);
        }
    }
}
// Returns true if 'obj' looks like a structurally valid GeoJSON Polygon:
// { type: "Polygon", coordinates: [ <ring>, ... ] }, where every ring is an
// array of at least 4 coordinate pairs forming a closed loop, and the CRS
// (if present) passes crsIsOK.
bool GeoParser::isGeoJSONPolygon(const BSONObj& obj) {
    BSONElement type = obj.getFieldDotted(GEOJSON_TYPE);
    if (type.eoo() || (String != type.type())) {
        return false;
    }
    if (GEOJSON_TYPE_POLYGON != type.String()) {
        return false;
    }

    if (!crsIsOK(obj)) {
        warning() << "Invalid CRS: " << obj.toString() << endl;
        return false;
    }

    BSONElement coordElt = obj.getFieldDotted(GEOJSON_COORDINATES);
    if (coordElt.eoo() || (Array != coordElt.type())) {
        return false;
    }

    const vector<BSONElement>& coordinates = coordElt.Array();
    // Must be at least one element, the outer shell
    if (coordinates.empty()) {
        return false;
    }
    // Verify that the shell is a bunch'a coordinates.
    for (size_t i = 0; i < coordinates.size(); ++i) {
        if (Array != coordinates[i].type()) {
            return false;
        }
        const vector<BSONElement>& thisLoop = coordinates[i].Array();
        // A triangle is the simplest 2d shape, and we repeat a vertex, so, 4.
        if (thisLoop.size() < 4) {
            return false;
        }
        if (!isArrayOfCoordinates(thisLoop)) {
            return false;
        }
        if (!isLoopClosed(thisLoop)) {
            return false;
        }
    }
    return true;
}
// Visitor-pattern walk over one BSON element.
//  - Objects delegate to parseObjectRecursive.
//  - Arrays push an ARRAY frame on 'stack', call visitor.onArrayStart, recurse
//    into each member, then call visitor.onArrayEnd and pop the frame.
//  - Everything else pushes an ELEMENT frame, calls visitor.onElement, pops.
// elementIndex/elementCount give this element's position within its enclosing
// object; arrayIndex/arrayCount give its position within an enclosing array
// (-1/0 when the element is not an array member).
virtual void parseElementRecursive(const BSONElement& element, string& key,
                                   int elementIndex=0, int elementCount=1,
                                   int arrayIndex = -1, int arrayCount = 0) {
    BSONType btype = element.type();
    switch (btype) {
        case BSONType::Object: {
            const BSONObj& bobj = element.Obj();
            string k(element.fieldName());
            parseObjectRecursive(bobj, k, elementIndex, elementCount, arrayIndex, arrayCount);
        } break;
        case BSONType::Array: {
            stack.push(BSONParserStackItem::ItemType::ARRAY, element, key,
                       elementIndex, elementCount, arrayIndex, arrayCount);
            std::vector<BSONElement> elementArray = element.Array();
            int elementArrayCount = elementArray.size();
            visitor.onArrayStart(stack);
            int elementArrayIndex = 0;
            for (BSONElement e : elementArray) {
                string k(e.fieldName());
                // Members keep the parent's element indices; only the
                // array-position arguments change.
                parseElementRecursive(e, k, elementIndex, elementCount,
                                      elementArrayIndex++, elementArrayCount);
            }
            visitor.onArrayEnd(stack);
            stack.drop();
        } break;
        default: {
            stack.push(BSONParserStackItem::ItemType::ELEMENT, element, key,
                       elementIndex, elementCount, arrayIndex, arrayCount);
            visitor.onElement(stack);
            stack.drop();
        } break;
    }
}
// { "type": "GeometryCollection",
//   "geometries": [
//     { "type": "Point",
//       "coordinates": [100.0, 0.0]
//     },
//     { "type": "LineString",
//       "coordinates": [ [101.0, 0.0], [102.0, 1.0] ]
//     }
//   ]
// }
// Parses a GeoJSON GeometryCollection into 'out', dispatching each member to
// the matching per-type parser.  Nested GeometryCollections are rejected.
// 'skipValidation' is forwarded to the parsers that accept it.
Status GeoParser::parseGeometryCollection(const BSONObj& obj,
                                          bool skipValidation,
                                          GeometryCollection* out) {
    BSONElement coordElt = obj.getFieldDotted(GEOJSON_GEOMETRIES);
    if (Array != coordElt.type())
        return BAD_VALUE("GeometryCollection geometries must be an array");

    const vector<BSONElement>& geometries = coordElt.Array();
    if (0 == geometries.size())
        return BAD_VALUE("GeometryCollection geometries must have at least 1 element");

    for (size_t i = 0; i < geometries.size(); ++i) {
        if (Object != geometries[i].type())
            return BAD_VALUE("Element " << i << " of \"geometries\" is not an object");

        const BSONObj& geoObj = geometries[i].Obj();
        GeoJSONType type = parseGeoJSONType(geoObj);

        if (GEOJSON_UNKNOWN == type)
            return BAD_VALUE("Unknown GeoJSON type: " << geometries[i].toString(false));

        if (GEOJSON_GEOMETRY_COLLECTION == type)
            return BAD_VALUE(
                "GeometryCollections cannot be nested: " << geometries[i].toString(false));

        Status status = Status::OK();
        if (GEOJSON_POINT == type) {
            out->points.resize(out->points.size() + 1);
            status = parseGeoJSONPoint(geoObj, &out->points.back());
        } else if (GEOJSON_LINESTRING == type) {
            // OwnedPointerVector-style members: push a new element, then parse
            // directly into it.
            out->lines.mutableVector().push_back(new LineWithCRS());
            status = parseGeoJSONLine(geoObj, skipValidation, out->lines.vector().back());
        } else if (GEOJSON_POLYGON == type) {
            out->polygons.mutableVector().push_back(new PolygonWithCRS());
            status = parseGeoJSONPolygon(geoObj, skipValidation, out->polygons.vector().back());
        } else if (GEOJSON_MULTI_POINT == type) {
            out->multiPoints.mutableVector().push_back(new MultiPointWithCRS());
            status = parseMultiPoint(geoObj, out->multiPoints.mutableVector().back());
        } else if (GEOJSON_MULTI_LINESTRING == type) {
            out->multiLines.mutableVector().push_back(new MultiLineWithCRS());
            status = parseMultiLine(geoObj, skipValidation, out->multiLines.mutableVector().back());
        } else if (GEOJSON_MULTI_POLYGON == type) {
            out->multiPolygons.mutableVector().push_back(new MultiPolygonWithCRS());
            status = parseMultiPolygon(
                geoObj, skipValidation, out->multiPolygons.mutableVector().back());
        } else {
            // Should not reach here.
            invariant(false);
        }

        // Check parsing result.
        if (!status.isOK())
            return status;
    }

    return Status::OK();
}
// static Status SetFilter::set(OperationContext* opCtx, QuerySettings* querySettings, PlanCache* planCache, const string& ns, const BSONObj& cmdObj) { // indexes - required BSONElement indexesElt = cmdObj.getField("indexes"); if (indexesElt.eoo()) { return Status(ErrorCodes::BadValue, "required field indexes missing"); } if (indexesElt.type() != mongo::Array) { return Status(ErrorCodes::BadValue, "required field indexes must be an array"); } vector<BSONElement> indexesEltArray = indexesElt.Array(); if (indexesEltArray.empty()) { return Status(ErrorCodes::BadValue, "required field indexes must contain at least one index"); } BSONObjSet indexes = SimpleBSONObjComparator::kInstance.makeBSONObjSet(); stdx::unordered_set<std::string> indexNames; for (vector<BSONElement>::const_iterator i = indexesEltArray.begin(); i != indexesEltArray.end(); ++i) { const BSONElement& elt = *i; if (elt.type() == BSONType::Object) { BSONObj obj = elt.Obj(); if (obj.isEmpty()) { return Status(ErrorCodes::BadValue, "index specification cannot be empty"); } indexes.insert(obj.getOwned()); } else if (elt.type() == BSONType::String) { indexNames.insert(elt.String()); } else { return Status(ErrorCodes::BadValue, "each item in indexes must be an object or string"); } } auto statusWithCQ = PlanCacheCommand::canonicalize(opCtx, ns, cmdObj); if (!statusWithCQ.isOK()) { return statusWithCQ.getStatus(); } unique_ptr<CanonicalQuery> cq = std::move(statusWithCQ.getValue()); // Add allowed indices to query settings, overriding any previous entries. querySettings->setAllowedIndices(*cq, planCache->computeKey(*cq), indexes, indexNames); // Remove entry from plan cache. planCache->remove(*cq).transitional_ignore(); LOG(0) << "Index filter set on " << ns << " " << redact(cq->toStringShort()) << " " << indexesElt; return Status::OK(); }
/* ****************************************************************************
*
* compoundVectorResponse -
*
* Marks 'cvP' as a vector node and appends one child compound node for every
* element of the BSON array held in 'be'.
*/
void compoundVectorResponse(orion::CompoundValueNode* cvP, const BSONElement& be)
{
  std::vector<BSONElement>  children = be.Array();

  cvP->valueType = orion::ValueTypeVector;

  for (std::vector<BSONElement>::size_type ix = 0; ix != children.size(); ++ix)
  {
    BSONElement child = children[ix];

    addCompoundNode(cvP, child);
  }
}
bool CMongodbModel::GetArrayFieldValue(const BSONObj& boInfo, string strFieldName, std::vector<BSONElement>& vtValue) { bool bResult = false; if (boInfo.hasField(strFieldName)) { BSONElement beInfo = boInfo[strFieldName]; vtValue = beInfo.Array(); bResult = true; } return bResult; }
// Fills 'out' from a GeoJSON MultiPoint document.
// NOTE(review): performs no validation of its own — 'coordinates' is assumed
// to already be a well-formed array of [lng, lat] pairs (presumably
// guaranteed by a prior isMultiPoint check — TODO confirm); Array() throws
// and thisCoord[0]/[1] misbehave otherwise.
bool GeoParser::parseMultiPoint(const BSONObj &obj, MultiPointWithCRS *out) {
    out->points.clear();
    BSONElement coordElt = obj.getFieldDotted(GEOJSON_COORDINATES);
    const vector<BSONElement>& coordinates = coordElt.Array();
    out->points.resize(coordinates.size());
    out->cells.resize(coordinates.size());
    for (size_t i = 0; i < coordinates.size(); ++i) {
        // Each entry is itself an array: [longitude, latitude].
        const vector<BSONElement>& thisCoord = coordinates[i].Array();
        out->points[i] = coordToPoint(thisCoord[0].Number(), thisCoord[1].Number());
        // Build the S2Cell for each parsed point alongside it.
        out->cells[i] = S2Cell(out->points[i]);
    }
    return true;
}
// Authorization check for the applyOps command: superuser-only shapes require
// the universal privilege set; otherwise every individual op and every
// precondition namespace is checked separately.
Status checkAuthForApplyOpsCommand(OperationContext* txn,
                                   const std::string& dbname,
                                   const BSONObj& cmdObj) {
    AuthorizationSession* authSession = AuthorizationSession::get(txn->getClient());

    ApplyOpsValidity validity = validateApplyOpsCommand(cmdObj);
    if (validity == ApplyOpsValidity::kNeedsSuperuser) {
        // This command shape is only allowed with the full universal
        // privilege set.
        std::vector<Privilege> universalPrivileges;
        RoleGraph::generateUniversalPrivileges(&universalPrivileges);
        if (!authSession->isAuthorizedForPrivileges(universalPrivileges)) {
            return Status(ErrorCodes::Unauthorized, "Unauthorized");
        }
        return Status::OK();
    }
    fassert(40314, validity == ApplyOpsValidity::kOk);

    boost::optional<DisableDocumentValidation> maybeDisableValidation;
    if (shouldBypassDocumentValidationForCommand(cmdObj))
        maybeDisableValidation.emplace(txn);

    // applyOps upserts by default unless 'alwaysUpsert' is explicitly false.
    const bool alwaysUpsert =
        cmdObj.hasField("alwaysUpsert") ? cmdObj["alwaysUpsert"].trueValue() : true;

    checkBSONType(BSONType::Array, cmdObj.firstElement());
    for (const BSONElement& e : cmdObj.firstElement().Array()) {
        checkBSONType(BSONType::Object, e);
        // Each op in the array must be individually authorized.
        Status status = checkOperationAuthorization(txn, dbname, e.Obj(), alwaysUpsert);
        if (!status.isOK()) {
            return status;
        }
    }

    BSONElement preconditions = cmdObj["preCondition"];
    if (!preconditions.eoo()) {
        // Evaluating preconditions requires 'find' on each referenced namespace.
        for (const BSONElement& precondition : preconditions.Array()) {
            checkBSONType(BSONType::Object, precondition);
            BSONElement nsElem = precondition.Obj()["ns"];
            checkBSONType(BSONType::String, nsElem);
            NamespaceString nss(nsElem.checkAndGetStringData());
            if (!authSession->isAuthorizedForActionsOnResource(
                    ResourcePattern::forExactNamespace(nss), ActionType::find)) {
                return Status(ErrorCodes::Unauthorized, "Unauthorized to check precondition");
            }
        }
    }

    return Status::OK();
}
// Extracts the 'databases' array from a listDatabases reply.
// Returns BadValue when the field is missing or cannot be read as an array.
StatusWith<std::vector<BSONElement>> DatabasesCloner::_parseListDatabasesResponse(
    BSONObj dbResponse) {
    if (!dbResponse.hasField("databases")) {
        return Status(ErrorCodes::BadValue,
                      "The 'listDatabases' response does not contain a 'databases' field.");
    }

    BSONElement databasesElem = dbResponse["databases"];
    try {
        return databasesElem.Array();
    } catch (const AssertionException&) {
        // Array() throws when the element is not of array type.
        return Status(ErrorCodes::BadValue,
                      "The 'listDatabases' response is unable to be transformed into an array.");
    }
}
// static Status SetFilter::set(OperationContext* txn, QuerySettings* querySettings, PlanCache* planCache, const string& ns, const BSONObj& cmdObj) { // indexes - required BSONElement indexesElt = cmdObj.getField("indexes"); if (indexesElt.eoo()) { return Status(ErrorCodes::BadValue, "required field indexes missing"); } if (indexesElt.type() != mongo::Array) { return Status(ErrorCodes::BadValue, "required field indexes must be an array"); } vector<BSONElement> indexesEltArray = indexesElt.Array(); if (indexesEltArray.empty()) { return Status(ErrorCodes::BadValue, "required field indexes must contain at least one index"); } vector<BSONObj> indexes; for (vector<BSONElement>::const_iterator i = indexesEltArray.begin(); i != indexesEltArray.end(); ++i) { const BSONElement& elt = *i; if (!elt.isABSONObj()) { return Status(ErrorCodes::BadValue, "each item in indexes must be an object"); } BSONObj obj = elt.Obj(); if (obj.isEmpty()) { return Status(ErrorCodes::BadValue, "index specification cannot be empty"); } indexes.push_back(obj.getOwned()); } auto statusWithCQ = PlanCacheCommand::canonicalize(txn, ns, cmdObj); if (!statusWithCQ.isOK()) { return statusWithCQ.getStatus(); } unique_ptr<CanonicalQuery> cq = std::move(statusWithCQ.getValue()); // Add allowed indices to query settings, overriding any previous entries. querySettings->setAllowedIndices(*cq, planCache->computeKey(*cq), indexes); // Remove entry from plan cache. planCache->remove(*cq); LOG(0) << "Index filter set on " << ns << " " << cq->toStringShort() << " " << indexesElt; return Status::OK(); }
// Extracts the 'cursors' array from a parallelCollectionScan reply.
// CursorNotFound when the field is absent; FailedToParse when it is not an
// array.
StatusWith<std::vector<BSONElement>> CollectionCloner::_parseParallelCollectionScanResponse(
    BSONObj resp) {
    if (!resp.hasField("cursors")) {
        return Status(ErrorCodes::CursorNotFound,
                      "The 'parallelCollectionScan' response does not contain a 'cursors' field.");
    }

    BSONElement cursorsElem = resp["cursors"];
    if (cursorsElem.type() != BSONType::Array) {
        return Status(
            ErrorCodes::FailedToParse,
            "The 'parallelCollectionScan' response is unable to be transformed into an array.");
    }

    return cursorsElem.Array();
}
static bool isGeoJSONPolygon(const BSONObj& obj) { BSONElement type = obj.getFieldDotted(GEOJSON_TYPE); if (type.eoo() || (String != type.type())) { return false; } if (GEOJSON_TYPE_POLYGON != type.String()) { return false; } if (!GeoParser::crsIsOK(obj)) { warning() << "Invalid CRS: " << obj.toString() << endl; return false; } BSONElement coordElt = obj.getFieldDotted(GEOJSON_COORDINATES); if (coordElt.eoo() || (Array != coordElt.type())) { return false; } return isGeoJSONPolygonCoordinates(coordElt.Array()); }
bool GeoParser::isLine(const BSONObj& obj) { BSONElement type = obj.getFieldDotted(GEOJSON_TYPE); if (type.eoo() || (String != type.type())) { return false; } if (GEOJSON_TYPE_LINESTRING != type.String()) { return false; } if (!crsIsOK(obj)) { warning() << "Invalid CRS: " << obj.toString() << endl; return false; } BSONElement coordElt = obj.getFieldDotted(GEOJSON_COORDINATES); if (coordElt.eoo() || (Array != coordElt.type())) { return false; } return isValidLineString(coordElt.Array()); }
bool GeoParser::isGeoJSONLineString(const BSONObj& obj) { BSONElement type = obj.getFieldDotted(GEOJSON_TYPE); if (type.eoo() || (String != type.type())) { return false; } if (GEOJSON_TYPE_LINESTRING != type.String()) { return false; } if (!crsIsOK(obj)) { warning() << "Invalid CRS: " << obj.toString() << endl; return false; } BSONElement coordElt = obj.getFieldDotted(GEOJSON_COORDINATES); if (coordElt.eoo() || (Array != coordElt.type())) { return false; } const vector<BSONElement>& coordinateArray = coordElt.Array(); if (coordinateArray.size() < 2) { return false; } return isArrayOfCoordinates(coordinateArray); }
// Parses and validates a $pullAll modifier expression.  Records the target
// field path (and any positional '$' index), requires the argument to be an
// array, and stores its elements for the later apply phase.
// Sets *positional (when non-null) to whether a '$' component was found.
Status ModifierPullAll::init(const BSONElement& modExpr, const Options& opts,
                             bool* positional) {
    //
    // field name analysis
    //

    // Break down the field name into its 'dotted' components (aka parts) and check that
    // there are no empty parts.
    _fieldRef.parse(modExpr.fieldName());
    Status status = fieldchecker::isUpdatable(_fieldRef);
    if (! status.isOK()) {
        return status;
    }

    // If a $-positional operator was used, get the index in which it occurred
    // and ensure only one occurrence.
    size_t foundCount;
    bool foundDollar = fieldchecker::isPositional(_fieldRef, &_positionalPathIndex, &foundCount);

    if (positional)
        *positional = foundDollar;

    if (foundDollar && foundCount > 1) {
        return Status(ErrorCodes::BadValue,
                      str::stream() << "Too many positional (i.e. '$') elements found in path '"
                                    << _fieldRef.dottedField()
                                    << "'");
    }

    //
    // value analysis
    //

    if (modExpr.type() != Array) {
        return Status(ErrorCodes::BadValue,
                      str::stream() << "$pullAll requires an array argument but was given a "
                                    << typeName(modExpr.type()));
    }

    // store the stuff to remove later
    _elementsToFind = modExpr.Array();

    return Status::OK();
}
bool GeoParser::isMultiPoint(const BSONObj &obj) { BSONElement type = obj.getFieldDotted(GEOJSON_TYPE); if (type.eoo() || (String != type.type())) { return false; } if (GEOJSON_TYPE_MULTI_POINT != type.String()) { return false; } if (!crsIsOK(obj)) { warning() << "Invalid CRS: " << obj.toString() << endl; return false; } BSONElement coordElt = obj.getFieldDotted(GEOJSON_COORDINATES); if (coordElt.eoo() || (Array != coordElt.type())) { return false; } const vector<BSONElement>& coordinates = coordElt.Array(); if (0 == coordinates.size()) { return false; } return isArrayOfCoordinates(coordinates); }
// Command entry point for 'indexStats': collects statistics for a single
// named index of the target collection and appends them to 'result'.
// Fails (returning false with 'errmsg' set) when the namespace does not
// exist, the required 'index' string is missing, or 'expandNodes' is
// present but not an array of numbers.
bool run(const string& dbname, BSONObj& cmdObj, int, string& errmsg,
         BSONObjBuilder& result, bool fromRepl) {
    string ns = dbname + "." + cmdObj.firstElement().valuestrsafe();
    const NamespaceDetails* nsd = nsdetails(ns.c_str());
    if (!cmdLine.quiet) {
        tlog() << "CMD: indexStats " << ns << endl;
    }
    if (!nsd) {
        errmsg = "ns not found";
        return false;
    }

    IndexStatsParams params;

    // { index: _index_name }
    BSONElement indexName = cmdObj["index"];
    if (!indexName.ok() || indexName.type() != String) {
        errmsg = "an index name is required, use {index: \"indexname\"}";
        return false;
    }
    params.indexName = indexName.String();

    // Optional 'expandNodes' array — presumably node ids to expand in the
    // stats output; TODO confirm.  Every entry must be numeric.
    BSONElement expandNodes = cmdObj["expandNodes"];
    if (expandNodes.ok()) {
        if (expandNodes.type() != mongo::Array) {
            errmsg = "expandNodes must be an array of numbers";
            return false;
        }
        vector<BSONElement> arr = expandNodes.Array();
        for (vector<BSONElement>::const_iterator it = arr.begin(); it != arr.end(); ++it) {
            if (!it->isNumber()) {
                errmsg = "expandNodes must be an array of numbers";
                return false;
            }
            params.expandNodes.push_back(int(it->Number()));
        }
    }

    BSONObjBuilder resultBuilder;
    if (!runInternal(nsd, params, errmsg, resultBuilder))
        return false;
    result.appendElements(resultBuilder.obj());
    return true;
}
// Runs a map/reduce "group by" over 'sTableName': groups documents on field
// 'groupbystr' and aggregates field 'keystr' (count or sum, per 'gtype').
// Each result row is appended to 'bonsobjResultList' as
// { <groupbystr>: <_id>, <keystr>: <value> }.
// NOTE(review): the map/reduce JavaScript is assembled by string
// concatenation from 'groupbystr'/'keystr' — callers must not pass
// untrusted field names (script injection risk).
// NOTE(review): 'query', 'nListNum' and 'outcoll' are unused in this body —
// TODO confirm whether that is intentional.
int DbBaseConn::ExecQueryFindGroupBy( const string& sTableName, Query query,
                                      BSON_VEC &bonsobjResultList, int* nListNum,
                                      GROUPBY_TYPE gtype, string groupbystr, string keystr) {
    const string ns = m_sDbName+"."+sTableName;

    // Map: emit(<group key>, { <keystr>: <field value> }) per document.
    string tmpmapstr = "function";
    tmpmapstr = tmpmapstr + " Map(){ emit(this." + groupbystr + ", {" + keystr + ": this." + keystr + "});}";
    const char *map = tmpmapstr.c_str();

    // Reduce: fold all emitted values into a single { <keystr>: n } doc.
    string tmpreducestr = "function";
    tmpreducestr = tmpreducestr + " Reduce(key, values) { var reduced = {" + keystr + ":0}; values.forEach(function(val) { reduced." + keystr;
    switch(gtype) {
        case GTYPE_COUNT:
        {
            // Count: +1 per emitted value.
            tmpreducestr += " += 1;}); return reduced;}";
            break;
        }
        case GTYPE_SUM:
        {
            // Sum: accumulate the field itself.
            tmpreducestr += " += val." + keystr + ";}); return reduced;}";
            break;
        }
        default:
            break;
    }
    const char* reduce = tmpreducestr.c_str();

    const string outcoll = ns + ".out";
    BSONObj out;
    out = dbClientConn.mapreduce(ns, map, reduce, BSONObj()); // default to inline
    MONGO_PRINT(out);

    // Inline map/reduce output: { results: [ { _id, value: {...} }, ... ] }.
    if (!out["results"].isNull()) {
        BSONElement ele = out["results"];
        vector<BSONElement> vec_ele = ele.Array();
        for (int i=0; i<(int)vec_ele.size(); ++i) {
            BSONObj tmpbo = vec_ele[i].Obj();
            bob bbtmp;
            bbtmp.append(groupbystr, tmpbo["_id"].Number());
            bbtmp.append(keystr, tmpbo["value"][keystr].Number());
            bo boStruct = bbtmp.obj();
            bonsobjResultList.push_back(boStruct);
        }
    }
    return 0;
}
void RocksCollectionCatalogEntry::MetaData::parse( const BSONObj& obj ) { ns = obj["ns"].valuestrsafe(); BSONElement e = obj["indexes"]; if ( e.isABSONObj() ) { std::vector<BSONElement> entries = e.Array(); for ( unsigned i = 0; i < entries.size(); i++ ) { BSONObj idx = entries[i].Obj(); IndexMetaData imd; imd.spec = idx["spec"].Obj(); imd.ready = idx["ready"].trueValue(); imd.head = DiskLoc( idx["head_a"].Int(), idx["head_b"].Int() ); imd.multikey = idx["multikey"].trueValue(); indexes.push_back( imd ); } } }
// Parses the coordinates of a "big polygon" (single-ring polygon that may
// span more than a hemisphere) into 'out'.  The ring must be a closed loop
// with at least 3 distinct vertices and must satisfy S2Loop validity.
static Status parseBigSimplePolygonCoordinates(const BSONElement& elem, BigSimplePolygon* out) {
    if (Array != elem.type())
        return BAD_VALUE("Coordinates of polygon must be an array");

    const vector<BSONElement>& coordinates = elem.Array();
    // Only one loop is allowed in a BigSimplePolygon
    if (coordinates.size() != 1) {
        return BAD_VALUE(
            "Only one simple loop is allowed in a big polygon: " << elem.toString(false));
    }

    vector<S2Point> exteriorVertices;
    Status status = Status::OK();
    string err;

    status = parseArrayOfCoordinates(coordinates.front(), &exteriorVertices);
    if (!status.isOK())
        return status;

    // GeoJSON loops repeat the first vertex at the end; verify that.
    status = isLoopClosed(exteriorVertices, coordinates.front());
    if (!status.isOK())
        return status;

    eraseDuplicatePoints(&exteriorVertices);
    // The last point is duplicated.  We drop it, since S2Loop expects no
    // duplicate points
    exteriorVertices.resize(exteriorVertices.size() - 1);

    // At least 3 vertices.
    if (exteriorVertices.size() < 3) {
        return BAD_VALUE("Loop must have at least 3 different vertices: " << elem.toString(false));
    }

    unique_ptr<S2Loop> loop(new S2Loop(exteriorVertices));
    // Check whether this loop is valid.
    if (!loop->IsValid(&err)) {
        return BAD_VALUE("Loop is not valid: " << elem.toString(false) << " " << err);
    }

    // 'out' takes ownership of the loop.
    out->Init(loop.release());
    return Status::OK();
}
// static Status SetFilter::set(QuerySettings* querySettings, PlanCache* planCache, const string& ns, const BSONObj& cmdObj) { // indexes - required BSONElement indexesElt = cmdObj.getField("indexes"); if (indexesElt.eoo()) { return Status(ErrorCodes::BadValue, "required field indexes missing"); } if (indexesElt.type() != mongo::Array) { return Status(ErrorCodes::BadValue, "required field indexes must be an array"); } vector<BSONElement> indexesEltArray = indexesElt.Array(); if (indexesEltArray.empty()) { return Status(ErrorCodes::BadValue, "required field indexes must contain at least one index"); } vector<BSONObj> indexes; for (vector<BSONElement>::const_iterator i = indexesEltArray.begin(); i != indexesEltArray.end(); ++i) { const BSONElement& elt = *i; if (!elt.isABSONObj()) { return Status(ErrorCodes::BadValue, "each item in indexes must be an object"); } BSONObj obj = elt.Obj(); if (obj.isEmpty()) { return Status(ErrorCodes::BadValue, "index specification cannot be empty"); } indexes.push_back(obj.getOwned()); } CanonicalQuery* cqRaw; Status status = PlanCacheCommand::canonicalize(ns, cmdObj, &cqRaw); if (!status.isOK()) { return status; } scoped_ptr<CanonicalQuery> cq(cqRaw); // Add allowed indices to query settings, overriding any previous entries. querySettings->setAllowedIndices(*cq, indexes); // Remove entry from plan cache. planCache->remove(*cq); return Status::OK(); }
//// What we publicly export bool GeoParser::isGeoJSONPoint(const BSONObj& obj) { BSONElement type = obj.getFieldDotted(GEOJSON_TYPE); if (type.eoo() || (String != type.type())) { return false; } if (GEOJSON_TYPE_POINT != type.String()) { return false; } if (!crsIsOK(obj)) { warning() << "Invalid CRS: " << obj.toString() << endl; return false; } BSONElement coordElt = obj.getFieldDotted(GEOJSON_COORDINATES); if (coordElt.eoo() || (Array != coordElt.type())) { return false; } const vector<BSONElement>& coordinates = coordElt.Array(); if (coordinates.size() != 2) { return false; } if (!coordinates[0].isNumber() || !coordinates[1].isNumber()) { return false; } double lat = coordinates[1].Number(); return lat >= -90 && lat <= 90; }
bool GeoParser::isGeoJSONLineString(const BSONObj& obj) { BSONElement type = obj.getFieldDotted(GEOJSON_TYPE); if (type.eoo() || (String != type.type())) { return false; } if (GEOJSON_TYPE_LINESTRING != type.String()) { return false; } if (!crsIsOK(obj)) { warning() << "Invalid CRS: " << obj.toString() << endl; return false; } BSONElement coordElt = obj.getFieldDotted(GEOJSON_COORDINATES); if (coordElt.eoo() || (Array != coordElt.type())) { return false; } const vector<BSONElement>& coordinateArray = coordElt.Array(); if (coordinateArray.size() < 2) { return false; } if (!isArrayOfCoordinates(coordinateArray)) { return false; } vector<S2Point> vertices; parsePoints(obj.getFieldDotted(GEOJSON_COORDINATES).Array(), &vertices); eraseDuplicatePoints(&vertices); return S2Polyline::IsValid(vertices); }
static bool isGeoJSONPoint(const BSONObj& obj) { BSONElement type = obj.getFieldDotted(GEOJSON_TYPE); if (type.eoo() || (String != type.type())) { return false; } if (GEOJSON_TYPE_POINT != type.String()) { return false; } if (!GeoParser::crsIsOK(obj)) { warning() << "Invalid CRS: " << obj.toString() << endl; return false; } BSONElement coordElt = obj.getFieldDotted(GEOJSON_COORDINATES); if (coordElt.eoo() || (Array != coordElt.type())) { return false; } const vector<BSONElement>& coordinates = coordElt.Array(); if (coordinates.size() != 2) { return false; } if (!coordinates[0].isNumber() || !coordinates[1].isNumber()) { return false; } // For now, we assume all GeoJSON must be within WGS84 - this may change double lat = coordinates[1].Number(); double lng = coordinates[0].Number(); return isValidLngLat(lng, lat); }
bool GeoParser::isGeometryCollection(const BSONObj &obj) { BSONElement type = obj.getFieldDotted(GEOJSON_TYPE); if (type.eoo() || (String != type.type())) { return false; } if (GEOJSON_TYPE_GEOMETRY_COLLECTION != type.String()) { return false; } BSONElement coordElt = obj.getFieldDotted(GEOJSON_GEOMETRIES); if (coordElt.eoo() || (Array != coordElt.type())) { return false; } const vector<BSONElement>& coordinates = coordElt.Array(); if (0 == coordinates.size()) { return false; } for (size_t i = 0; i < coordinates.size(); ++i) { if (coordinates[i].eoo() || (Object != coordinates[i].type())) { return false; } BSONObj obj = coordinates[i].Obj(); if (!isGeoJSONPoint(obj) && !isLine(obj) && !isGeoJSONPolygon(obj) && !isMultiPoint(obj) && !isMultiPolygon(obj) && !isMultiLine(obj)) { return false; } } return true; }
// Structural check for a GeoJSON MultiPolygon: correct type tag, acceptable
// CRS, and a non-empty coordinates array where every member is a valid set
// of polygon rings.
bool GeoParser::isMultiPolygon(const BSONObj &obj) {
    BSONElement type = obj.getFieldDotted(GEOJSON_TYPE);
    if (type.eoo() || (String != type.type())) {
        return false;
    }
    if (GEOJSON_TYPE_MULTI_POLYGON != type.String()) {
        return false;
    }

    if (!crsIsOK(obj)) {
        warning() << "Invalid CRS: " << obj.toString() << endl;
        return false;
    }

    BSONElement coordElt = obj.getFieldDotted(GEOJSON_COORDINATES);
    if (coordElt.eoo() || (Array != coordElt.type())) {
        return false;
    }

    const vector<BSONElement>& coordinates = coordElt.Array();
    if (0 == coordinates.size()) {
        return false;
    }
    // Each member must itself be a polygon's ring array.
    for (size_t i = 0; i < coordinates.size(); ++i) {
        if (coordinates[i].eoo() || (Array != coordinates[i].type())) {
            return false;
        }
        if (!isGeoJSONPolygonCoordinates(coordinates[i].Array())) {
            return false;
        }
    }

    return true;
}
// Legacy bool-returning parser for a GeoJSON GeometryCollection: dispatches
// every member to the matching per-type parser, classifying each via the
// is*() structural checks.  NOTE(review): assumes a prior validation pass —
// Array()/Obj() would throw on malformed input, and the final branch
// verify()s that the member is a MultiLineString.
bool GeoParser::parseGeometryCollection(const BSONObj &obj, GeometryCollection *out) {
    BSONElement coordElt = obj.getFieldDotted(GEOJSON_GEOMETRIES);
    const vector<BSONElement>& geometries = coordElt.Array();

    for (size_t i = 0; i < geometries.size(); ++i) {
        const BSONObj& geoObj = geometries[i].Obj();

        if (isGeoJSONPoint(geoObj)) {
            PointWithCRS point;
            if (!parsePoint(geoObj, &point)) {
                return false;
            }
            out->points.push_back(point);
        } else if (isLine(geoObj)) {
            // OwnedPointerVector-style members: push a new element, then
            // parse directly into it.
            out->lines.mutableVector().push_back(new LineWithCRS());
            if (!parseLine(geoObj, out->lines.vector().back())) {
                return false;
            }
        } else if (isGeoJSONPolygon(geoObj)) {
            out->polygons.mutableVector().push_back(new PolygonWithCRS());
            if (!parsePolygon(geoObj, out->polygons.vector().back())) {
                return false;
            }
        } else if (isMultiPoint(geoObj)) {
            out->multiPoints.mutableVector().push_back(new MultiPointWithCRS());
            if (!parseMultiPoint(geoObj, out->multiPoints.mutableVector().back())) {
                return false;
            }
        } else if (isMultiPolygon(geoObj)) {
            out->multiPolygons.mutableVector().push_back(new MultiPolygonWithCRS());
            if (!parseMultiPolygon(geoObj, out->multiPolygons.mutableVector().back())) {
                return false;
            }
        } else {
            // Only remaining accepted type.
            verify(isMultiLine(geoObj));
            out->multiLines.mutableVector().push_back(new MultiLineWithCRS());
            if (!parseMultiLine(geoObj, out->multiLines.mutableVector().back())) {
                return false;
            }
        }
    }

    return true;
}
// Converts one input line into BSON object(s) appended to 'objects'.
// JSON mode: parses the whole line; when '_root' is set, extracts that
// dotted sub-element (an object is appended directly, an array is expanded
// element-by-element).  Otherwise (CSV/TSV mode): splits the line on '_sep',
// handling quoted CSV fields with "" and \" escapes; a header line feeds
// '_fields' instead of producing data.  Field names come from '_fields',
// falling back to "fieldN".
// NOTE(review): 'line' is consumed destructively (trailing-space NUL-ing,
// pointer advancing).
void parseLine( char * line, vector<BSONObj> *objects ){
    uassert(13289, "Invalid UTF8 character detected", isValidUTF8(line));

    if ( _type == JSON ){
        // Strip trailing whitespace in place before handing to the parser.
        char * end = ( line + strlen( line ) ) - 1;
        while ( isspace(*end) ){
            *end = 0;
            end--;
        }
        BSONObj bson = fromjson( line );
        if ( _root.size() > 0 ) {
            BSONElement e = bson.getFieldDotted( _root.c_str() );
            if ( e.eoo() )
                cerr << "json root element is not available in json data" << endl;
            else {
                switch ( e.type() ) {
                case Object:
                    objects->push_back( e.Obj() );
                    break;
                case Array:{
                    // Expand the root array into one object per element.
                    vector<BSONElement> elements = e.Array();
                    std::for_each( elements.begin(), elements.end(), element_to_object(objects) );
                    break;
                }
                default:
                    cerr << "json root element is not an object or array" << endl;
                    break;
                }
            }
        } else {
            objects->push_back(bson);
        }
        return;
    }

    // CSV/TSV path: split on '_sep', one builder per line.
    BSONObjBuilder b;
    unsigned int pos=0;
    while ( line[0] ){
        string name;
        if ( pos < _fields.size() ){
            name = _fields[pos];
        } else {
            // No header name for this column — synthesize "fieldN".
            stringstream ss;
            ss << "field" << pos;
            name = ss.str();
        }
        pos++;

        bool done = false;
        string data;
        char * end;
        if ( _type == CSV && line[0] == '"' ){
            // Quoted CSV field: scan to the closing quote, honoring "" and
            // \" escapes.
            line++; //skip first '"'
            while (true) {
                end = strchr( line , '"' );
                if (!end){
                    // Unterminated quote: take the rest of the line.
                    data += line;
                    done = true;
                    break;
                } else if (end[1] == '"') {
                    // two '"'s get appended as one
                    data.append(line, end-line+1); //include '"'
                    line = end+2; //skip both '"'s
                } else if (end[-1] == '\\') {
                    // "\\\"" gets appended as '"'
                    data.append(line, end-line-1); //exclude '\\'
                    data.append("\"");
                    line = end+1; //skip the '"'
                } else {
                    data.append(line, end-line);
                    line = end+2; //skip '"' and ','
                    break;
                }
            }
        } else {
            // Unquoted field: take everything up to the next separator.
            end = strstr( line , _sep );
            if ( ! end ){
                done = true;
                data = string( line );
            } else {
                data = string( line , end - line );
                line = end+1;
            }
        }

        if ( _headerLine ){
            // Header mode: record column names instead of emitting data.
            while ( isspace( data[0] ) )
                data = data.substr( 1 );
            _fields.push_back( data );
        } else
            _append( b , name , data );

        if ( done )
            break;
    }
    objects->push_back(b.obj());
}
// Serializes one BSON element into 'dg' according to the dclass type
// description: scalars map to the matching add_* call, arrays/structs/
// methods recurse per sub-type.  Throws DBException for a char field whose
// string value is not exactly one character; asserts on T_INVALID.
static void bson2bamboo(const dclass::DistributedType *type,
                        const BSONElement &element,
                        Datagram &dg)
{
    switch(type->get_type()) {
        case dclass::Type::T_INT8: {
            dg.add_int8(element.Int());
        }
        break;
        case dclass::Type::T_INT16: {
            dg.add_int16(element.Int());
        }
        break;
        case dclass::Type::T_INT32: {
            dg.add_int32(element.Int());
        }
        break;
        case dclass::Type::T_INT64: {
            dg.add_int64(element.Int());
        }
        break;
        case dclass::Type::T_UINT8: {
            dg.add_uint8(element.Int());
        }
        break;
        case dclass::Type::T_UINT16: {
            dg.add_uint16(element.Int());
        }
        break;
        case dclass::Type::T_UINT32: {
            dg.add_uint32(element.Int());
        }
        break;
        case dclass::Type::T_UINT64: {
            dg.add_uint64(element.Int());
        }
        break;
        case dclass::Type::T_CHAR: {
            // Chars are stored as single-character strings in BSON.
            string str = element.String();
            if(str.size() != 1) {
                throw mongo::DBException("Expected single-length string for char field", 0);
            }
            dg.add_uint8(str[0]);
        }
        break;
        case dclass::Type::T_FLOAT32: {
            dg.add_float32(element.Number());
        }
        break;
        case dclass::Type::T_FLOAT64: {
            dg.add_float64(element.Number());
        }
        break;
        case dclass::Type::T_STRING: {
            dg.add_data(element.String());
        }
        break;
        case dclass::Type::T_VARSTRING: {
            dg.add_string(element.String());
        }
        break;
        case dclass::Type::T_BLOB: {
            int len;
            const uint8_t *rawdata = (const uint8_t *)element.binData(len);
            dg.add_data(rawdata, len);
        }
        break;
        case dclass::Type::T_VARBLOB: {
            int len;
            const uint8_t *rawdata = (const uint8_t *)element.binData(len);
            dg.add_blob(rawdata, len);
        }
        break;
        case dclass::Type::T_ARRAY: {
            // Fixed array: serialize each element with the declared type.
            const dclass::ArrayType *array = type->as_array();
            std::vector<BSONElement> data = element.Array();
            for(auto it = data.begin(); it != data.end(); ++it) {
                bson2bamboo(array->get_element_type(), *it, dg);
            }
        }
        break;
        case dclass::Type::T_VARARRAY: {
            // Variable array: serialize into a temporary datagram first so
            // the result can be added as a length-prefixed blob.
            const dclass::ArrayType *array = type->as_array();
            std::vector<BSONElement> data = element.Array();
            DatagramPtr newdg = Datagram::create();
            for(auto it = data.begin(); it != data.end(); ++it) {
                bson2bamboo(array->get_element_type(), *it, *newdg);
            }
            dg.add_blob(newdg->get_data(), newdg->size());
        }
        break;
        case dclass::Type::T_STRUCT: {
            // Struct: serialize every declared field in order, looked up by
            // field name in the element.
            const dclass::Struct *s = type->as_struct();
            size_t fields = s->get_num_fields();
            for(unsigned int i = 0; i < fields; ++i) {
                const dclass::Field *field = s->get_field(i);
                bson2bamboo(field->get_type(), element[field->get_name()], dg);
            }
        }
        break;
        case dclass::Type::T_METHOD: {
            // Method: serialize each parameter; unnamed (or missing-by-name)
            // parameters fall back to positional keys "_0", "_1", ...
            const dclass::Method *m = type->as_method();
            size_t parameters = m->get_num_parameters();
            for(unsigned int i = 0; i < parameters; ++i) {
                const dclass::Parameter *parameter = m->get_parameter(i);
                string name = parameter->get_name();
                if(name.empty() || element[name].eoo()) {
                    stringstream n;
                    n << "_" << i;
                    name = n.str();
                }
                bson2bamboo(parameter->get_type(), element[name], dg);
            }
        }
        break;
        case dclass::Type::T_INVALID:
        default:
            assert(false);
            break;
    }
}