////////////////////////////////////////////////////////////////////////////////
/// @brief creates a cap constraint index
///
/// allocates the constraint, wires up its callbacks, stores the cap
/// parameters (count / size) and hands the constraint to InitialiseCap.
/// returns nullptr if the allocation fails. the caller owns the result.
////////////////////////////////////////////////////////////////////////////////

TRI_index_t* TRI_CreateCapConstraint (TRI_document_collection_t* document,
                                      TRI_idx_iid_t iid,
                                      size_t count,
                                      int64_t size) {
  TRI_cap_constraint_t* constraint = static_cast<TRI_cap_constraint_t*>(
    TRI_Allocate(TRI_CORE_MEM_ZONE, sizeof(TRI_cap_constraint_t), false));

  if (constraint == nullptr) {
    return nullptr;
  }

  TRI_index_t* index = &constraint->base;

  TRI_InitIndex(index, iid, TRI_IDX_TYPE_CAP_CONSTRAINT, document, false, false);
  TRI_InitVectorString(&index->_fields, TRI_CORE_MEM_ZONE);

  // hook up the index callbacks
  index->memory      = MemoryCapConstraint;
  index->json        = JsonCapConstraint;
  index->removeIndex = RemoveIndexCapConstraint;
  index->insert      = InsertCapConstraint;
  index->postInsert  = PostInsertCapConstraint;
  index->remove      = RemoveCapConstraint;

  // cap parameters: maximum number of documents and maximum total size
  constraint->_count = count;
  constraint->_size  = size;

  InitialiseCap(constraint, document);

  return index;
}
////////////////////////////////////////////////////////////////////////////////
/// @brief creates a hash index
///
/// copies the shape paths and field names into the index, then creates the
/// underlying hash array pre-sized for @a initialDocumentCount documents.
/// returns NULL on out-of-memory. the caller owns the result.
////////////////////////////////////////////////////////////////////////////////

TRI_index_t* TRI_CreateHashIndex (struct TRI_primary_collection_s* primary,
                                  TRI_vector_pointer_t* fields,
                                  TRI_vector_t* paths,
                                  bool unique,
                                  size_t initialDocumentCount) {
  TRI_hash_index_t* hashIndex;
  TRI_index_t* idx;
  int res;

  // ...........................................................................
  // Initialize the index and the callback functions
  // ...........................................................................

  hashIndex = TRI_Allocate(TRI_CORE_MEM_ZONE, sizeof(TRI_hash_index_t), false);

  // fix: the allocation result was dereferenced without a check; the other
  // index constructors in this file (e.g. TRI_CreateCapConstraint) do check
  if (hashIndex == NULL) {
    return NULL;
  }

  idx = &hashIndex->base;

  idx->typeName = TypeNameHashIndex;
  // NOTE(review): this TRI_InitIndex call has a different arity than the one
  // used by the geo/cap constructors (no iid argument) — presumably a
  // different overload/vintage; left unchanged, verify against its prototype
  TRI_InitIndex(idx, TRI_IDX_TYPE_HASH_INDEX, primary, unique, true);

  idx->json = JsonHashIndex;
  idx->insert = InsertHashIndex;
  idx->remove = RemoveHashIndex;

  // ...........................................................................
  // Copy the contents of the path list vector into a new vector and store this
  // ...........................................................................

  TRI_CopyPathVector(&hashIndex->_paths, paths);

  TRI_InitVectorString(&idx->_fields, TRI_CORE_MEM_ZONE);
  TRI_CopyDataVectorStringFromVectorPointer(TRI_CORE_MEM_ZONE, &idx->_fields, fields);

  // create a index preallocated for the current number of documents
  res = TRI_InitHashArray(&hashIndex->_hashArray,
                          initialDocumentCount,
                          hashIndex->_paths._length);

  // oops, out of memory?
  if (res != TRI_ERROR_NO_ERROR) {
    TRI_DestroyVector(&hashIndex->_paths);
    TRI_DestroyVectorString(&idx->_fields);
    TRI_Free(TRI_CORE_MEM_ZONE, hashIndex);
    return NULL;
  }

  // ...........................................................................
  // Assign the function calls used by the query engine
  // ...........................................................................

  idx->indexQuery = NULL;
  idx->indexQueryFree = NULL;
  idx->indexQueryResult = NULL;

  return idx;
}
////////////////////////////////////////////////////////////////////////////////
/// @brief creates a geo index with separate latitude / longitude attributes
///
/// registers both attribute names as index fields, creates the underlying
/// geo index and wires up the query-engine methods. returns NULL on
/// out-of-memory. the caller owns the result.
////////////////////////////////////////////////////////////////////////////////

TRI_index_t* TRI_CreateGeo2Index (TRI_document_collection_t* document,
                                  TRI_idx_iid_t iid,
                                  char const* latitudeName,
                                  TRI_shape_pid_t latitude,
                                  char const* longitudeName,
                                  TRI_shape_pid_t longitude,
                                  bool unique,
                                  bool ignoreNull) {
  char* lat;
  char* lon;

  TRI_geo_index_t* geo = static_cast<TRI_geo_index_t*>(
    TRI_Allocate(TRI_CORE_MEM_ZONE, sizeof(TRI_geo_index_t), false));

  // fix: the allocation result was dereferenced without a check
  if (geo == NULL) {
    return NULL;
  }

  TRI_index_t* idx = &geo->base;

  TRI_InitVectorString(&idx->_fields, TRI_CORE_MEM_ZONE);
  // fix: the call was missing its final argument; TRI_CreateGeo1Index and
  // TRI_CreateCapConstraint both pass a trailing bool to the same function
  TRI_InitIndex(idx, iid, TRI_IDX_TYPE_GEO2_INDEX, document, unique, false);

  idx->_ignoreNull = ignoreNull;

  idx->memory = MemoryGeoIndex;
  idx->json = JsonGeo2Index;
  idx->insert = InsertGeoIndex;
  idx->remove = RemoveGeoIndex;

  // register the two attribute names as the index fields
  lat = TRI_DuplicateStringZ(TRI_CORE_MEM_ZONE, latitudeName);
  lon = TRI_DuplicateStringZ(TRI_CORE_MEM_ZONE, longitudeName);
  TRI_PushBackVectorString(&idx->_fields, lat);
  TRI_PushBackVectorString(&idx->_fields, lon);

  geo->_geoIndex = GeoIndex_new();

  // oops, out of memory?
  if (geo->_geoIndex == NULL) {
    TRI_DestroyVectorString(&idx->_fields);
    TRI_Free(TRI_CORE_MEM_ZONE, geo);
    return NULL;
  }

  // separate latitude / longitude attributes (no combined location)
  geo->_variant = INDEX_GEO_INDIVIDUAL_LAT_LON;
  geo->_location = 0;
  geo->_latitude = latitude;
  geo->_longitude = longitude;

  GeoIndex_assignMethod(&(idx->indexQuery), TRI_INDEX_METHOD_ASSIGNMENT_QUERY);
  GeoIndex_assignMethod(&(idx->indexQueryFree), TRI_INDEX_METHOD_ASSIGNMENT_FREE);
  GeoIndex_assignMethod(&(idx->indexQueryResult), TRI_INDEX_METHOD_ASSIGNMENT_RESULT);

  return idx;
}
////////////////////////////////////////////////////////////////////////////////
/// @brief initialises a string vector with a given initial capacity
///
/// delegates the basic setup to TRI_InitVectorString, then pre-allocates a
/// buffer for @a initialCapacity entries. returns TRI_ERROR_OUT_OF_MEMORY if
/// that allocation fails, TRI_ERROR_NO_ERROR otherwise.
////////////////////////////////////////////////////////////////////////////////

int TRI_InitVectorString2 (TRI_vector_string_t* vector,
                           TRI_memory_zone_t* zone,
                           size_t initialCapacity) {
  TRI_InitVectorString(vector, zone);

  if (initialCapacity > 0) {
    char** buffer = (char**) TRI_Allocate(vector->_memoryZone,
                                          initialCapacity * sizeof(char*),
                                          false);

    if (buffer == NULL) {
      // vector remains in the state TRI_InitVectorString left it in
      return TRI_ERROR_OUT_OF_MEMORY;
    }

    vector->_buffer = buffer;
  }

  vector->_capacity = initialCapacity;

  return TRI_ERROR_NO_ERROR;
}
////////////////////////////////////////////////////////////////////////////////
/// @brief creates a geo index with a combined location attribute
///
/// @a geoJson selects the coordinate order inside the location attribute
/// (lat/lon vs. lon/lat). returns NULL on out-of-memory. the caller owns
/// the result.
////////////////////////////////////////////////////////////////////////////////

TRI_index_t* TRI_CreateGeo1Index (TRI_document_collection_t* document,
                                  TRI_idx_iid_t iid,
                                  char const* locationName,
                                  TRI_shape_pid_t location,
                                  bool geoJson,
                                  bool unique,
                                  bool ignoreNull) {
  char* ln;

  TRI_geo_index_t* geo = static_cast<TRI_geo_index_t*>(
    TRI_Allocate(TRI_CORE_MEM_ZONE, sizeof(TRI_geo_index_t), false));

  // fix: the allocation result was dereferenced without a check
  if (geo == NULL) {
    return NULL;
  }

  TRI_index_t* idx = &geo->base;

  TRI_InitVectorString(&idx->_fields, TRI_CORE_MEM_ZONE);
  TRI_InitIndex(idx, iid, TRI_IDX_TYPE_GEO1_INDEX, document, unique, false);

  idx->_ignoreNull = ignoreNull;

  idx->memory = MemoryGeoIndex;
  idx->json = JsonGeo1Index;
  idx->insert = InsertGeoIndex;
  idx->remove = RemoveGeoIndex;

  // register the location attribute name as the index field
  ln = TRI_DuplicateStringZ(TRI_CORE_MEM_ZONE, locationName);
  TRI_PushBackVectorString(&idx->_fields, ln);

  geo->_geoIndex = GeoIndex_new();

  // oops, out of memory?
  if (geo->_geoIndex == NULL) {
    TRI_DestroyVectorString(&idx->_fields);
    TRI_Free(TRI_CORE_MEM_ZONE, geo);
    return NULL;
  }

  geo->_variant = geoJson ? INDEX_GEO_COMBINED_LAT_LON : INDEX_GEO_COMBINED_LON_LAT;
  geo->_location = location;
  geo->_latitude = 0;
  geo->_longitude = 0;
  geo->_geoJson = geoJson;

  // consistency fix: TRI_CreateGeo2Index assigns the query-engine methods,
  // but this variant left them unassigned
  GeoIndex_assignMethod(&(idx->indexQuery), TRI_INDEX_METHOD_ASSIGNMENT_QUERY);
  GeoIndex_assignMethod(&(idx->indexQueryFree), TRI_INDEX_METHOD_ASSIGNMENT_FREE);
  GeoIndex_assignMethod(&(idx->indexQueryResult), TRI_INDEX_METHOD_ASSIGNMENT_RESULT);

  return idx;
}
////////////////////////////////////////////////////////////////////////////////
/// @brief initialises a collection struct
///
/// zeroes the struct, copies the collection info, and initialises the
/// datafile / journal / compactor / index-file containers. takes ownership
/// of @a directory (the pointer is stored, not copied).
////////////////////////////////////////////////////////////////////////////////

static void InitCollection (TRI_vocbase_t* vocbase,
                            TRI_collection_t* collection,
                            char* directory,
                            const TRI_col_info_t* const info) {
  assert(collection);

  // start from a clean slate before filling in the individual members
  memset(collection, 0, sizeof(TRI_collection_t));

  TRI_CopyCollectionInfo(&collection->_info, info);

  collection->_vocbase           = vocbase;
  collection->_state             = TRI_COL_STATE_WRITE;
  collection->_lastError         = 0;
  collection->_maximumMarkerSize = 0;
  collection->_directory         = directory;

  // containers for the on-disk files belonging to this collection
  TRI_InitVectorPointer(&collection->_datafiles, TRI_UNKNOWN_MEM_ZONE);
  TRI_InitVectorPointer(&collection->_journals, TRI_UNKNOWN_MEM_ZONE);
  TRI_InitVectorPointer(&collection->_compactors, TRI_UNKNOWN_MEM_ZONE);
  TRI_InitVectorString(&collection->_indexFiles, TRI_CORE_MEM_ZONE);
}
////////////////////////////////////////////////////////////////////////////////
/// @brief scans a collection directory and classifies the files found
///
/// file names must match <journal|datafile|index|compactor>-<number>.<db|json>.
/// index JSON files go into _indexes; ".db" files go into _journals,
/// _compactors or _datafiles depending on their prefix. unknown files are
/// logged and skipped. the returned structure owns the stored path strings.
////////////////////////////////////////////////////////////////////////////////

static TRI_col_file_structure_t ScanCollectionDirectory (char const* path) {
  TRI_col_file_structure_t structure;
  TRI_vector_string_t files;
  regex_t re;
  size_t i;
  size_t n;

  TRI_InitVectorString(&structure._journals, TRI_CORE_MEM_ZONE);
  TRI_InitVectorString(&structure._compactors, TRI_CORE_MEM_ZONE);
  TRI_InitVectorString(&structure._datafiles, TRI_CORE_MEM_ZONE);
  TRI_InitVectorString(&structure._indexes, TRI_CORE_MEM_ZONE);

  // fix: the regcomp result was ignored; calling regexec with an
  // uninitialised regex_t on compile failure would be undefined behaviour
  if (regcomp(&re, "^(journal|datafile|index|compactor)-([0-9][0-9]*)\\.(db|json)$", REG_EXTENDED) != 0) {
    LOG_ERROR("unable to compile regular expression");
    return structure;
  }

  // check files within the directory
  files = TRI_FilesDirectory(path);
  n = files._length;

  for (i = 0; i < n; ++i) {
    char const* file = files._buffer[i];
    regmatch_t matches[4];

    if (regexec(&re, file, sizeof(matches) / sizeof(matches[0]), matches, 0) == 0) {
      // matches[1] = prefix (journal/datafile/index/compactor),
      // matches[3] = extension (db/json)
      char const* first = file + matches[1].rm_so;
      size_t firstLen = matches[1].rm_eo - matches[1].rm_so;
      char const* third = file + matches[3].rm_so;
      size_t thirdLen = matches[3].rm_eo - matches[3].rm_so;

      // .............................................................................
      // file is an index
      // .............................................................................

      if (TRI_EqualString2("index", first, firstLen) &&
          TRI_EqualString2("json", third, thirdLen)) {
        char* filename;

        filename = TRI_Concatenate2File(path, file);
        TRI_PushBackVectorString(&structure._indexes, filename);
      }

      // .............................................................................
      // file is a journal or datafile
      // .............................................................................

      else if (TRI_EqualString2("db", third, thirdLen)) {
        char* filename;

        filename = TRI_Concatenate2File(path, file);

        // file is a journal
        if (TRI_EqualString2("journal", first, firstLen)) {
          TRI_PushBackVectorString(&structure._journals, filename);
        }

        // file is a compactor file
        else if (TRI_EqualString2("compactor", first, firstLen)) {
          TRI_PushBackVectorString(&structure._compactors, filename);
        }

        // file is a datafile
        else if (TRI_EqualString2("datafile", first, firstLen)) {
          TRI_PushBackVectorString(&structure._datafiles, filename);
        }

        // ups, what kind of file is that (e.g. "index-<n>.db")
        else {
          LOG_ERROR("unknown datafile '%s'", file);
          TRI_FreeString(TRI_CORE_MEM_ZONE, filename);
        }
      }
      else {
        LOG_ERROR("unknown datafile '%s'", file);
      }
    }
  }

  TRI_DestroyVectorString(&files);
  regfree(&re);

  return structure;
}