void TRI_DestroyGeoIndex (TRI_index_t* idx) {
  TRI_geo_index_t* geo;

  TRI_DestroyVectorString(&idx->_fields);

  geo = (TRI_geo_index_t*) idx;

  GeoIndex_free(geo->_geoIndex);
}
void TRI_DestroyHashIndex (TRI_index_t* idx) {
  TRI_hash_index_t* hashIndex;

  TRI_DestroyVectorString(&idx->_fields);

  hashIndex = (TRI_hash_index_t*) idx;

  TRI_DestroyVector(&hashIndex->_paths);
  TRI_DestroyHashArray(&hashIndex->_hashArray);
}
TRI_index_t* TRI_CreateHashIndex (struct TRI_primary_collection_s* primary,
                                  TRI_vector_pointer_t* fields,
                                  TRI_vector_t* paths,
                                  bool unique,
                                  size_t initialDocumentCount) {
  TRI_hash_index_t* hashIndex;
  TRI_index_t* idx;
  int res;

  // ...........................................................................
  // Initialize the index and the callback functions
  // ...........................................................................

  hashIndex = TRI_Allocate(TRI_CORE_MEM_ZONE, sizeof(TRI_hash_index_t), false);
  idx = &hashIndex->base;

  idx->typeName = TypeNameHashIndex;
  TRI_InitIndex(idx, TRI_IDX_TYPE_HASH_INDEX, primary, unique, true);

  idx->json = JsonHashIndex;
  idx->insert = InsertHashIndex;
  idx->remove = RemoveHashIndex;

  // ...........................................................................
  // Copy the contents of the path list vector into a new vector and store this
  // ...........................................................................

  TRI_CopyPathVector(&hashIndex->_paths, paths);

  TRI_InitVectorString(&idx->_fields, TRI_CORE_MEM_ZONE);
  TRI_CopyDataVectorStringFromVectorPointer(TRI_CORE_MEM_ZONE, &idx->_fields, fields);

  // create an index preallocated for the current number of documents
  res = TRI_InitHashArray(&hashIndex->_hashArray,
                          initialDocumentCount,
                          hashIndex->_paths._length);

  // oops, out of memory?
  if (res != TRI_ERROR_NO_ERROR) {
    TRI_DestroyVector(&hashIndex->_paths);
    TRI_DestroyVectorString(&idx->_fields);
    TRI_Free(TRI_CORE_MEM_ZONE, hashIndex);

    return NULL;
  }

  // ...........................................................................
  // Assign the function calls used by the query engine
  // ...........................................................................

  idx->indexQuery = NULL;
  idx->indexQueryFree = NULL;
  idx->indexQueryResult = NULL;

  return idx;
}
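// Hypothetical usage sketch, not part of the original source: it shows the
// create/teardown pairing for a hash index. The `primary`, `fields` and
// `paths` arguments and the document count of 1000 are assumed caller state.
static int ExampleHashIndexLifecycle (struct TRI_primary_collection_s* primary,
                                      TRI_vector_pointer_t* fields,
                                      TRI_vector_t* paths) {
  // preallocate the hash array for an assumed 1000 existing documents
  TRI_index_t* idx = TRI_CreateHashIndex(primary, fields, paths, true, 1000);

  if (idx == NULL) {
    // TRI_InitHashArray() ran out of memory
    return TRI_ERROR_OUT_OF_MEMORY;
  }

  // ... use the index ...

  TRI_DestroyHashIndex(idx);
  TRI_Free(TRI_CORE_MEM_ZONE, idx);

  return TRI_ERROR_NO_ERROR;
}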
static void ScanPath (TRI_vocbase_t* vocbase, char const* path) {
  TRI_vector_string_t files;
  TRI_col_type_e type;
  size_t n;
  size_t i;

  files = TRI_FilesDirectory(path);
  n = files._length;

  for (i = 0;  i < n;  ++i) {
    char* name;
    char* file;

    name = files._buffer[i];

    if (name[0] == '\0' || name[0] == '_' || name[0] == '.') {
      continue;
    }

    file = TRI_Concatenate2File(path, name);

    if (file == NULL) {
      continue;
    }

    if (TRI_IsDirectory(file)) {
      TRI_col_info_t info;
      bool ok;

      ok = TRI_LoadParameterInfo(file, &info);

      if (! ok) {
        LOG_DEBUG("ignoring directory '%s' without valid parameter file '%s'",
                  file,
                  TRI_COL_PARAMETER_FILE);
      }
      else {
        type = info._type;

        if (type == TRI_COL_TYPE_SIMPLE_DOCUMENT) {
          AddCollection(vocbase, type, info._name, info._cid, file);
          LOG_DEBUG("added simple document collection from '%s'", file);
        }
        else {
          LOG_DEBUG("skipping collection of unknown type %d", (int) type);
        }
      }
    }
    else {
      LOG_DEBUG("ignoring non-directory '%s'", file);
    }

    TRI_FreeString(TRI_CORE_MEM_ZONE, file);
  }

  TRI_DestroyVectorString(&files);
}
void TRI_DestroyCollection (TRI_collection_t* collection) {
  assert(collection);

  TRI_FreeCollectionInfoOptions(&collection->_info);

  FreeDatafilesVector(&collection->_datafiles);
  FreeDatafilesVector(&collection->_journals);
  FreeDatafilesVector(&collection->_compactors);

  TRI_DestroyVectorString(&collection->_indexFiles);
  TRI_FreeString(TRI_CORE_MEM_ZONE, collection->_directory);
}
TRI_index_t* TRI_CreateGeo2Index (TRI_document_collection_t* document,
                                  TRI_idx_iid_t iid,
                                  char const* latitudeName,
                                  TRI_shape_pid_t latitude,
                                  char const* longitudeName,
                                  TRI_shape_pid_t longitude,
                                  bool unique,
                                  bool ignoreNull) {
  char* lat;
  char* lon;
  TRI_geo_index_t* geo = static_cast<TRI_geo_index_t*>(TRI_Allocate(TRI_CORE_MEM_ZONE, sizeof(TRI_geo_index_t), false));
  TRI_index_t* idx = &geo->base;

  TRI_InitVectorString(&idx->_fields, TRI_CORE_MEM_ZONE);
  TRI_InitIndex(idx, iid, TRI_IDX_TYPE_GEO2_INDEX, document, unique, false);

  idx->_ignoreNull = ignoreNull;

  idx->memory = MemoryGeoIndex;
  idx->json = JsonGeo2Index;
  idx->insert = InsertGeoIndex;
  idx->remove = RemoveGeoIndex;

  lat = TRI_DuplicateStringZ(TRI_CORE_MEM_ZONE, latitudeName);
  lon = TRI_DuplicateStringZ(TRI_CORE_MEM_ZONE, longitudeName);

  TRI_PushBackVectorString(&idx->_fields, lat);
  TRI_PushBackVectorString(&idx->_fields, lon);

  geo->_geoIndex = GeoIndex_new();

  // oops, out of memory?
  if (geo->_geoIndex == NULL) {
    TRI_DestroyVectorString(&idx->_fields);
    TRI_Free(TRI_CORE_MEM_ZONE, geo);

    return NULL;
  }

  geo->_variant = INDEX_GEO_INDIVIDUAL_LAT_LON;
  geo->_location = 0;
  geo->_latitude = latitude;
  geo->_longitude = longitude;

  GeoIndex_assignMethod(&(idx->indexQuery), TRI_INDEX_METHOD_ASSIGNMENT_QUERY);
  GeoIndex_assignMethod(&(idx->indexQueryFree), TRI_INDEX_METHOD_ASSIGNMENT_FREE);
  GeoIndex_assignMethod(&(idx->indexQueryResult), TRI_INDEX_METHOD_ASSIGNMENT_RESULT);

  return idx;
}
TRI_index_t* TRI_CreateGeo1Index (TRI_document_collection_t* document,
                                  TRI_idx_iid_t iid,
                                  char const* locationName,
                                  TRI_shape_pid_t location,
                                  bool geoJson,
                                  bool unique,
                                  bool ignoreNull) {
  char* ln;
  TRI_geo_index_t* geo = static_cast<TRI_geo_index_t*>(TRI_Allocate(TRI_CORE_MEM_ZONE, sizeof(TRI_geo_index_t), false));
  TRI_index_t* idx = &geo->base;

  TRI_InitVectorString(&idx->_fields, TRI_CORE_MEM_ZONE);
  TRI_InitIndex(idx, iid, TRI_IDX_TYPE_GEO1_INDEX, document, unique, false);

  idx->_ignoreNull = ignoreNull;

  idx->memory = MemoryGeoIndex;
  idx->json = JsonGeo1Index;
  idx->insert = InsertGeoIndex;
  idx->remove = RemoveGeoIndex;

  ln = TRI_DuplicateStringZ(TRI_CORE_MEM_ZONE, locationName);
  TRI_PushBackVectorString(&idx->_fields, ln);

  geo->_geoIndex = GeoIndex_new();

  // oops, out of memory?
  if (geo->_geoIndex == NULL) {
    TRI_DestroyVectorString(&idx->_fields);
    TRI_Free(TRI_CORE_MEM_ZONE, geo);

    return NULL;
  }

  geo->_variant = geoJson ? INDEX_GEO_COMBINED_LAT_LON : INDEX_GEO_COMBINED_LON_LAT;
  geo->_location = location;
  geo->_latitude = 0;
  geo->_longitude = 0;
  geo->_geoJson = geoJson;

  GeoIndex_assignMethod(&(idx->indexQuery), TRI_INDEX_METHOD_ASSIGNMENT_QUERY);
  GeoIndex_assignMethod(&(idx->indexQueryFree), TRI_INDEX_METHOD_ASSIGNMENT_FREE);
  GeoIndex_assignMethod(&(idx->indexQueryResult), TRI_INDEX_METHOD_ASSIGNMENT_RESULT);

  return idx;
}
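// Hypothetical usage sketch, not part of the original source: it contrasts
// the two geo variants. The index ids, attribute names and shape pids below
// are placeholders for caller-provided data.
static void ExampleCreateGeoIndexes (TRI_document_collection_t* document,
                                     TRI_shape_pid_t locationPid,
                                     TRI_shape_pid_t latPid,
                                     TRI_shape_pid_t lonPid) {
  // a single attribute holding a coordinate pair; the geoJson flag selects
  // between the combined lat/lon and lon/lat attribute orderings
  TRI_index_t* geo1 = TRI_CreateGeo1Index(document, 1, "location", locationPid,
                                          false /* geoJson */,
                                          false /* unique */,
                                          true  /* ignoreNull */);

  // two separate attributes, one for latitude and one for longitude
  TRI_index_t* geo2 = TRI_CreateGeo2Index(document, 2,
                                          "lat", latPid,
                                          "lon", lonPid,
                                          false /* unique */,
                                          true  /* ignoreNull */);

  // both constructors return NULL if GeoIndex_new() fails, so callers
  // must check before using the indexes
  if (geo1 == NULL || geo2 == NULL) {
    LOG_ERROR("out of memory while creating geo index");
  }
}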
bool TRI_ExecuteRubyDirectory (mrb_state* mrb, char const* path) {
  TRI_vector_string_t files;
  bool result;
  regex_t re;
  size_t i;

  LOG_TRACE("loading ruby script directory: '%s'", path);

  files = TRI_FilesDirectory(path);

  regcomp(&re, "^(.*)\\.rb$", REG_ICASE | REG_EXTENDED);

  result = true;

  for (i = 0;  i < files._length;  ++i) {
    bool ok;
    char const* filename;
    char* full;

    filename = files._buffer[i];

    // skip files that do not end in ".rb"
    if (regexec(&re, filename, 0, 0, 0) != 0) {
      continue;
    }

    full = TRI_Concatenate2File(path, filename);

    ok = TRI_ExecuteRubyFile(mrb, full);

    TRI_FreeString(TRI_CORE_MEM_ZONE, full);

    result = result && ok;

    if (! ok) {
      TRI_LogRubyException(mrb, mrb->exc);
    }
  }

  TRI_DestroyVectorString(&files);
  regfree(&re);

  return result;
}
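// Hypothetical caller sketch, not part of the original source: the script
// directory path is a placeholder. It shows that execution continues past a
// failing script and the aggregated result is only reported at the end.
static void ExampleLoadRubyScripts (mrb_state* mrb) {
  if (! TRI_ExecuteRubyDirectory(mrb, "/path/to/scripts")) {
    LOG_ERROR("at least one ruby script in the directory failed");
  }
}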
static TRI_col_file_structure_t ScanCollectionDirectory (char const* path) {
  TRI_col_file_structure_t structure;
  TRI_vector_string_t files;
  regex_t re;
  size_t i;
  size_t n;

  // check files within the directory
  files = TRI_FilesDirectory(path);
  n = files._length;

  regcomp(&re, "^(journal|datafile|index|compactor)-([0-9][0-9]*)\\.(db|json)$", REG_EXTENDED);

  TRI_InitVectorString(&structure._journals, TRI_CORE_MEM_ZONE);
  TRI_InitVectorString(&structure._compactors, TRI_CORE_MEM_ZONE);
  TRI_InitVectorString(&structure._datafiles, TRI_CORE_MEM_ZONE);
  TRI_InitVectorString(&structure._indexes, TRI_CORE_MEM_ZONE);

  for (i = 0;  i < n;  ++i) {
    char const* file = files._buffer[i];
    regmatch_t matches[4];

    if (regexec(&re, file, sizeof(matches) / sizeof(matches[0]), matches, 0) == 0) {
      char const* first = file + matches[1].rm_so;
      size_t firstLen = matches[1].rm_eo - matches[1].rm_so;

      char const* third = file + matches[3].rm_so;
      size_t thirdLen = matches[3].rm_eo - matches[3].rm_so;

      // .............................................................................
      // file is an index
      // .............................................................................

      if (TRI_EqualString2("index", first, firstLen) && TRI_EqualString2("json", third, thirdLen)) {
        char* filename;

        filename = TRI_Concatenate2File(path, file);
        TRI_PushBackVectorString(&structure._indexes, filename);
      }

      // .............................................................................
      // file is a journal or datafile
      // .............................................................................

      else if (TRI_EqualString2("db", third, thirdLen)) {
        char* filename;

        filename = TRI_Concatenate2File(path, file);

        // file is a journal
        if (TRI_EqualString2("journal", first, firstLen)) {
          TRI_PushBackVectorString(&structure._journals, filename);
        }

        // file is a compactor file
        else if (TRI_EqualString2("compactor", first, firstLen)) {
          TRI_PushBackVectorString(&structure._compactors, filename);
        }

        // file is a datafile
        else if (TRI_EqualString2("datafile", first, firstLen)) {
          TRI_PushBackVectorString(&structure._datafiles, filename);
        }

        // oops, what kind of file is that?
        else {
          LOG_ERROR("unknown datafile '%s'", file);
          TRI_FreeString(TRI_CORE_MEM_ZONE, filename);
        }
      }
      else {
        LOG_ERROR("unknown datafile '%s'", file);
      }
    }
  }

  TRI_DestroyVectorString(&files);
  regfree(&re);

  return structure;
}
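// Hypothetical caller sketch, not part of the original source: the returned
// TRI_col_file_structure_t owns four string vectors whose entries were
// allocated above, so a caller has to destroy all of them when done. The
// helper name is an assumption.
static void ExampleFreeCollectionStructure (TRI_col_file_structure_t* structure) {
  TRI_DestroyVectorString(&structure->_journals);
  TRI_DestroyVectorString(&structure->_compactors);
  TRI_DestroyVectorString(&structure->_datafiles);
  TRI_DestroyVectorString(&structure->_indexes);
}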
static bool CheckCollection (TRI_collection_t* collection) {
  TRI_datafile_t* datafile;
  TRI_vector_pointer_t all;
  TRI_vector_pointer_t compactors;
  TRI_vector_pointer_t datafiles;
  TRI_vector_pointer_t journals;
  TRI_vector_pointer_t sealed;
  TRI_vector_string_t files;
  bool stop;
  regex_t re;
  size_t i;
  size_t n;

  stop = false;

  // check files within the directory
  files = TRI_FilesDirectory(collection->_directory);
  n = files._length;

  regcomp(&re, "^(journal|datafile|index|compactor)-([0-9][0-9]*)\\.(db|json)$", REG_EXTENDED);

  TRI_InitVectorPointer(&journals, TRI_UNKNOWN_MEM_ZONE);
  TRI_InitVectorPointer(&compactors, TRI_UNKNOWN_MEM_ZONE);
  TRI_InitVectorPointer(&datafiles, TRI_UNKNOWN_MEM_ZONE);
  TRI_InitVectorPointer(&sealed, TRI_UNKNOWN_MEM_ZONE);
  TRI_InitVectorPointer(&all, TRI_UNKNOWN_MEM_ZONE);

  for (i = 0;  i < n;  ++i) {
    char const* file = files._buffer[i];
    regmatch_t matches[4];

    if (regexec(&re, file, sizeof(matches) / sizeof(matches[0]), matches, 0) == 0) {
      char const* first = file + matches[1].rm_so;
      size_t firstLen = matches[1].rm_eo - matches[1].rm_so;

      char const* third = file + matches[3].rm_so;
      size_t thirdLen = matches[3].rm_eo - matches[3].rm_so;

      // .............................................................................
      // file is an index, just store the filename
      // .............................................................................

      if (TRI_EqualString2("index", first, firstLen) && TRI_EqualString2("json", third, thirdLen)) {
        char* filename;

        filename = TRI_Concatenate2File(collection->_directory, file);
        TRI_PushBackVectorString(&collection->_indexFiles, filename);
      }

      // .............................................................................
      // file is a journal or datafile, open the datafile
      // .............................................................................

      else if (TRI_EqualString2("db", third, thirdLen)) {
        char* filename;
        char* ptr;
        TRI_col_header_marker_t* cm;

        filename = TRI_Concatenate2File(collection->_directory, file);
        datafile = TRI_OpenDatafile(filename);

        if (datafile == NULL) {
          collection->_lastError = TRI_errno();
          stop = true;

          LOG_ERROR("cannot open datafile '%s': %s", filename, TRI_last_error());

          TRI_FreeString(TRI_CORE_MEM_ZONE, filename);
          break;
        }

        TRI_PushBackVectorPointer(&all, datafile);

        // check the document header
        ptr = datafile->_data;
        ptr += TRI_DF_ALIGN_BLOCK(sizeof(TRI_df_header_marker_t));
        cm = (TRI_col_header_marker_t*) ptr;

        if (cm->base._type != TRI_COL_MARKER_HEADER) {
          LOG_ERROR("collection header mismatch in file '%s', expected TRI_COL_MARKER_HEADER, found %lu",
                    filename,
                    (unsigned long) cm->base._type);

          TRI_FreeString(TRI_CORE_MEM_ZONE, filename);
          stop = true;
          break;
        }

        if (cm->_cid != collection->_info._cid) {
          LOG_ERROR("collection identifier mismatch, expected %llu, found %llu",
                    (unsigned long long) collection->_info._cid,
                    (unsigned long long) cm->_cid);

          TRI_FreeString(TRI_CORE_MEM_ZONE, filename);
          stop = true;
          break;
        }

        // file is a journal
        if (TRI_EqualString2("journal", first, firstLen)) {
          if (datafile->_isSealed) {
            LOG_WARNING("strange, journal '%s' is already sealed; must be a left-over; will use it as datafile", filename);
            TRI_PushBackVectorPointer(&sealed, datafile);
          }
          else {
            TRI_PushBackVectorPointer(&journals, datafile);
          }
        }

        // file is a compactor file
        else if (TRI_EqualString2("compactor", first, firstLen)) {
          if (datafile->_isSealed) {
            LOG_WARNING("strange, compactor journal '%s' is already sealed; must be a left-over; will use it as datafile", filename);
            TRI_PushBackVectorPointer(&sealed, datafile);
          }
          else {
            TRI_PushBackVectorPointer(&compactors, datafile);
          }
        }

        // file is a datafile
        else if (TRI_EqualString2("datafile", first, firstLen)) {
          if (! datafile->_isSealed) {
            LOG_ERROR("datafile '%s' is not sealed, this should never happen", filename);

            collection->_lastError = TRI_set_errno(TRI_ERROR_ARANGO_CORRUPTED_DATAFILE);
            TRI_FreeString(TRI_CORE_MEM_ZONE, filename);
            stop = true;
            break;
          }
          else {
            TRI_PushBackVectorPointer(&datafiles, datafile);
          }
        }
        else {
          LOG_ERROR("unknown datafile '%s'", file);
        }

        TRI_FreeString(TRI_CORE_MEM_ZONE, filename);
      }
      else {
        LOG_ERROR("unknown datafile '%s'", file);
      }
    }
  }

  TRI_DestroyVectorString(&files);
  regfree(&re);

  // convert the sealed journals into datafiles
  if (! stop) {
    n = sealed._length;

    for (i = 0;  i < n;  ++i) {
      char* number;
      char* dname;
      char* filename;
      bool ok;

      datafile = sealed._buffer[i];

      number = TRI_StringUInt64(datafile->_fid);
      dname = TRI_Concatenate3String("datafile-", number, ".db");
      filename = TRI_Concatenate2File(collection->_directory, dname);

      TRI_FreeString(TRI_CORE_MEM_ZONE, dname);
      TRI_FreeString(TRI_CORE_MEM_ZONE, number);

      ok = TRI_RenameDatafile(datafile, filename);

      if (ok) {
        TRI_PushBackVectorPointer(&datafiles, datafile);
        LOG_DEBUG("renamed sealed journal to '%s'", filename);
      }
      else {
        collection->_lastError = datafile->_lastError;
        stop = true;

        LOG_ERROR("cannot rename sealed log-file to '%s', this should not happen: %s", filename, TRI_last_error());

        TRI_FreeString(TRI_CORE_MEM_ZONE, filename);
        break;
      }

      TRI_FreeString(TRI_CORE_MEM_ZONE, filename);
    }
  }

  TRI_DestroyVectorPointer(&sealed);

  // stop if necessary
  if (stop) {
    n = all._length;

    for (i = 0;  i < n;  ++i) {
      datafile = all._buffer[i];

      LOG_TRACE("closing datafile '%s'", datafile->_filename);

      TRI_CloseDatafile(datafile);
      TRI_FreeDatafile(datafile);
    }

    TRI_DestroyVectorPointer(&all);
    TRI_DestroyVectorPointer(&journals);
    TRI_DestroyVectorPointer(&compactors);
    TRI_DestroyVectorPointer(&datafiles);

    return false;
  }

  TRI_DestroyVectorPointer(&all);

  // add the datafiles and journals
  collection->_datafiles = datafiles;
  collection->_journals = journals;
  collection->_compactors = compactors;

  return true;
}
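// Hypothetical caller sketch, not part of the original source: on a false
// return CheckCollection() has already closed and freed every datafile it
// opened, so the caller only needs to report the failure and bail out. The
// helper name is an assumption.
static bool ExampleOpenCheckedCollection (TRI_collection_t* collection) {
  if (! CheckCollection(collection)) {
    LOG_ERROR("cannot open collection '%s': %s",
              collection->_info._name,
              TRI_last_error());
    return false;
  }

  // collection->_datafiles, _journals and _compactors are now populated
  return true;
}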
void TRI_FreeVectorString (TRI_memory_zone_t* zone, TRI_vector_string_t* vector) {
  TRI_DestroyVectorString(vector);
  TRI_Free(zone, vector);
}
void TRI_DestroyCapConstraint (TRI_index_t* idx) {
  TRI_DestroyVectorString(&idx->_fields);
}