void TRI_Insert4ArrayJson (TRI_memory_zone_t* zone, TRI_json_t* object, char* name, size_t nameLength, TRI_json_t* subobject, bool asReference) { TRI_json_t copy; TRI_ASSERT(name != NULL); // attribute name if (asReference) { InitStringReference(&copy, name, nameLength); } else { InitString(&copy, name, nameLength); } if (TRI_ReserveVector(&object->_value._objects, 2) != TRI_ERROR_NO_ERROR) { // TODO: signal OOM here return; } TRI_PushBackVector(&object->_value._objects, &copy); // attribute value TRI_PushBackVector(&object->_value._objects, subobject); }
int32_t other (int32_t left, uint32_t range) { uint32_t g = UINT32_MAX - UINT32_MAX % range; uint32_t r = device->random(); int count = 0; static int const MAX_COUNT = 20; TRI_ASSERT(g > 0); while (r >= g) { if (++count >= MAX_COUNT) { LOG_ERROR("cannot generate small random number after %d tries", count); r %= g; continue; } LOG_DEBUG("random number too large, trying again"); r = device->random(); } r %= range; return left + static_cast<int32_t>(r); }
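The function above avoids modulo bias by rejecting raw 32-bit values that fall into the non-divisible tail of the range before reducing modulo `range`. Below is a minimal, self-contained sketch of the same rejection-sampling idea; `boundedRandom` is a hypothetical name and std::mt19937 merely stands in for the random device used above.

#include <cstdint>
#include <random>

// Illustrative sketch only: unbiased integer in [left, left + range) via rejection
// sampling. Requires range > 0. Not the collection's actual random device.
int32_t boundedRandom(std::mt19937& rng, int32_t left, uint32_t range) {
  // largest multiple of `range` representable in 32 bits; raw values >= g would bias the modulo
  uint32_t const g = UINT32_MAX - UINT32_MAX % range;
  uint32_t r = static_cast<uint32_t>(rng());
  while (r >= g) {   // rejected: value fell into the biased tail, draw again
    r = static_cast<uint32_t>(rng());
  }
  return left + static_cast<int32_t>(r % range);
}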
void operator()(VPackSlice const& source, std::vector<ArangoDBPathFinder::Step*>& result) { std::vector<TRI_doc_mptr_t*> cursor; std::unique_ptr<arangodb::OperationCursor> edgeCursor; std::unordered_map<VPackSlice, size_t> candidates; for (auto const& edgeCollection : _block->_collectionInfos) { TRI_ASSERT(edgeCollection != nullptr); if (_reverse) { edgeCursor = edgeCollection->getReverseEdges(source); } else { edgeCursor = edgeCollection->getEdges(source); } candidates.clear(); // Clear the local cursor before using the // next edge cursor. // While iterating over the edge cursor, _cursor // has to stay intact. cursor.clear(); while (edgeCursor->hasMore()) { edgeCursor->getMoreMptr(cursor, UINT64_MAX); for (auto const& mptr : cursor) { VPackSlice edge(mptr->vpack()); VPackSlice from = arangodb::Transaction::extractFromFromDocument(edge); VPackSlice to = arangodb::Transaction::extractToFromDocument(edge); double currentWeight = edgeCollection->weightEdge(edge); if (from == source) { inserter(candidates, result, from, to, currentWeight, edge); } else { inserter(candidates, result, to, from, currentWeight, edge); } } } } }
static void ReadContinue(int rc, TRI_read_write_lock_t* lock) { bool complained = false; again: if (rc == EAGAIN) { // use busy waiting if we cannot acquire the read-lock in case of too many // concurrent read locks ("resource temporarily unavailable"). // in this case we'll wait in a busy loop until we can acquire the lock if (!complained) { LOG(WARN) << "too many read-locks on read-write lock"; complained = true; } usleep(BUSY_LOCK_DELAY); #ifdef TRI_HAVE_SCHED_H // let other threads do things sched_yield(); #endif rc = pthread_rwlock_rdlock(lock); if (rc == 0) { return; // done } // ideal use case for goto :-) goto again; } if (rc == EDEADLK) { LOG(ERR) << "rw-lock deadlock detected"; } #ifdef ARANGODB_ENABLE_MAINTAINER_MODE LOG(ERR) << "could not read-lock the read-write lock: " << strerror(rc); TRI_ASSERT(false); #endif LOG(FATAL) << "could not read-lock the read-write lock: " << strerror(rc); FATAL_ERROR_EXIT(); }
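As a rough, standalone illustration of the EAGAIN handling above (too many concurrent readers on a POSIX read-write lock), the following sketch retries pthread_rwlock_rdlock with a short back-off. The function name and the 10 ms delay are assumptions for the example, not the library's API.

#include <cerrno>
#include <cstdio>
#include <cstring>
#include <pthread.h>
#include <sched.h>
#include <unistd.h>

// Sketch only: keep retrying a read lock while the system reports
// "resource temporarily unavailable" (EAGAIN), i.e. too many readers.
static int readLockWithRetry(pthread_rwlock_t* lock) {
  int rc = pthread_rwlock_rdlock(lock);
  while (rc == EAGAIN) {
    usleep(10000);            // back off briefly (10 ms, arbitrary choice)
    sched_yield();            // give other threads a chance to release the lock
    rc = pthread_rwlock_rdlock(lock);
  }
  if (rc != 0) {
    fprintf(stderr, "could not read-lock: %s\n", strerror(rc));
  }
  return rc;                  // 0 on success, otherwise an errno-style code
}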
static int InitialiseCap (TRI_cap_constraint_t* cap, TRI_document_collection_t* document) { TRI_ASSERT(cap->_count > 0 || cap->_size > 0); TRI_headers_t* headers = document->_headersPtr; // ONLY IN INDEX (CAP) size_t currentCount = headers->count(); int64_t currentSize = headers->size(); if ((cap->_count > 0 && currentCount <= cap->_count) && (cap->_size > 0 && currentSize <= cap->_size)) { // nothing to do return TRI_ERROR_NO_ERROR; } else { TRI_vocbase_t* vocbase = document->_vocbase; TRI_voc_cid_t cid = document->_info._cid; triagens::arango::SingleCollectionWriteTransaction<UINT64_MAX> trx(new triagens::arango::StandaloneTransactionContext(), vocbase, cid); trx.addHint(TRI_TRANSACTION_HINT_LOCK_NEVER, false); trx.addHint(TRI_TRANSACTION_HINT_NO_BEGIN_MARKER, false); trx.addHint(TRI_TRANSACTION_HINT_NO_ABORT_MARKER, false); trx.addHint(TRI_TRANSACTION_HINT_SINGLE_OPERATION, false); // this is actually not true, but necessary to create trx id 0 int res = trx.begin(); if (res != TRI_ERROR_NO_ERROR) { return res; } TRI_transaction_collection_t* trxCollection = trx.trxCollection(); res = ApplyCap(cap, document, trxCollection); res = trx.finish(res); return res; } }
void TRI_headers_t::relink (TRI_doc_mptr_t* header, TRI_doc_mptr_t* old) { if (header == nullptr) { return; } TRI_ASSERT(header->getDataPtr() != nullptr); // ONLY IN HEADERS, PROTECTED by RUNTIME int64_t size = (int64_t) ((TRI_df_marker_t*) header->getDataPtr())->_size; // ONLY IN HEADERS, PROTECTED by RUNTIME TRI_ASSERT(size > 0); TRI_ASSERT(_begin != header); TRI_ASSERT(_end != header); this->move(header, old); _nrLinked++; _totalSize += TRI_DF_ALIGN_BLOCK(size); TRI_ASSERT(_totalSize > 0); TRI_ASSERT(header->_prev != header); TRI_ASSERT(header->_next != header); }
void TRI_headers_t::release (TRI_doc_mptr_t* header, bool unlinkHeader) { if (header == nullptr) { return; } if (unlinkHeader) { this->unlink(header); } header->clear(); TRI_ASSERT(_nrAllocated > 0); _nrAllocated--; header->setDataPtr(_freelist); // ONLY IN HEADERS _freelist = header; if (_nrAllocated == 0 && _blocks._length >= 8) { // if this was the last header, we can safely reclaim some // memory by freeing all already-allocated blocks and wiping the freelist // we only do this if we had allocated 8 blocks of headers // this limit is arbitrary, but will ensure we only free memory if // it is sensible and not everytime the last document is removed for (size_t i = 0; i < _blocks._length; ++i) { delete[] static_cast<TRI_doc_mptr_t*>(_blocks._buffer[i]); _blocks._buffer[i] = nullptr; } // set length to 0 _blocks._length = 0; _freelist = nullptr; _begin = nullptr; _end = nullptr; } }
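The release path above recycles headers through an intrusive freelist and only gives memory back once the last header has been returned and enough blocks have accumulated. A simplified, self-contained sketch of that policy follows; `Pool`, `Node` and the constants are illustrative stand-ins, not the actual headers class.

#include <cstddef>
#include <vector>

// Sketch of a block-based pool: nodes are recycled via a freelist, and all
// blocks are released at once when the pool becomes completely unused.
struct Node { Node* next = nullptr; };

class Pool {
  std::vector<Node*> _blocks;   // each block holds kBlockSize nodes
  Node* _freelist = nullptr;
  size_t _allocated = 0;        // nodes currently handed out
  static constexpr size_t kBlockSize = 128;
  static constexpr size_t kReclaimThreshold = 8;   // arbitrary, mirrors the "8 blocks" rule above

 public:
  Node* acquire() {
    if (_freelist == nullptr) {
      Node* block = new Node[kBlockSize];
      _blocks.push_back(block);
      for (size_t i = 0; i < kBlockSize; ++i) {   // chain the fresh block into the freelist
        block[i].next = _freelist;
        _freelist = &block[i];
      }
    }
    Node* n = _freelist;
    _freelist = n->next;
    ++_allocated;
    return n;
  }

  void release(Node* n) {
    n->next = _freelist;        // push the node back onto the freelist
    _freelist = n;
    --_allocated;
    if (_allocated == 0 && _blocks.size() >= kReclaimThreshold) {
      for (Node* block : _blocks) { delete[] block; }   // pool is empty: free everything
      _blocks.clear();
      _freelist = nullptr;
    }
  }

  ~Pool() { for (Node* block : _blocks) { delete[] block; } }
};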
void TRI_WriteLockReadWriteLock(TRI_read_write_lock_t* lock) { #ifdef TRI_TRACE_LOCKS if (_threadLocks.find(lock) != _threadLocks.end()) { LockError(lock, 2); } #endif int rc = pthread_rwlock_wrlock(lock); if (rc != 0) { if (rc == EDEADLK) { LOG(ERR) << "rw-lock deadlock detected"; } #ifdef ARANGODB_ENABLE_MAINTAINER_MODE LOG(ERR) << "could not write-lock the read-write lock: " << strerror(rc); TRI_ASSERT(false); #endif LOG(FATAL) << "could not write-lock the read-write lock: " << strerror(rc); FATAL_ERROR_EXIT(); } #ifdef TRI_TRACE_LOCKS _threadLocks.emplace(lock, 2); #endif }
TRI_aql_variable_t* TRI_CreateVariableAql (char const* name, TRI_aql_node_t* definingNode) { TRI_aql_variable_t* variable; variable = (TRI_aql_variable_t*) TRI_Allocate(TRI_UNKNOWN_MEM_ZONE, sizeof(TRI_aql_variable_t), false); if (variable == nullptr) { return nullptr; } variable->_name = TRI_DuplicateStringZ(TRI_UNKNOWN_MEM_ZONE, name); if (variable->_name == nullptr) { TRI_FreeVariableAql(variable); return nullptr; } variable->_definingNode = definingNode; variable->_isUpdated = false; TRI_ASSERT(definingNode); return variable; }
char* TRI_GetContextErrorAql (const char* const query, const size_t queryLength, const size_t line, const size_t column) { const char* p; char* q; char* result; char c; // note: line numbers reported by bison/flex start at 1, columns start at 0 size_t offset; size_t currentLine = 1; size_t currentColumn = 0; TRI_ASSERT(query); p = query; while ((c = *p)) { if (currentLine > line || (currentLine >= line && currentColumn >= column)) { break; } if (c == '\n') { ++p; ++currentLine; currentColumn = 0; } else if (c == '\r') { ++p; ++currentLine; currentColumn = 0; if (*p == '\n') { ++p; } } else { ++currentColumn; ++p; } } // p is pointing at the position in the query the parse error occurred at TRI_ASSERT(p >= query); offset = (size_t) (p - query); if (queryLength < offset + SNIPPET_LENGTH) { return TRI_DuplicateString2Z(TRI_UNKNOWN_MEM_ZONE, query + offset, queryLength - offset); } q = result = static_cast<char*>(TRI_Allocate(TRI_UNKNOWN_MEM_ZONE, SNIPPET_LENGTH + strlen(SNIPPET_SUFFIX) + 1, false)); if (result == NULL) { // out of memory return NULL; } // copy query part memcpy(q, query + offset, SNIPPET_LENGTH); q += SNIPPET_LENGTH; // copy ... memcpy(q, SNIPPET_SUFFIX, strlen(SNIPPET_SUFFIX)); q += strlen(SNIPPET_SUFFIX); *q = '\0'; return result; }
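The function above walks the query text to the reported (1-based line, 0-based column) position and returns a bounded snippet with a "..." suffix when it has to truncate. A compact, self-contained sketch of the same idea using std::string; `errorContext` and the default snippet length of 32 are assumptions for illustration.

#include <cstddef>
#include <string>

// Sketch: return up to `snippetLength` characters of `query` starting at the
// error position, appending "..." when the snippet is truncated.
std::string errorContext(std::string const& query, size_t line, size_t column,
                         size_t snippetLength = 32) {
  size_t currentLine = 1;
  size_t currentColumn = 0;
  size_t offset = 0;

  while (offset < query.size()) {
    if (currentLine > line || (currentLine >= line && currentColumn >= column)) {
      break;                                  // reached the reported error position
    }
    char c = query[offset++];
    if (c == '\n' || c == '\r') {             // treat \n, \r and \r\n as one line break
      ++currentLine;
      currentColumn = 0;
      if (c == '\r' && offset < query.size() && query[offset] == '\n') {
        ++offset;
      }
    } else {
      ++currentColumn;
    }
  }

  if (query.size() - offset <= snippetLength) {
    return query.substr(offset);              // the rest of the query fits completely
  }
  return query.substr(offset, snippetLength) + "...";
}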
int TRI_PushBack2ListJson (TRI_json_t* list, TRI_json_t const* object) { TRI_ASSERT(list->_type == TRI_JSON_LIST); TRI_ASSERT(object); return TRI_PushBackVector(&list->_value._objects, object); }
size_t TRI_LengthListJson (TRI_json_t const* json) { TRI_ASSERT(json != NULL && json->_type==TRI_JSON_LIST); return json->_value._objects._length; }
int TRI_ParseCsvString (TRI_csv_parser_t* parser, char const* line, size_t length) { char* ptr; char* qtr; // append line to buffer if (line != nullptr) { TRI_ASSERT(parser->_begin <= parser->_start); TRI_ASSERT(parser->_start <= parser->_written); TRI_ASSERT(parser->_written <= parser->_current); TRI_ASSERT(parser->_current <= parser->_stop); TRI_ASSERT(parser->_stop <= parser->_end); // there is enough room between STOP and END if (parser->_stop + length <= parser->_end) { memcpy(parser->_stop, line, length); parser->_stop += length; parser->_nMemcpy++; } else { size_t l1 = parser->_start - parser->_begin; size_t l2 = parser->_end - parser->_stop; size_t l3; // not enough room, but enough room between BEGIN and START plus STOP and END if (length <= l1 + l2) { l3 = parser->_stop - parser->_start; if (0 < l3) { memmove(parser->_begin, parser->_start, l3); } memcpy(parser->_begin + l3, line, length); parser->_start = parser->_begin; parser->_written = parser->_written - l1; parser->_current = parser->_current - l1; parser->_stop = parser->_begin + l3 + length; parser->_nMemmove++; } // really not enough room else { size_t l4, l5; l2 = parser->_stop - parser->_start; l3 = parser->_end - parser->_begin + length; l4 = parser->_written - parser->_start; l5 = parser->_current - parser->_start; ptr = static_cast<char*>(TRI_Allocate(parser->_memoryZone, l3, false)); if (ptr == nullptr) { return TRI_ERROR_OUT_OF_MEMORY; } memcpy(ptr, parser->_start, l2); memcpy(ptr + l2, line, length); TRI_Free(parser->_memoryZone, parser->_begin); parser->_begin = ptr; parser->_start = ptr; parser->_written = ptr + l4; parser->_current = ptr + l5; parser->_stop = ptr + l2 + length; parser->_end = ptr + l3; parser->_nResize++; } } // start parsing or continue ptr = parser->_current; qtr = parser->_written; while (true) { switch (parser->_state) { case TRI_CSV_PARSER_BOL: if (ptr == parser->_stop) { parser->_written = ptr; parser->_current = ptr; return false; } parser->begin(parser, parser->_row); parser->_column = 0; parser->_state = TRI_CSV_PARSER_BOF; break; case TRI_CSV_PARSER_BOL2: if (ptr == parser->_stop) { parser->_written = ptr; parser->_current = ptr; return false; } if (*ptr == '\n') { ptr++; } parser->_state = TRI_CSV_PARSER_BOL; break; case TRI_CSV_PARSER_BOF: if (ptr == parser->_stop) { parser->_written = ptr; parser->_current = ptr; return TRI_ERROR_CORRUPTED_CSV; } else if (parser->_useQuote && *ptr == parser->_quote) { if (ptr + 1 == parser->_stop) { parser->_written = qtr; parser->_current = ptr; return TRI_ERROR_CORRUPTED_CSV; } parser->_state = TRI_CSV_PARSER_WITHIN_QUOTED_FIELD; parser->_start = ++ptr; qtr = parser->_written = ptr; } else { parser->_state = TRI_CSV_PARSER_WITHIN_FIELD; parser->_start = ptr; qtr = parser->_written = ptr; } break; case TRI_CSV_PARSER_CORRUPTED: while (ptr < parser->_stop && *ptr != parser->_separator && *ptr != '\n') { ptr++; } // found separator or eol if (ptr < parser->_stop) { // found separator if (*ptr == parser->_separator) { ptr++; parser->_state = TRI_CSV_PARSER_BOF; } // found eol else { ptr++; parser->_row++; parser->_state = TRI_CSV_PARSER_BOL; } } // need more input else { parser->_written = qtr; parser->_current = ptr; return TRI_ERROR_NO_ERROR; } break; case TRI_CSV_PARSER_WITHIN_FIELD: while (ptr < parser->_stop && *ptr != parser->_separator && *ptr != '\r' && *ptr != '\n') { *qtr++ = *ptr++; } // found separator or eol if (ptr < parser->_stop) { // found separator if (*ptr == parser->_separator) { *qtr = '\0'; parser->add(parser, parser->_start, 
qtr - parser->_start, parser->_row, parser->_column, false); ptr++; parser->_column++; parser->_state = TRI_CSV_PARSER_BOF; } // found eol else { char c = *ptr; *qtr = '\0'; parser->end(parser, parser->_start, qtr - parser->_start, parser->_row, parser->_column, false); parser->_row++; if (c == '\r') { parser->_state = TRI_CSV_PARSER_BOL2; } else { parser->_state = TRI_CSV_PARSER_BOL; } ptr++; } } // need more input else { parser->_written = qtr; parser->_current = ptr; return TRI_ERROR_NO_ERROR; } break; case TRI_CSV_PARSER_WITHIN_QUOTED_FIELD: TRI_ASSERT(parser->_useQuote); while (ptr < parser->_stop && *ptr != parser->_quote && (! parser->_useBackslash || *ptr != '\\')) { *qtr++ = *ptr++; } // found quote or a backslash, need at least another quote, a separator, or an eol if (ptr + 1 < parser->_stop) { bool foundBackslash = (parser->_useBackslash && *ptr == '\\'); ++ptr; if (foundBackslash) { if (*ptr == parser->_quote || *ptr == '\\') { // backslash-escaped quote or literal backslash *qtr++ = *ptr; ptr++; break; } } else if (*ptr == parser->_quote) { // a real quote *qtr++ = parser->_quote; ptr++; break; } // ignore spaces while ((*ptr == ' ' || *ptr == '\t') && (ptr + 1) < parser->_stop) { ++ptr; } // found separator if (*ptr == parser->_separator) { *qtr = '\0'; parser->add(parser, parser->_start, qtr - parser->_start, parser->_row, parser->_column, true); ptr++; parser->_column++; parser->_state = TRI_CSV_PARSER_BOF; } else if (*ptr == '\r' || *ptr == '\n') { char c = *ptr; *qtr = '\0'; parser->end(parser, parser->_start, qtr - parser->_start, parser->_row, parser->_column, true); parser->_row++; if (c == '\r') { parser->_state = TRI_CSV_PARSER_BOL2; } else { parser->_state = TRI_CSV_PARSER_BOL; } ptr++; } // unexpected character after the closing quote else { parser->_state = TRI_CSV_PARSER_CORRUPTED; } } // need more input else { parser->_written = qtr; parser->_current = ptr; return TRI_ERROR_NO_ERROR; } break; } } } return TRI_ERROR_CORRUPTED_CSV; }
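The parser above is an incremental state machine (BOL/BOF/field/quoted-field/corrupted) that can stop at any point and resume when more input arrives. For orientation only, here is a much smaller, non-incremental sketch that splits one complete line and handles quoted fields with doubled quotes; `splitCsvLine` is a hypothetical helper illustrating the quoting rules, not the parser's API.

#include <string>
#include <vector>

// Sketch: split one complete CSV line into fields. A quote inside a quoted
// field is escaped by doubling it (""), matching the "a real quote" case above.
std::vector<std::string> splitCsvLine(std::string const& line,
                                      char separator = ',', char quote = '"') {
  std::vector<std::string> fields;
  std::string current;
  bool inQuotes = false;

  for (size_t i = 0; i < line.size(); ++i) {
    char c = line[i];
    if (inQuotes) {
      if (c == quote) {
        if (i + 1 < line.size() && line[i + 1] == quote) {
          current += quote;          // doubled quote -> literal quote character
          ++i;
        } else {
          inQuotes = false;          // closing quote ends the quoted section
        }
      } else {
        current += c;
      }
    } else if (c == quote && current.empty()) {
      inQuotes = true;               // opening quote at the start of a field
    } else if (c == separator) {
      fields.push_back(current);     // field boundary
      current.clear();
    } else {
      current += c;
    }
  }
  fields.push_back(current);         // last field (possibly empty)
  return fields;
}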
bool TraverserExpression::matchesCheck(arangodb::Transaction* trx, VPackSlice const& element) const { TRI_ASSERT(trx != nullptr); VPackSlice base = arangodb::basics::VelocyPackHelper::EmptyObjectValue(); VPackSlice value = element.resolveExternal(); // initialize compare value to Null VPackSlice result = arangodb::basics::VelocyPackHelper::NullValue(); // perform recursive check. this may modify value if (recursiveCheck(varAccess, value, base)) { result = value; } // hack for _id attribute TransactionBuilderLeaser builder(trx); if (result.isCustom() && base.isObject()) { builder->add(VPackValue(trx->extractIdString(base))); result = builder->slice(); } TRI_ASSERT(compareTo != nullptr); VPackOptions* options = trx->transactionContext()->getVPackOptions(); switch (comparisonType) { case arangodb::aql::NODE_TYPE_OPERATOR_BINARY_EQ: return arangodb::basics::VelocyPackHelper::compare(result, compareTo->slice(), false, options) == 0; case arangodb::aql::NODE_TYPE_OPERATOR_BINARY_NE: return arangodb::basics::VelocyPackHelper::compare(result, compareTo->slice(), false, options) != 0; case arangodb::aql::NODE_TYPE_OPERATOR_BINARY_LT: return arangodb::basics::VelocyPackHelper::compare(result, compareTo->slice(), true, options) < 0; case arangodb::aql::NODE_TYPE_OPERATOR_BINARY_LE: return arangodb::basics::VelocyPackHelper::compare(result, compareTo->slice(), true, options) <= 0; case arangodb::aql::NODE_TYPE_OPERATOR_BINARY_GE: return arangodb::basics::VelocyPackHelper::compare(result, compareTo->slice(), true, options) >= 0; case arangodb::aql::NODE_TYPE_OPERATOR_BINARY_GT: return arangodb::basics::VelocyPackHelper::compare(result, compareTo->slice(), true, options) > 0; case arangodb::aql::NODE_TYPE_OPERATOR_BINARY_IN: { // In means any of the elements in compareTo is identical VPackSlice compareArray = compareTo->slice(); for (auto const& cmp : VPackArrayIterator(compareArray)) { if (arangodb::basics::VelocyPackHelper::compare(result, cmp, false, options) == 0) { // One is identical return true; } } // If we get here non is identical return false; } case arangodb::aql::NODE_TYPE_OPERATOR_BINARY_NIN: { // NIN means none of the elements in compareTo is identical VPackSlice compareArray = compareTo->slice(); for (auto const& cmp : VPackArrayIterator(compareArray)) { if (arangodb::basics::VelocyPackHelper::compare(result, cmp, false, options) == 0) { // One is identical return false; } } // If we get here non is identical return true; } default: TRI_ASSERT(false); } return false; }
int TRI_GetErrorCodeAql (const TRI_aql_error_t* const error) { TRI_ASSERT(error); return error->_code; }
int TRI_InsertElementHashArrayMulti (TRI_hash_array_multi_t* array, TRI_index_search_value_t const* key, TRI_hash_index_element_multi_t* element, bool isRollback) { if (! CheckResize(array)) { return TRI_ERROR_OUT_OF_MEMORY; } uint64_t const n = array->_nrAlloc; uint64_t i, k; i = k = HashKey(array, key) % n; for (; i < n && array->_table[i]._document != nullptr && ! IsEqualKeyElement(array, key, &array->_table[i]); ++i); if (i == n) { for (i = 0; i < k && array->_table[i]._document != nullptr && ! IsEqualKeyElement(array, key, &array->_table[i]); ++i); } TRI_ASSERT_EXPENSIVE(i < n); TRI_hash_index_element_multi_t* arrayElement = &array->_table[i]; // ........................................................................... // If we found an element, return. While we allow duplicate entries in the // hash table, we do not allow duplicate elements. Elements would refer to the // (for example) an actual row in memory. This is different from the // TRI_InsertElementMultiArray function below where we only have keys to // differentiate between elements. // ........................................................................... bool found = (arrayElement->_document != nullptr); if (found) { if (isRollback) { if (arrayElement->_document == element->_document) { DestroyElement(array, element); return TRI_RESULT_ELEMENT_EXISTS; } auto current = arrayElement->_next; while (current != nullptr) { if (current->_document == element->_document) { DestroyElement(array, element); return TRI_RESULT_ELEMENT_EXISTS; } current = current->_next; } } auto ptr = GetFromFreelist(array); if (ptr == nullptr) { return TRI_ERROR_OUT_OF_MEMORY; } // link our element at the list head ptr->_document = element->_document; ptr->_subObjects = element->_subObjects; ptr->_next = arrayElement->_next; arrayElement->_next = ptr; // it is ok to destroy the element here, because we have copied its internal before! element->_subObjects = nullptr; DestroyElement(array, element); return TRI_ERROR_NO_ERROR; } TRI_ASSERT(arrayElement->_next == nullptr); // not found in list, now insert element->_next = nullptr; *arrayElement = *element; array->_nrUsed++; TRI_ASSERT(arrayElement->_next == nullptr); return TRI_ERROR_NO_ERROR; }
#ifdef TRI_ENABLE_MAINTAINER_MODE void* TRI_ReallocateZ (TRI_memory_zone_t* zone, void* m, uint64_t n, char const* file, int line) { #else void* TRI_Reallocate (TRI_memory_zone_t* zone, void* m, uint64_t n) { #endif if (m == nullptr) { #ifdef TRI_ENABLE_MAINTAINER_MODE return TRI_AllocateZ(zone, n, false, file, line); #else return TRI_Allocate(zone, n, false); #endif } char* p = (char*) m; #ifdef TRI_ENABLE_MAINTAINER_MODE CheckSize(n, file, line); #endif p = static_cast<char*>(REALLOC_WRAPPER(zone, p, (size_t) n)); if (p == nullptr) { if (zone->_failable) { TRI_set_errno(TRI_ERROR_OUT_OF_MEMORY); return nullptr; } if (CoreReserve == nullptr) { fprintf(stderr, "FATAL: failed to re-allocate %llu bytes for memory zone %d" ZONE_DEBUG_LOCATION ", giving up!\n", (unsigned long long) n, (int) zone->_zid ZONE_DEBUG_PARAMS); TRI_EXIT_FUNCTION(EXIT_FAILURE, nullptr); } free(CoreReserve); CoreReserve = nullptr; fprintf(stderr, "failed to re-allocate %llu bytes for memory zone %d" ZONE_DEBUG_LOCATION ", retrying!\n", (unsigned long long) n, (int) zone->_zid ZONE_DEBUG_PARAMS); #ifdef TRI_ENABLE_MAINTAINER_MODE return TRI_ReallocateZ(zone, m, n, file, line); #else return TRI_Reallocate(zone, m, n); #endif } return p; } //////////////////////////////////////////////////////////////////////////////// /// @brief basic memory management for deallocate //////////////////////////////////////////////////////////////////////////////// #ifdef TRI_ENABLE_MAINTAINER_MODE void TRI_FreeZ (TRI_memory_zone_t* zone, void* m, char const* file, int line) { #else void TRI_Free (TRI_memory_zone_t* zone, void* m) { #endif char* p = (char*) m; #ifdef TRI_ENABLE_MAINTAINER_MODE if (p == nullptr) { fprintf(stderr, "freeing nil ptr " ZONE_DEBUG_LOCATION ZONE_DEBUG_PARAMS); // crash intentionally TRI_ASSERT(false); } #endif free(p); }
void* TRI_PopStackParseAql (TRI_aql_context_t* const context) { TRI_ASSERT(context); TRI_ASSERT(context->_parser->_stack._length > 0); return TRI_RemoveVectorPointer(&context->_parser->_stack, context->_parser->_stack._length - 1); }
void* TRI_PeekStackParseAql (TRI_aql_context_t* const context) { TRI_ASSERT(context); TRI_ASSERT(context->_parser->_stack._length > 0); return context->_parser->_stack._buffer[context->_parser->_stack._length - 1]; }
size_t TRI_LengthVectorJson (TRI_json_t const* json) { TRI_ASSERT(json != nullptr && (json->_type == TRI_JSON_ARRAY || json->_type == TRI_JSON_OBJECT)); return TRI_LengthVector(&json->_value._objects); }
int VelocyPackHelper::compare(VPackSlice lhs, VPackSlice rhs, bool useUTF8, VPackOptions const* options, VPackSlice const* lhsBase, VPackSlice const* rhsBase) { { // will resolve externals... int lWeight = TypeWeight(lhs); int rWeight = TypeWeight(rhs); if (lWeight < rWeight) { return -1; } if (lWeight > rWeight) { return 1; } TRI_ASSERT(lWeight == rWeight); } lhs = lhs.resolveExternal(); // follow externals rhs = rhs.resolveExternal(); // follow externals // lhs and rhs have equal weights if (lhs.isNone() || rhs.isNone()) { // either lhs or rhs is none. we cannot be sure here that both are // nones. // there can also exist the situation that lhs is a none and rhs is a // null value // (or vice versa). Anyway, the compare value is the same for both, return 0; } auto lhsType = lhs.type(); switch (lhsType) { case VPackValueType::Illegal: case VPackValueType::MinKey: case VPackValueType::MaxKey: case VPackValueType::None: case VPackValueType::Null: return 0; case VPackValueType::Bool: { bool left = lhs.getBoolean(); bool right = rhs.getBoolean(); if (left == right) { return 0; } if (!left && right) { return -1; } return 1; } case VPackValueType::Double: case VPackValueType::Int: case VPackValueType::UInt: case VPackValueType::SmallInt: { return compareNumberValues(lhsType, lhs, rhs); } case VPackValueType::Custom: case VPackValueType::String: { std::string lhsString; VPackValueLength nl; char const* left; if (lhs.isCustom()) { if (lhsBase == nullptr || options == nullptr || options->customTypeHandler == nullptr) { THROW_ARANGO_EXCEPTION_MESSAGE(TRI_ERROR_INTERNAL, "Could not extract custom attribute."); } lhsString.assign(options->customTypeHandler->toString(lhs, options, *lhsBase)); left = lhsString.c_str(); nl = lhsString.size(); } else { left = lhs.getString(nl); } TRI_ASSERT(left != nullptr); std::string rhsString; VPackValueLength nr; char const* right; if (rhs.isCustom()) { if (rhsBase == nullptr || options == nullptr || options->customTypeHandler == nullptr) { THROW_ARANGO_EXCEPTION_MESSAGE(TRI_ERROR_INTERNAL, "Could not extract custom attribute."); } rhsString.assign(options->customTypeHandler->toString(rhs, options, *rhsBase)); right = rhsString.c_str(); nr = rhsString.size(); } else { right = rhs.getString(nr); } TRI_ASSERT(right != nullptr); int res; if (useUTF8) { res = TRI_compare_utf8(left, static_cast<size_t>(nl), right, static_cast<size_t>(nr)); } else { size_t len = static_cast<size_t>(nl < nr ? nl : nr); res = memcmp(left, right, len); } if (res < 0) { return -1; } if (res > 0) { return 1; } // res == 0 if (nl == nr) { return 0; } // res == 0, but different string lengths return nl < nr ? 
-1 : 1; } case VPackValueType::Array: { VPackValueLength const nl = lhs.length(); VPackValueLength const nr = rhs.length(); VPackValueLength const n = (std::max)(nr, nl); for (VPackValueLength i = 0; i < n; ++i) { VPackSlice lhsValue; if (i < nl) { lhsValue = lhs.at(i).resolveExternal(); } VPackSlice rhsValue; if (i < nr) { rhsValue = rhs.at(i).resolveExternal(); } int result = compare(lhsValue, rhsValue, useUTF8, options, &lhs, &rhs); if (result != 0) { return result; } } return 0; } case VPackValueType::Object: { std::set<std::string, AttributeSorterUTF8> keys; VPackCollection::keys(lhs, keys); VPackCollection::keys(rhs, keys); for (auto const& key : keys) { VPackSlice lhsValue = lhs.get(key).resolveExternal(); if (lhsValue.isNone()) { // not present => null lhsValue = VPackSlice::nullSlice(); } VPackSlice rhsValue = rhs.get(key).resolveExternal(); if (rhsValue.isNone()) { // not present => null rhsValue = VPackSlice::nullSlice(); } int result = compare(lhsValue, rhsValue, useUTF8, options, &lhs, &rhs); if (result != 0) { return result; } } return 0; } default: // Contains all other ValueTypes of VelocyPack. // They are not used in ArangoDB so this cannot occur TRI_ASSERT(false); return 0; } }
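compare() above first orders values by a type weight (so, for example, any number sorts before any string) and only compares payloads when the weights match. The following self-contained sketch shows that two-step ordering over a small variant type; the names, the variant and the weights are made up for the example and are not VelocyPack's.

#include <cstddef>
#include <string>
#include <variant>

// Sketch: order values of mixed types by a coarse type weight first,
// then by value within the same type.
using Value = std::variant<std::nullptr_t, bool, double, std::string>;

static int typeWeight(Value const& v) {
  switch (v.index()) {
    case 0: return 0;   // null
    case 1: return 1;   // bool
    case 2: return 2;   // number
    default: return 3;  // string
  }
}

int compareValues(Value const& lhs, Value const& rhs) {
  int lw = typeWeight(lhs);
  int rw = typeWeight(rhs);
  if (lw != rw) {
    return lw < rw ? -1 : 1;       // different types: the weight alone decides
  }
  switch (lw) {
    case 0: return 0;              // null == null
    case 1: {
      bool l = std::get<bool>(lhs), r = std::get<bool>(rhs);
      return l == r ? 0 : (!l ? -1 : 1);
    }
    case 2: {
      double l = std::get<double>(lhs), r = std::get<double>(rhs);
      return l == r ? 0 : (l < r ? -1 : 1);
    }
    default: {
      int c = std::get<std::string>(lhs).compare(std::get<std::string>(rhs));
      return c < 0 ? -1 : (c > 0 ? 1 : 0);
    }
  }
}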
static uint64_t HashJsonRecursive (uint64_t hash, TRI_json_t const* object) { if (nullptr == object) { return HashBlock(hash, "null", 4); // strlen("null") } switch (object->_type) { case TRI_JSON_UNUSED: { return hash; } case TRI_JSON_NULL: { return HashBlock(hash, "null", 4); // strlen("null") } case TRI_JSON_BOOLEAN: { if (object->_value._boolean) { return HashBlock(hash, "true", 4); // strlen("true") } else { return HashBlock(hash, "false", 5); // strlen("false") } } case TRI_JSON_NUMBER: { return HashBlock(hash, (char const*) &(object->_value._number), sizeof(object->_value._number)); } case TRI_JSON_STRING: case TRI_JSON_STRING_REFERENCE: { return HashBlock(hash, object->_value._string.data, object->_value._string.length); } case TRI_JSON_OBJECT: { hash = HashBlock(hash, "array", 5); // strlen("array") size_t const n = TRI_LengthVector(&object->_value._objects); uint64_t tmphash = hash; for (size_t i = 0; i < n; i += 2) { auto subjson = static_cast<TRI_json_t const*>(TRI_AddressVector(&object->_value._objects, i)); TRI_ASSERT(TRI_IsStringJson(subjson)); tmphash ^= HashJsonRecursive(hash, subjson); subjson = static_cast<TRI_json_t const*>(TRI_AddressVector(&object->_value._objects, i + 1)); tmphash ^= HashJsonRecursive(hash, subjson); } return tmphash; } case TRI_JSON_ARRAY: { hash = HashBlock(hash, "list", 4); // strlen("list") size_t const n = TRI_LengthVector(&object->_value._objects); for (size_t i = 0; i < n; ++i) { auto subjson = static_cast<TRI_json_t const*>(TRI_AddressVector(&object->_value._objects, i)); hash = HashJsonRecursive(hash, subjson); } return hash; } } return hash; // never reached }
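Note how the object case above XOR-combines the member hashes (so attribute order does not change the result) while the array case chains them sequentially (so element order does). A small standalone sketch of that distinction using std::hash; the function names and mixing constants are arbitrary choices for illustration.

#include <cstdint>
#include <functional>
#include <map>
#include <string>
#include <vector>

// Sketch: order-insensitive hash for key/value members, order-sensitive hash
// for array elements, mirroring the XOR vs. chaining split in the code above.
uint64_t hashObject(std::map<std::string, std::string> const& object, uint64_t seed) {
  uint64_t h = seed;
  std::hash<std::string> hasher;
  for (auto const& [key, value] : object) {
    // XOR-combine per member: commutative, so member order cannot matter
    h ^= hasher(key) * 31 + hasher(value);
  }
  return h;
}

uint64_t hashArray(std::vector<std::string> const& array, uint64_t seed) {
  uint64_t h = seed;
  std::hash<std::string> hasher;
  for (auto const& element : array) {
    // fold each element into the running hash: order matters here
    h = h * 1099511628211ULL ^ hasher(element);
  }
  return h;
}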
static TRI_json_t* MergeRecursive (TRI_memory_zone_t* zone, TRI_json_t const* lhs, TRI_json_t const* rhs, bool nullMeansRemove, bool mergeObjects) { TRI_ASSERT(lhs != nullptr); std::unique_ptr<TRI_json_t> result(TRI_CopyJson(zone, lhs)); if (result == nullptr) { return nullptr; } auto r = result.get(); // shortcut variable size_t const n = TRI_LengthVector(&rhs->_value._objects); for (size_t i = 0; i < n; i += 2) { // enumerate all the replacement values auto key = static_cast<TRI_json_t const*>(TRI_AtVector(&rhs->_value._objects, i)); auto value = static_cast<TRI_json_t const*>(TRI_AtVector(&rhs->_value._objects, i + 1)); if (value->_type == TRI_JSON_NULL && nullMeansRemove) { // replacement value is a null and we don't want to store nulls => delete attribute from the result TRI_DeleteObjectJson(zone, r, key->_value._string.data); } else { // replacement value is not a null or we want to store nulls TRI_json_t const* lhsValue = TRI_LookupObjectJson(lhs, key->_value._string.data); if (lhsValue == nullptr) { // existing object does not have the attribute => append new attribute if (value->_type == TRI_JSON_OBJECT && nullMeansRemove) { // merge the new object into an empty object so nested nulls are stripped from it as well TRI_json_t empty; TRI_InitObjectJson(TRI_UNKNOWN_MEM_ZONE, &empty); TRI_json_t* merged = MergeRecursive(zone, &empty, value, nullMeansRemove, mergeObjects); if (merged == nullptr) { return nullptr; } TRI_Insert3ObjectJson(zone, r, key->_value._string.data, merged); } else { TRI_json_t* copy = TRI_CopyJson(zone, value); if (copy == nullptr) { return nullptr; } TRI_Insert3ObjectJson(zone, r, key->_value._string.data, copy); } } else { // existing object already has the attribute => replace attribute if (lhsValue->_type == TRI_JSON_OBJECT && value->_type == TRI_JSON_OBJECT && mergeObjects) { TRI_json_t* merged = MergeRecursive(zone, lhsValue, value, nullMeansRemove, mergeObjects); if (merged == nullptr) { return nullptr; } TRI_ReplaceObjectJson(zone, r, key->_value._string.data, merged); TRI_FreeJson(zone, merged); } else { TRI_ReplaceObjectJson(zone, r, key->_value._string.data, value); } } } } return result.release(); }
int TRI_CompareValuesJson (TRI_json_t const* lhs, TRI_json_t const* rhs, bool useUTF8) { // note: both lhs and rhs may be NULL! { int lWeight = TypeWeight(lhs); int rWeight = TypeWeight(rhs); if (lWeight < rWeight) { return -1; } if (lWeight > rWeight) { return 1; } TRI_ASSERT_EXPENSIVE(lWeight == rWeight); } // lhs and rhs have equal weights if (lhs == nullptr || rhs == nullptr) { // either lhs or rhs is a nullptr. we cannot be sure here that both are nullptrs. // there can also exist the situation that lhs is a nullptr and rhs is a JSON null value // (or vice versa). Anyway, the compare value is the same for both, return 0; } switch (lhs->_type) { case TRI_JSON_UNUSED: case TRI_JSON_NULL: { return 0; // null == null; } case TRI_JSON_BOOLEAN: { if (lhs->_value._boolean == rhs->_value._boolean) { return 0; } if (! lhs->_value._boolean && rhs->_value._boolean) { return -1; } return 1; } case TRI_JSON_NUMBER: { if (lhs->_value._number == rhs->_value._number) { return 0; } if (lhs->_value._number < rhs->_value._number) { return -1; } return 1; } case TRI_JSON_STRING: case TRI_JSON_STRING_REFERENCE: { // same for STRING and STRING_REFERENCE TRI_ASSERT(lhs->_value._string.data != nullptr); TRI_ASSERT(rhs->_value._string.data != nullptr); int res; size_t const nl = lhs->_value._string.length - 1; size_t const nr = rhs->_value._string.length - 1; if (useUTF8) { res = TRI_compare_utf8(lhs->_value._string.data, nl, rhs->_value._string.data, nr); } else { // beware of strings containing NUL bytes size_t len = nl < nr ? nl : nr; res = memcmp(lhs->_value._string.data, rhs->_value._string.data, len); } if (res < 0) { return -1; } else if (res > 0) { return 1; } // res == 0 if (nl == nr) { return 0; } // res == 0, but different string lengths return nl < nr ? -1 : 1; } case TRI_JSON_ARRAY: { size_t const nl = TRI_LengthVector(&lhs->_value._objects); size_t const nr = TRI_LengthVector(&rhs->_value._objects); size_t n; if (nl > nr) { n = nl; } else { n = nr; } for (size_t i = 0; i < n; ++i) { auto lhsValue = (i >= nl) ? nullptr : static_cast<TRI_json_t const*>(TRI_AtVector(&lhs->_value._objects, i)); auto rhsValue = (i >= nr) ? nullptr : static_cast<TRI_json_t const*>(TRI_AtVector(&rhs->_value._objects, i)); int result = TRI_CompareValuesJson(lhsValue, rhsValue, useUTF8); if (result != 0) { return result; } } return 0; } case TRI_JSON_OBJECT: { TRI_ASSERT(lhs->_type == TRI_JSON_OBJECT); TRI_ASSERT(rhs->_type == TRI_JSON_OBJECT); std::unique_ptr<TRI_json_t> keys(GetMergedKeyArray(lhs, rhs)); if (keys != nullptr) { auto json = keys.get(); size_t const n = TRI_LengthVector(&json->_value._objects); for (size_t i = 0; i < n; ++i) { auto keyElement = static_cast<TRI_json_t const*>(TRI_AtVector(&json->_value._objects, i)); TRI_ASSERT(TRI_IsStringJson(keyElement)); TRI_json_t const* lhsValue = TRI_LookupObjectJson(lhs, keyElement->_value._string.data); // may be NULL TRI_json_t const* rhsValue = TRI_LookupObjectJson(rhs, keyElement->_value._string.data); // may be NULL int result = TRI_CompareValuesJson(lhsValue, rhsValue, useUTF8); if (result != 0) { return result; } } } // fall-through to returning 0 } } return 0; }
int TRI_CompareValuesJson (const TRI_json_t* const lhs, const TRI_json_t* const rhs) { // note: both lhs and rhs may be NULL! int lWeight = TypeWeight(lhs); int rWeight = TypeWeight(rhs); if (lWeight < rWeight) { return -1; } if (lWeight > rWeight) { return 1; } // lhs and rhs have equal weights if (lhs == NULL) { // both lhs and rhs are NULL, so they are equal return 0; } switch (lhs->_type) { case TRI_JSON_UNUSED: case TRI_JSON_NULL: return 0; // null == null; case TRI_JSON_BOOLEAN: if (lhs->_value._boolean == rhs->_value._boolean) { return 0; } if (!lhs->_value._boolean && rhs->_value._boolean) { return -1; } return 1; case TRI_JSON_NUMBER: if (lhs->_value._number == rhs->_value._number) { return 0; } if (lhs->_value._number < rhs->_value._number) { return -1; } return 1; case TRI_JSON_STRING: case TRI_JSON_STRING_REFERENCE: // same for STRING and STRING_REFERENCE return strcmp(lhs->_value._string.data, rhs->_value._string.data); case TRI_JSON_LIST: { size_t nl = lhs->_value._objects._length; size_t nr = rhs->_value._objects._length; size_t i, n; if (nl > nr) { n = nl; } else { n = nr; } for (i = 0; i < n; ++i) { TRI_json_t* lhsValue; TRI_json_t* rhsValue; int result; lhsValue = (i >= nl) ? NULL : TRI_AtVector(&lhs->_value._objects, i); rhsValue = (i >= nr) ? NULL : TRI_AtVector(&rhs->_value._objects, i); result = TRI_CompareValuesJson(lhsValue, rhsValue); if (result != 0) { return result; } } return 0; } case TRI_JSON_ARRAY: { TRI_json_t* keys; TRI_ASSERT(lhs->_type == TRI_JSON_ARRAY); TRI_ASSERT(rhs->_type == TRI_JSON_ARRAY); keys = GetMergedKeyList(lhs, rhs); if (keys != NULL) { size_t i, n; n = keys->_value._objects._length; for (i = 0; i < n; ++i) { TRI_json_t* keyElement; TRI_json_t* lhsValue; TRI_json_t* rhsValue; int result; keyElement = TRI_AtVector(&keys->_value._objects, i); TRI_ASSERT(TRI_IsStringJson(keyElement)); lhsValue = TRI_LookupArrayJson((TRI_json_t*) lhs, keyElement->_value._string.data); // may be NULL rhsValue = TRI_LookupArrayJson((TRI_json_t*) rhs, keyElement->_value._string.data); // may be NULL result = TRI_CompareValuesJson(lhsValue, rhsValue); if (result != 0) { TRI_FreeJson(TRI_UNKNOWN_MEM_ZONE, keys); return result; } } TRI_FreeJson(TRI_UNKNOWN_MEM_ZONE, keys); } return 0; } default: return 0; } }
TRI_json_t* TRI_IntersectListsJson (const TRI_json_t* const list1, const TRI_json_t* const list2, const bool unique) { TRI_json_t* last = NULL; TRI_json_t* result; size_t i1, i2; size_t n1, n2; TRI_ASSERT(list1); TRI_ASSERT(list1->_type == TRI_JSON_LIST); TRI_ASSERT(list2); TRI_ASSERT(list2->_type == TRI_JSON_LIST); n1 = list1->_value._objects._length; n2 = list2->_value._objects._length; // create result list result = TRI_CreateList2Json(TRI_UNKNOWN_MEM_ZONE, n1 > n2 ? n1 : n2); if (result == NULL) { return NULL; } // special case for empty lists if (n1 == 0 || n2 == 0) { return result; } // reset positions i1 = 0; i2 = 0; // iterate over lists while (i1 < n1 && i2 < n2) { // pointers to elements in both lists TRI_json_t* p1 = TRI_AtVector(&list1->_value._objects, i1); TRI_json_t* p2 = TRI_AtVector(&list2->_value._objects, i2); int compareResult = TRI_CompareValuesJson(p1, p2); if (compareResult < 0) { // left element is smaller ++i1; } else if (compareResult > 0) { // right element is smaller ++i2; } else { // both elements are equal if (! unique || last == NULL || TRI_CompareValuesJson(p1, last) > 0) { TRI_PushBackListJson(TRI_UNKNOWN_MEM_ZONE, result, p1); last = p1; if (! unique) { TRI_PushBackListJson(TRI_UNKNOWN_MEM_ZONE, result, p2); } } ++i1; ++i2; } } return result; }
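TRI_IntersectListsJson assumes both lists are sorted and walks them with two cursors, advancing the side with the smaller element and optionally skipping duplicates. A merge-style intersection over plain sorted vectors in the same spirit, as a self-contained sketch (simplified: it emits one element per match, unlike the function above, which in non-unique mode appends both matching elements):

#include <vector>

// Sketch: intersect two sorted int vectors with two cursors.
// With `unique` set, equal runs contribute a single element to the result.
std::vector<int> intersectSorted(std::vector<int> const& a,
                                 std::vector<int> const& b, bool unique) {
  std::vector<int> result;
  size_t i = 0, j = 0;
  while (i < a.size() && j < b.size()) {
    if (a[i] < b[j]) {
      ++i;                         // left element is smaller, advance left
    } else if (b[j] < a[i]) {
      ++j;                         // right element is smaller, advance right
    } else {
      if (!unique || result.empty() || result.back() != a[i]) {
        result.push_back(a[i]);    // common element (skip repeats when unique)
      }
      ++i;
      ++j;
    }
  }
  return result;
}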
static bool BytecodeShapeAccessor (TRI_shaper_t* shaper, TRI_shape_access_t* accessor) { TRI_shape_aid_t const* paids; TRI_shape_path_t const* path; TRI_shape_t const* shape; TRI_vector_pointer_t ops; size_t i; size_t j; int res; // find the shape shape = shaper->lookupShapeId(shaper, accessor->_sid); if (shape == nullptr) { LOG_ERROR("unknown shape id %llu", (unsigned long long) accessor->_sid); #ifdef TRI_ENABLE_MAINTAINER_MODE TRI_ASSERT(false); #endif return false; } // find the attribute path path = shaper->lookupAttributePathByPid(shaper, accessor->_pid); if (path == nullptr) { LOG_ERROR("unknown attribute path %llu", (unsigned long long) accessor->_pid); #ifdef TRI_ENABLE_MAINTAINER_MODE TRI_ASSERT(false); #endif return false; } paids = (TRI_shape_aid_t*) (((char const*) path) + sizeof(TRI_shape_path_t)); // collect the bytecode // we need at least 2 entries in the vector to store an accessor TRI_InitVectorPointer2(&ops, shaper->_memoryZone, 2); // and follow it for (i = 0; i < path->_aidLength; ++i, ++paids) { #ifdef DEBUG_SHAPE_ACCESSOR printf("%lu: aid: %lu, sid: %lu, type %lu\n", (unsigned long) i, (unsigned long) *paids, (unsigned long) shape->_sid, (unsigned long) shape->_type); #endif if (shape->_type == TRI_SHAPE_ARRAY) { TRI_array_shape_t* s; TRI_shape_aid_t const* aids; TRI_shape_sid_t const* sids; TRI_shape_sid_t sid; TRI_shape_size_t const* offsetsF; TRI_shape_size_t f; TRI_shape_size_t n; TRI_shape_size_t v; char const* qtr; s = (TRI_array_shape_t*) shape; f = s->_fixedEntries; v = s->_variableEntries; n = f + v; // find the aid within the shape qtr = (char const*) shape; qtr += sizeof(TRI_array_shape_t); sids = (TRI_shape_sid_t const*) qtr; qtr += n * sizeof(TRI_shape_sid_t); aids = (TRI_shape_aid_t const*) qtr; qtr += n * sizeof(TRI_shape_aid_t); offsetsF = (TRI_shape_size_t const*) qtr; // check for fixed size aid for (j = 0; j < f; ++j, ++sids, ++aids, ++offsetsF) { if (*paids == *aids) { sid = *sids; LOG_TRACE("found aid '%ld' as fixed entry with sid '%ld' and offset '%ld' - '%ld'", (unsigned long) *paids, (unsigned long) sid, (unsigned long) offsetsF[0], (unsigned long) offsetsF[1]); shape = shaper->lookupShapeId(shaper, sid); if (shape == nullptr) { LOG_ERROR("unknown shape id '%ld' for attribute id '%ld'", (unsigned long) accessor->_sid, (unsigned long) *paids); TRI_DestroyVectorPointer(&ops); return false; } res = TRI_PushBackVectorPointer(&ops, (void*) TRI_SHAPE_AC_OFFSET_FIX); if (res != TRI_ERROR_NO_ERROR) { LOG_ERROR("out of memory"); TRI_DestroyVectorPointer(&ops); return false; } res = TRI_PushBackVectorPointer(&ops, (void*) (uintptr_t) (offsetsF[0])); // offset is always smaller than 4 GByte if (res != TRI_ERROR_NO_ERROR) { LOG_ERROR("out of memory"); TRI_DestroyVectorPointer(&ops); return false; } res = TRI_PushBackVectorPointer(&ops, (void*) (uintptr_t) (offsetsF[1])); // offset is always smaller than 4 GByte if (res != TRI_ERROR_NO_ERROR) { LOG_ERROR("out of memory"); TRI_DestroyVectorPointer(&ops); return false; } break; } } if (j < f) { continue; } // check for variable size aid for (j = 0; j < v; ++j, ++sids, ++aids) { if (*paids == *aids) { sid = *sids; LOG_TRACE("found aid '%ld' as variable entry with sid '%ld'", (unsigned long) *paids, (unsigned long) sid); shape = shaper->lookupShapeId(shaper, sid); if (shape == nullptr) { LOG_ERROR("unknown shape id '%ld' for attribute id '%ld'", (unsigned long) accessor->_sid, (unsigned long) *paids); LOG_ERROR("out of memory"); TRI_DestroyVectorPointer(&ops); return false; } res = 
TRI_PushBackVectorPointer(&ops, (void*) TRI_SHAPE_AC_OFFSET_VAR); if (res != TRI_ERROR_NO_ERROR) { LOG_ERROR("out of memory"); TRI_DestroyVectorPointer(&ops); return false; } res = TRI_PushBackVectorPointer(&ops, (void*) j); if (res != TRI_ERROR_NO_ERROR) { LOG_ERROR("out of memory"); TRI_DestroyVectorPointer(&ops); return false; } break; } } if (j < v) { continue; } LOG_TRACE("unknown attribute id '%ld'", (unsigned long) *paids); TRI_DestroyVectorPointer(&ops); accessor->_resultSid = TRI_SHAPE_ILLEGAL; accessor->_code = nullptr; return true; } else { TRI_DestroyVectorPointer(&ops); accessor->_resultSid = TRI_SHAPE_ILLEGAL; accessor->_code = nullptr; return true; } } // travel attribute path to the end res = TRI_PushBackVectorPointer(&ops, (void*) TRI_SHAPE_AC_DONE); if (res != TRI_ERROR_NO_ERROR) { LOG_ERROR("out of memory"); TRI_DestroyVectorPointer(&ops); return false; } // remember resulting sid accessor->_resultSid = shape->_sid; // steal buffer from ops vector so we don't need to copy it accessor->_code = const_cast<void const**>(ops._buffer); // inform the vector that we took over ownership ops._buffer = nullptr; TRI_DestroyVectorPointer(&ops); return true; }
static uint64_t HashJsonRecursive (uint64_t hash, TRI_json_t const* object) { size_t n; size_t i; uint64_t tmphash; TRI_json_t const* subjson; if (0 == object) { return HashBlock(hash, "null", 4); // strlen("null") } switch (object->_type) { case TRI_JSON_UNUSED: { return hash; } case TRI_JSON_NULL: { return HashBlock(hash, "null", 4); // strlen("null") } case TRI_JSON_BOOLEAN: { if (object->_value._boolean) { return HashBlock(hash, "true", 4); // strlen("true") } else { return HashBlock(hash, "false", 5); // strlen("false") } } case TRI_JSON_NUMBER: { return HashBlock(hash, (char const*) &(object->_value._number), sizeof(object->_value._number)); } case TRI_JSON_STRING: case TRI_JSON_STRING_REFERENCE: { return HashBlock(hash, object->_value._string.data, object->_value._string.length); } case TRI_JSON_ARRAY: { hash = HashBlock(hash, "array", 5); // strlen("array") n = object->_value._objects._length; tmphash = hash; for (i = 0; i < n; i += 2) { subjson = (const TRI_json_t*) TRI_AtVector(&object->_value._objects, i); TRI_ASSERT(TRI_IsStringJson(subjson)); tmphash ^= HashJsonRecursive(hash, subjson); subjson = (const TRI_json_t*) TRI_AtVector(&object->_value._objects, i+1); tmphash ^= HashJsonRecursive(hash, subjson); } return tmphash; } case TRI_JSON_LIST: { hash = HashBlock(hash, "list", 4); // strlen("list") n = object->_value._objects._length; for (i = 0; i < n; ++i) { subjson = (const TRI_json_t*) TRI_AtVector(&object->_value._objects, i); hash = HashJsonRecursive(hash, subjson); } return hash; } } return hash; // never reached }
int TRI_RemoveElementHashArrayMulti (TRI_hash_array_multi_t* array, TRI_index_search_value_t const* key, TRI_hash_index_element_multi_t* element) { uint64_t const n = array->_nrAlloc; uint64_t i, k; i = k = HashKey(array, key) % n; for (; i < n && array->_table[i]._document != nullptr && ! IsEqualKeyElement(array, key, &array->_table[i]); ++i); if (i == n) { for (i = 0; i < k && array->_table[i]._document != nullptr && ! IsEqualKeyElement(array, key, &array->_table[i]); ++i); } TRI_ASSERT_EXPENSIVE(i < n); TRI_hash_index_element_multi_t* arrayElement = &array->_table[i]; bool found = (arrayElement->_document != nullptr); if (! found) { return TRI_RESULT_ELEMENT_NOT_FOUND; } if (arrayElement->_document != element->_document) { // look in the overflow list for the sought document auto next = &(arrayElement->_next); while (*next != nullptr) { if ((*next)->_document == element->_document) { auto ptr = (*next)->_next; DestroyElement(array, *next); ReturnToFreelist(array, *next); *next = ptr; return TRI_ERROR_NO_ERROR; } next = &((*next)->_next); } return TRI_RESULT_ELEMENT_NOT_FOUND; } // the element itself is the document to remove TRI_ASSERT(arrayElement->_document == element->_document); if (arrayElement->_next != nullptr) { auto next = arrayElement->_next; // destroy our own data first, otherwise we'll leak TRI_ASSERT(arrayElement->_subObjects != nullptr); TRI_Free(TRI_UNKNOWN_MEM_ZONE, arrayElement->_subObjects); // copy data from first overflow element into ourselves arrayElement->_document = next->_document; arrayElement->_subObjects = next->_subObjects; arrayElement->_next = next->_next; // and remove the first overflow element next->_subObjects = nullptr; DestroyElement(array, next); ReturnToFreelist(array, next); return TRI_ERROR_NO_ERROR; } TRI_ASSERT(arrayElement->_next == nullptr); DestroyElement(array, arrayElement); array->_nrUsed--; // ........................................................................... // and now check the following places for items to move here // ........................................................................... k = TRI_IncModU64(i, n); while (array->_table[k]._document != nullptr) { uint64_t j = HashElement(array, &array->_table[k]) % n; if ((i < k && ! (i < j && j <= k)) || (k < i && ! (i < j || j <= k))) { array->_table[i] = array->_table[k]; array->_table[k]._document = nullptr; array->_table[k]._next = nullptr; array->_table[k]._subObjects = nullptr; i = k; } k = TRI_IncModU64(k, n); } if (array->_nrUsed == 0) { TRI_ASSERT(array->_nrOverflowUsed == 0); ResizeHashArray(array, InitialSize(), true); } return TRI_ERROR_NO_ERROR; }
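After clearing a slot, the loop at the end of the function above re-homes any following entries whose probe path passed through the freed position; without it, lookups that probe past the hole would miss them. The following compact, standalone sketch shows that hole-repair rule for a plain linear-probing table (not the multi-element variant above); the class name, hash constant and "0 means empty" convention are assumptions for the example.

#include <cstddef>
#include <cstdint>
#include <vector>

// Sketch: linear probing with hole repair on erase, using 0 as the empty marker.
// Fixed capacity, no resizing; assumes the table never fills up completely and
// that 0 is never stored as a key. Illustration only.
class ProbingSet {
  std::vector<uint64_t> _table;

  size_t home(uint64_t key) const { return (key * 2654435761ULL) % _table.size(); }

 public:
  explicit ProbingSet(size_t capacity) : _table(capacity, 0) {}

  void insert(uint64_t key) {
    size_t i = home(key);
    while (_table[i] != 0 && _table[i] != key) { i = (i + 1) % _table.size(); }
    _table[i] = key;
  }

  bool contains(uint64_t key) const {
    size_t i = home(key);
    while (_table[i] != 0) {
      if (_table[i] == key) { return true; }
      i = (i + 1) % _table.size();
    }
    return false;
  }

  void erase(uint64_t key) {
    size_t n = _table.size();
    size_t i = home(key);
    while (_table[i] != 0 && _table[i] != key) { i = (i + 1) % n; }
    if (_table[i] == 0) { return; }       // not present
    _table[i] = 0;                        // clear the slot, leaving a hole at i

    // re-home followers whose home position j no longer reaches them across the hole
    size_t k = (i + 1) % n;
    while (_table[k] != 0) {
      size_t j = home(_table[k]);
      if ((i < k && !(i < j && j <= k)) || (k < i && !(i < j || j <= k))) {
        _table[i] = _table[k];            // move the entry into the hole
        _table[k] = 0;
        i = k;                            // the hole moves forward
      }
      k = (k + 1) % n;
    }
  }
};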
void TRI_InitErrorAql (TRI_aql_error_t* const error) { TRI_ASSERT(error); error->_code = TRI_ERROR_NO_ERROR; error->_data = NULL; }