// Populate a numeric range tree with pseudo-random values, then verify that
// iterating it through a NumericFilter yields exactly the documents that a
// brute-force scan of the same filter matches.
TEST_F(RangeTest, testRangeIterator) {
  NumericRangeTree *t = NewNumericRangeTree();
  ASSERT_TRUE(t != NULL);

  const size_t N = 100000;
  std::vector<double> lookup;   // docId -> indexed value (ground truth)
  std::vector<uint8_t> matched; // 0 = not in range, 1 = expected, 2 = seen
  lookup.resize(N + 1);
  matched.resize(N + 1);

  // Index N documents with values in [1, N/5].
  for (size_t doc = 0; doc < N; doc++) {
    t_docId docId = doc + 1;
    double value = (double)(1 + prng() % (N / 5));
    lookup[docId] = value;
    NumericRangeTree_Add(t, docId, value);
  }

  // Run five random range queries and cross-check against the lookup table.
  for (size_t round = 0; round < 5; round++) {
    double a = (double)(1 + prng() % (N / 5));
    double b = (double)(1 + prng() % (N / 5));
    memset(&matched[0], 0, sizeof(uint8_t) * (N + 1));

    NumericFilter *flt = NewNumericFilter(std::min(a, b), std::max(a, b), 1, 1);

    // Brute-force: count and mark every document the filter should match.
    size_t count = 0;
    for (size_t doc = 1; doc <= N; doc++) {
      if (NumericFilter_Match(flt, lookup[doc])) {
        matched[doc] = 1;
        count++;
      }
    }

    IndexIterator *it = createNumericIterator(NULL, t, flt);

    int xcount = 0;
    RSIndexResult *res = NULL;
    while (IITER_HAS_NEXT(it)) {
      int rc = it->Read(it->ctx, &res);
      if (rc == INDEXREAD_EOF) {
        break;
      }

      // Each returned doc must be expected (1) and not yet seen (2 would
      // indicate a duplicate from the iterator).
      ASSERT_EQ(matched[res->docId], 1);

      // Union results wrap the concrete numeric record; drill down to it.
      if (res->type == RSResultType_Union) {
        res = res->agg.children[0];
      }
      matched[res->docId] = (uint8_t)2;

      ASSERT_EQ(res->num.value, lookup[res->docId]);
      ASSERT_TRUE(NumericFilter_Match(flt, lookup[res->docId]));
      ASSERT_EQ(res->type, RSResultType_Numeric);
      ASSERT_TRUE(!RSIndexResult_HasOffsets(res));
      ASSERT_TRUE(!RSIndexResult_IsAggregate(res));
      ASSERT_TRUE(res->docId > 0);
      ASSERT_EQ(res->fieldMask, RS_FIELDMASK_ALL);
      xcount++;
    }

    // Diagnostic only: report expected docs the iterator never produced.
    // The ASSERT_EQ below fails the test if any exist.
    for (int i = 1; i <= N; i++) {
      if (matched[i] == 1) {
        printf("Miss: %d\n", i);
      }
    }

    ASSERT_EQ(xcount, count);
    it->Free(it);
    NumericFilter_Free(flt);
  }

  // The tree shape and entry count are deterministic for this PRNG sequence.
  ASSERT_EQ(t->numRanges, 14);
  ASSERT_EQ(t->numEntries, N);
  NumericRangeTree_Free(t);
}
/* Execute a parsed query and collect the top-scoring document keys.
 *
 * The query's root stage is evaluated lazily through an IndexIterator; every
 * hit is scored and pushed through a min-heap sized (offset + limit), so only
 * the best candidates are retained. The heap is then drained back-to-front
 * into res->ids so the final order is descending by score, with the first
 * `offset` entries discarded.
 *
 * Returns a heap-allocated QueryResult the caller owns. On failure to build
 * an evaluation plan, res->error/res->errorString are set and res->ids is
 * NULL (the early return below allocates nothing else, so there is nothing
 * to leak on that path).
 */
QueryResult *Query_Execute(Query *query) {
  QueryResult *res = malloc(sizeof(QueryResult));
  res->error = 0;
  res->errorString = NULL;
  res->totalResults = 0;
  res->ids = NULL;
  res->numIds = 0;

  // Start lazy evaluation of all query steps. Do this BEFORE allocating the
  // priority queue so the error exit below has no resources to release
  // (the original code leaked the heap on this path).
  IndexIterator *it = NULL;
  if (query->root != NULL) {
    it = Query_EvalStage(query, query->root);
  }

  // No query evaluation plan? Report an internal error.
  if (query->root == NULL || it == NULL) {
    res->error = QUERY_ERROR_INTERNAL;
    res->errorString = QUERY_ERROR_INTERNAL_STR;
    return res;
  }

  // We only ever need the best (offset + limit) hits.
  int num = query->offset + query->limit;
  heap_t *pq = malloc(heap_sizeof(num));
  heap_init(pq, cmpHits, NULL, num);

  // Hit object reused across iterations whose hit does not enter the heap,
  // avoiding one malloc per read.
  IndexHit *pooledHit = NULL;

  // Iterate the root iterator and push everything to the PQ.
  while (1) {
    // TODO - Use static allocation
    if (pooledHit == NULL) {
      pooledHit = malloc(sizeof(IndexHit));
    }
    IndexHit *h = pooledHit;
    IndexHit_Init(h);
    int rc = it->Read(it->ctx, h);
    if (rc == INDEXREAD_EOF) {
      break;
    } else if (rc == INDEXREAD_NOTFOUND) {
      continue; // pooledHit stays non-NULL and is reused next iteration
    }

    h->totalFreq = processHitScore(h, query->docTable);
    ++res->totalResults;

    if (heap_count(pq) < heap_size(pq)) {
      // Heap not yet full: the heap takes ownership of h.
      heap_offerx(pq, h);
      pooledHit = NULL;
    } else {
      // Heap full: replace the current minimum only if we score higher;
      // either way one hit object returns to the pool.
      IndexHit *qh = heap_peek(pq);
      if (qh->totalFreq < h->totalFreq) {
        pooledHit = heap_poll(pq);
        heap_offerx(pq, h);
      } else {
        pooledHit = h;
      }
    }
  }
  if (pooledHit) {
    free(pooledHit);
  }
  it->Free(it);

  // Drain the heap into the final result. The heap pops ascending by score,
  // so fill res->ids from the back to end up with descending order.
  size_t n = MIN(heap_count(pq), query->limit);
  res->numIds = n;
  res->ids = calloc(n, sizeof(RedisModuleString *));
  for (size_t i = 0; i < n; ++i) {
    IndexHit *h = heap_poll(pq);
    LG_DEBUG("Popping %d freq %f", h->docId, h->totalFreq);
    res->ids[n - i - 1] = Redis_GetDocKey(query->ctx, h->docId);
    free(h);
  }

  // If we still have something in the heap (meaning offset > 0), we need to
  // poll and discard the leftover low-scoring hits.
  while (heap_count(pq) > 0) {
    IndexHit *h = heap_poll(pq);
    free(h);
  }
  heap_free(pq);

  return res;
}