bool DistinctExecutor::p_execute(const NValueArray &params) {
    DistinctPlanNode* node = dynamic_cast<DistinctPlanNode*>(m_abstractNode);
    assert(node);
    Table* output_table = node->getOutputTable();
    assert(output_table);
    Table* input_table = node->getInputTables()[0];
    assert(input_table);

    TableIterator iterator = input_table->iterator();
    TableTuple tuple(input_table->schema());

    // substitute params for distinct expression
    AbstractExpression *distinctExpression = node->getDistinctExpression();
    distinctExpression->substitute(params);

    std::set<NValue, NValue::ltNValue> found_values;
    while (iterator.next(tuple)) {
        //
        // Check whether this value already exists in our list
        //
        NValue tuple_value = distinctExpression->eval(&tuple, NULL);
        if (found_values.find(tuple_value) == found_values.end()) {
            found_values.insert(tuple_value);
            if (!output_table->insertTuple(tuple)) {
                VOLT_ERROR("Failed to insert tuple from input table '%s' into"
                           " output table '%s'",
                           input_table->name().c_str(),
                           output_table->name().c_str());
                return false;
            }
        }
    }
    return true;
}
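// The following is a minimal, standalone sketch (not VoltDB code) of the
// de-duplication pattern DistinctExecutor uses above: evaluate the distinct
// expression once per input row, remember the result in a std::set, and
// forward the row only the first time its key is seen. All types and names
// below are illustrative assumptions.
#include <iostream>
#include <set>
#include <string>
#include <vector>

struct Row { long id; std::string val; };

// "distinct expression" stand-in: extract the key we de-duplicate on.
static long distinctKey(const Row &r) { return r.id; }

static std::vector<Row> distinctScan(const std::vector<Row> &input) {
    std::set<long> seen;                  // plays the role of found_values
    std::vector<Row> output;              // plays the role of the output table
    for (const Row &r : input) {
        long key = distinctKey(r);        // distinctExpression->eval(&tuple, NULL)
        if (seen.insert(key).second) {    // first time we see this key
            output.push_back(r);          // output_table->insertTuple(tuple)
        }
    }
    return output;
}

int main() {
    std::vector<Row> rows = {{1, "a"}, {2, "b"}, {1, "c"}, {3, "d"}, {2, "e"}};
    for (const Row &r : distinctScan(rows)) {
        std::cout << r.id << " " << r.val << "\n";   // keeps first row per id: 1 a, 2 b, 3 d
    }
    return 0;
}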
TEST_F(FilterTest, SubstituteFilter) {
    // WHERE id <= 20 AND val4=$1
    //
    // shared_ptr<AbstractExpression> equal1
    //     = ComparisonExpression::getInstance(EXPRESSION_TYPE_COMPARE_LESSTHANOREQUALTO,
    //                                         TupleValueExpression::getInstance(0),
    //                                         ConstantValueExpression::getInstance(voltdb::Value::newBigIntValue(20)));
    //
    // shared_ptr<AbstractExpression> equal2
    //     = ComparisonExpression::getInstance(EXPRESSION_TYPE_COMPARE_EQUAL,
    //                                         TupleValueExpression::getInstance(4),
    //                                         ParameterValueExpression::getInstance(0));
    //
    // ConjunctionExpression predicate(EXPRESSION_TYPE_CONJUNCTION_AND, equal1, equal2);

    AbstractExpression *tv1 = new TupleValueExpression(0, std::string("tablename"), std::string("colname"));
    AbstractExpression *cv1 = constantValueFactory(ValueFactory::getBigIntValue(20));
    AbstractExpression *equal1 = comparisonFactory(EXPRESSION_TYPE_COMPARE_LESSTHANOREQUALTO, tv1, cv1);

    AbstractExpression *tv2 = new TupleValueExpression(4, std::string("tablename"), std::string("colname"));
    AbstractExpression *pv2 = parameterValueFactory(0);
    AbstractExpression *equal2 = comparisonFactory(EXPRESSION_TYPE_COMPARE_EQUAL, tv2, pv2);

    AbstractExpression *predicate = conjunctionFactory(EXPRESSION_TYPE_CONJUNCTION_AND, equal1, equal2);
    // ::printf("\nFilter:%s\n", predicate->debug().c_str());

    for (int64_t implantedValue = 1; implantedValue < 5; ++implantedValue) {
        NValueArray params(1);
        params[0] = ValueFactory::getBigIntValue(implantedValue);
        predicate->substitute(params);
        // ::printf("\nSubstituted Filter:%s\n", predicate->debug().c_str());
        // ::printf("\tLEFT:  %s\n", predicate->getLeft()->debug().c_str());
        // ::printf("\tRIGHT: %s\n", predicate->getRight()->debug().c_str());

        int count = 0;
        TableIterator iter = table->iterator();
        TableTuple match(table->schema());
        while (iter.next(match)) {
            if (predicate->eval(&match, NULL).isTrue()) {
                ++count;
            }
        }
        ASSERT_EQ(3, count);
    }
    delete predicate;
}
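// A toy sketch (no VoltDB types, purely illustrative) of the
// substitute-then-evaluate pattern the test above exercises: the predicate
// holds a parameter slot, substitute() copies the current parameter value
// into that slot, and eval() then runs the fully-bound expression against
// each row.
#include <cassert>
#include <vector>

struct ParamRow { long id; long val4; };

struct ParamPredicate {
    long boundParam = 0;                       // the $1 slot
    void substitute(const std::vector<long> &params) { boundParam = params[0]; }
    bool eval(const ParamRow &r) const { return r.id <= 20 && r.val4 == boundParam; }
};

int main() {
    std::vector<ParamRow> table = {{1, 1}, {2, 1}, {3, 1}, {30, 1}, {4, 2}};
    ParamPredicate predicate;                  // WHERE id <= 20 AND val4 = $1
    predicate.substitute({1});                 // bind $1 = 1
    int count = 0;
    for (const ParamRow &r : table) {
        if (predicate.eval(r)) ++count;
    }
    assert(count == 3);                        // ids 1, 2, 3 pass; 30 fails id<=20, 4 fails val4
    return 0;
}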
bool SeqScanExecutor::p_execute(const NValueArray &params) {
    SeqScanPlanNode* node = dynamic_cast<SeqScanPlanNode*>(m_abstractNode);
    assert(node);
    Table* output_table = node->getOutputTable();
    assert(output_table);
    Table* target_table = dynamic_cast<Table*>(node->getTargetTable());
    assert(target_table);
    //cout << "SeqScanExecutor: node id" << node->getPlanNodeId() << endl;
    VOLT_TRACE("Sequential Scanning table :\n %s",
               target_table->debug().c_str());
    VOLT_DEBUG("Sequential Scanning table : %s which has %d active, %d"
               " allocated, %d used tuples",
               target_table->name().c_str(),
               (int)target_table->activeTupleCount(),
               (int)target_table->allocatedTupleCount(),
               (int)target_table->usedTupleCount());

    //
    // OPTIMIZATION: NESTED PROJECTION
    //
    // Since we have the input params, we need to call substitute to
    // change any nodes in our expression tree to be ready for the
    // projection operations in execute
    //
    int num_of_columns = (int)output_table->columnCount();
    ProjectionPlanNode* projection_node =
        dynamic_cast<ProjectionPlanNode*>(node->getInlinePlanNode(PLAN_NODE_TYPE_PROJECTION));
    if (projection_node != NULL) {
        for (int ctr = 0; ctr < num_of_columns; ctr++) {
            assert(projection_node->getOutputColumnExpressions()[ctr]);
            projection_node->getOutputColumnExpressions()[ctr]->substitute(params);
        }
    }

    //
    // OPTIMIZATION: NESTED LIMIT
    // How nice! We can also cut off our scanning with a nested limit!
    //
    int limit = -1;
    int offset = -1;
    LimitPlanNode* limit_node =
        dynamic_cast<LimitPlanNode*>(node->getInlinePlanNode(PLAN_NODE_TYPE_LIMIT));
    if (limit_node != NULL) {
        limit_node->getLimitAndOffsetByReference(params, limit, offset);
    }

    //
    // OPTIMIZATION:
    //
    // If there is no predicate and no projection for this SeqScan,
    // then we have already set the node's OutputTable to just point
    // at the TargetTable. Therefore, there is nothing more we need
    // to do here.
    //
    if (node->getPredicate() != NULL || projection_node != NULL ||
        limit_node != NULL)
    {
        //
        // Just walk through the table using our iterator and apply
        // the predicate to each tuple. Each tuple that satisfies
        // our expression is inserted into the output table.
        //
        TableTuple tuple(target_table->schema());
        TableIterator iterator = target_table->iterator();
        AbstractExpression *predicate = node->getPredicate();

        if (predicate) {
            // Only dereference the predicate after the NULL check.
            VOLT_TRACE("SCAN PREDICATE A:\n%s\n", predicate->debug(true).c_str());
            predicate->substitute(params);
            assert(predicate != NULL);
            VOLT_DEBUG("SCAN PREDICATE B:\n%s\n", predicate->debug(true).c_str());
        }

        int tuple_ctr = 0;
        int tuple_skipped = 0;
        while (iterator.next(tuple)) {
            VOLT_TRACE("INPUT TUPLE: %s, %d/%d\n",
                       tuple.debug(target_table->name()).c_str(), tuple_ctr,
                       (int)target_table->activeTupleCount());
            //
            // For each tuple we need to evaluate it against our predicate
            //
            if (predicate == NULL || predicate->eval(&tuple, NULL).isTrue()) {
                // Check if we have to skip this tuple because of offset
                if (tuple_skipped < offset) {
                    tuple_skipped++;
                    continue;
                }
                //
                // Nested Projection
                // Project (or replace) values from input tuple
                //
                if (projection_node != NULL) {
                    TableTuple &temp_tuple = output_table->tempTuple();
                    for (int ctr = 0; ctr < num_of_columns; ctr++) {
                        NValue value =
                            projection_node->getOutputColumnExpressions()[ctr]->eval(&tuple, NULL);
                        temp_tuple.setNValue(ctr, value);
                    }
                    if (!output_table->insertTuple(temp_tuple)) {
                        VOLT_ERROR("Failed to insert tuple from table '%s' into"
                                   " output table '%s'",
                                   target_table->name().c_str(),
                                   output_table->name().c_str());
                        return false;
                    }
                }
                else {
                    //
                    // Insert the tuple into our output table
                    //
                    if (!output_table->insertTuple(tuple)) {
                        VOLT_ERROR("Failed to insert tuple from table '%s' into"
                                   " output table '%s'",
                                   target_table->name().c_str(),
                                   output_table->name().c_str());
                        return false;
                    }
                }
                ++tuple_ctr;
                // Check whether we have gone past our limit
                if (limit >= 0 && tuple_ctr >= limit) {
                    break;
                }
            }
        }
    }
    VOLT_TRACE("\n%s\n", output_table->debug().c_str());
    VOLT_DEBUG("Finished Seq scanning");
    return true;
}
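// A standalone sketch (assumed names, not VoltDB code) of the
// filter/offset/limit loop that SeqScanExecutor runs above: rows failing the
// predicate are never counted, the first `offset` passing rows are skipped,
// and the scan stops once `limit` rows have been emitted (limit < 0 means
// "no limit").
#include <functional>
#include <iostream>
#include <vector>

static std::vector<int> scanWithLimitOffset(const std::vector<int> &table,
                                            const std::function<bool(int)> &predicate,
                                            int limit, int offset) {
    std::vector<int> out;
    int tuple_ctr = 0;
    int tuple_skipped = 0;
    for (int row : table) {
        if (!predicate(row)) continue;                 // predicate->eval(&tuple, NULL)
        if (tuple_skipped < offset) {                  // INLINE OFFSET
            tuple_skipped++;
            continue;
        }
        out.push_back(row);                            // output_table->insertTuple(...)
        ++tuple_ctr;
        if (limit >= 0 && tuple_ctr >= limit) break;   // INLINE LIMIT
    }
    return out;
}

int main() {
    std::vector<int> table = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10};
    // WHERE row % 2 == 0 LIMIT 3 OFFSET 1  ->  prints 4 6 8
    for (int v : scanWithLimitOffset(table, [](int r) { return r % 2 == 0; }, 3, 1)) {
        std::cout << v << " ";
    }
    std::cout << "\n";
    return 0;
}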
bool IndexScanExecutor::p_execute(const NValueArray &params) {
    assert(m_node);
    assert(m_node == dynamic_cast<IndexScanPlanNode*>(m_abstractNode));
    assert(m_outputTable);
    assert(m_outputTable == static_cast<TempTable*>(m_node->getOutputTable()));

    // update local target table with its most recent reference
    Table* targetTable = m_node->getTargetTable();
    TableIndex *tableIndex = targetTable->index(m_node->getTargetIndexName());
    TableTuple searchKey(tableIndex->getKeySchema());
    searchKey.moveNoHeader(m_searchKeyBackingStore);

    assert(m_lookupType != INDEX_LOOKUP_TYPE_EQ ||
           searchKey.getSchema()->columnCount() == m_numOfSearchkeys);

    int activeNumOfSearchKeys = m_numOfSearchkeys;
    IndexLookupType localLookupType = m_lookupType;
    SortDirectionType localSortDirection = m_sortDirection;

    // INLINE PROJECTION
    // Set params to expression tree via substitute()
    assert(m_numOfColumns == m_outputTable->columnCount());
    if (m_projectionNode != NULL && m_projectionAllTupleArray == NULL) {
        for (int ctr = 0; ctr < m_numOfColumns; ctr++) {
            assert(m_projectionNode->getOutputColumnExpressions()[ctr]);
            m_projectionExpressions[ctr]->substitute(params);
            assert(m_projectionExpressions[ctr]);
        }
    }

    //
    // INLINE LIMIT
    //
    LimitPlanNode* limit_node =
        dynamic_cast<LimitPlanNode*>(m_abstractNode->getInlinePlanNode(PLAN_NODE_TYPE_LIMIT));

    //
    // SEARCH KEY
    //
    searchKey.setAllNulls();
    VOLT_TRACE("Initial (all null) search key: '%s'", searchKey.debugNoHeader().c_str());
    for (int ctr = 0; ctr < activeNumOfSearchKeys; ctr++) {
        m_searchKeyArray[ctr]->substitute(params);
        NValue candidateValue = m_searchKeyArray[ctr]->eval(NULL, NULL);
        try {
            searchKey.setNValue(ctr, candidateValue);
        }
        catch (const SQLException &e) {
            // This next bit of logic handles underflow and overflow while
            // setting up the search keys.
            // e.g. TINYINT > 200 or INT <= 6000000000

            // re-throw if not an overflow or underflow
            // currently, it's expected to always be an overflow or underflow
            if ((e.getInternalFlags() & (SQLException::TYPE_OVERFLOW | SQLException::TYPE_UNDERFLOW)) == 0) {
                throw e;
            }

            // Handle the case where this is a comparison, rather than an equality match.
            // A comparison is the only place where the executor might still return matching tuples,
            // e.g. TINYINT < 1000 should return all values.
            if ((localLookupType != INDEX_LOOKUP_TYPE_EQ) &&
                (ctr == (activeNumOfSearchKeys - 1))) {

                if (e.getInternalFlags() & SQLException::TYPE_OVERFLOW) {
                    if ((localLookupType == INDEX_LOOKUP_TYPE_GT) ||
                        (localLookupType == INDEX_LOOKUP_TYPE_GTE)) {
                        // gt or gte when the key overflows returns nothing
                        return true;
                    }
                    else {
                        // For overflow on a reverse scan, we need to do a
                        // forward scan to find the correct start point,
                        // which is exactly what LTE would do. So, set the
                        // lookupType to LTE and the missing searchkey will
                        // be handled by extra post filters.
                        localLookupType = INDEX_LOOKUP_TYPE_LTE;
                    }
                }
                if (e.getInternalFlags() & SQLException::TYPE_UNDERFLOW) {
                    if ((localLookupType == INDEX_LOOKUP_TYPE_LT) ||
                        (localLookupType == INDEX_LOOKUP_TYPE_LTE)) {
                        // lt or lte when the key underflows returns nothing
                        return true;
                    }
                    else {
                        // don't allow GTE because it breaks null handling
                        localLookupType = INDEX_LOOKUP_TYPE_GT;
                    }
                }

                // If we get here, all tuples matching the previous searchkey
                // columns need to be scanned. Note that if there is only one
                // column, then all tuples will be scanned.
                activeNumOfSearchKeys--;
                if (localSortDirection == SORT_DIRECTION_TYPE_INVALID) {
                    localSortDirection = SORT_DIRECTION_TYPE_ASC;
                }
            }
            // if an EQ comparison is out of range, then return no tuples
            else {
                return true;
            }
            break;
        }
    }
    assert((activeNumOfSearchKeys == 0) || (searchKey.getSchema()->columnCount() > 0));
    VOLT_TRACE("Search key after substitutions: '%s'", searchKey.debugNoHeader().c_str());

    //
    // END EXPRESSION
    //
    AbstractExpression* end_expression = m_node->getEndExpression();
    if (end_expression != NULL) {
        end_expression->substitute(params);
        VOLT_DEBUG("End Expression:\n%s", end_expression->debug(true).c_str());
    }

    //
    // POST EXPRESSION
    //
    AbstractExpression* post_expression = m_node->getPredicate();
    if (post_expression != NULL) {
        post_expression->substitute(params);
        VOLT_DEBUG("Post Expression:\n%s", post_expression->debug(true).c_str());
    }

    // INITIAL EXPRESSION
    AbstractExpression* initial_expression = m_node->getInitialExpression();
    if (initial_expression != NULL) {
        initial_expression->substitute(params);
        VOLT_DEBUG("Initial Expression:\n%s", initial_expression->debug(true).c_str());
    }

    //
    // SKIP NULL EXPRESSION
    //
    AbstractExpression* skipNullExpr = m_node->getSkipNullPredicate();
    // For the reverse scan edge case of NULL values and the forward scan underflow case.
    if (skipNullExpr != NULL) {
        skipNullExpr->substitute(params);
        VOLT_DEBUG("Skip NULL Expression:\n%s", skipNullExpr->debug(true).c_str());
    }

    ProgressMonitorProxy pmp(m_engine, targetTable);

    //
    // An index scan has three parts:
    //  (1) Look up tuples using the search key
    //  (2) For each tuple that comes back, check whether the
    //      end_expression is false. If it is, then we stop scanning.
    //      Otherwise...
    //  (3) Check whether the tuple satisfies the post expression.
    //      If it does, then add it to the output table
    //
    // Use our search key to prime the index iterator,
    // then loop through each tuple given to us by the iterator
    //
    TableTuple tuple;
    if (activeNumOfSearchKeys > 0) {
        VOLT_TRACE("INDEX_LOOKUP_TYPE(%d) m_numSearchkeys(%d) key:%s",
                   localLookupType, activeNumOfSearchKeys, searchKey.debugNoHeader().c_str());

        if (localLookupType == INDEX_LOOKUP_TYPE_EQ) {
            tableIndex->moveToKey(&searchKey);
        }
        else if (localLookupType == INDEX_LOOKUP_TYPE_GT) {
            tableIndex->moveToGreaterThanKey(&searchKey);
        }
        else if (localLookupType == INDEX_LOOKUP_TYPE_GTE) {
            tableIndex->moveToKeyOrGreater(&searchKey);
        }
        else if (localLookupType == INDEX_LOOKUP_TYPE_LT) {
            tableIndex->moveToLessThanKey(&searchKey);
        }
        else if (localLookupType == INDEX_LOOKUP_TYPE_LTE) {
            // find the entry whose key is greater than the search key,
            // then do a forward scan using initialExpr to find the correct
            // start point for the reverse scan
            bool isEnd = tableIndex->moveToGreaterThanKey(&searchKey);
            if (isEnd) {
                tableIndex->moveToEnd(false);
            }
            else {
                while (!(tuple = tableIndex->nextValue()).isNullTuple()) {
                    pmp.countdownProgress();
                    if (initial_expression != NULL &&
                        !initial_expression->eval(&tuple, NULL).isTrue()) {
                        // just passed the first failed entry, so move back two positions
                        tableIndex->moveToBeforePriorEntry();
                        break;
                    }
                }
                if (tuple.isNullTuple()) {
                    tableIndex->moveToEnd(false);
                }
            }
        }
        else {
            return false;
        }
    }
    else {
        bool toStartActually = (localSortDirection != SORT_DIRECTION_TYPE_DESC);
        tableIndex->moveToEnd(toStartActually);
    }

    int tuple_ctr = 0;
    int tuples_skipped = 0;     // for offset
    int limit = -1;
    int offset = -1;
    if (limit_node != NULL) {
        limit_node->getLimitAndOffsetByReference(params, limit, offset);
    }

    //
    // We have two different nextValue() methods for the different lookup types
    //
    while ((limit == -1 || tuple_ctr < limit) &&
           ((localLookupType == INDEX_LOOKUP_TYPE_EQ &&
             !(tuple = tableIndex->nextValueAtKey()).isNullTuple()) ||
            ((localLookupType != INDEX_LOOKUP_TYPE_EQ || activeNumOfSearchKeys == 0) &&
             !(tuple = tableIndex->nextValue()).isNullTuple())))
    {
        VOLT_TRACE("LOOPING in indexscan: tuple: '%s'\n", tuple.debug("tablename").c_str());
        pmp.countdownProgress();

        //
        // First, eliminate the null index rows (UNDERFLOW case only)
        //
        if (skipNullExpr != NULL) {
            if (skipNullExpr->eval(&tuple, NULL).isTrue()) {
                VOLT_DEBUG("Index scan: skipping null rows or columns.");
                continue;
            }
            else {
                skipNullExpr = NULL;
            }
        }

        //
        // Then check whether the end_expression is now false
        //
        if (end_expression != NULL && !end_expression->eval(&tuple, NULL).isTrue()) {
            VOLT_TRACE("End Expression evaluated to false, stopping scan");
            break;
        }

        //
        // Then apply our post-predicate to do further filtering
        //
        if (post_expression == NULL || post_expression->eval(&tuple, NULL).isTrue()) {
            //
            // INLINE OFFSET
            //
            if (tuples_skipped < offset) {
                tuples_skipped++;
                continue;
            }
            tuple_ctr++;

            if (m_projectionNode != NULL) {
                TableTuple &temp_tuple = m_outputTable->tempTuple();
                if (m_projectionAllTupleArray != NULL) {
                    VOLT_TRACE("sweet, all tuples");
                    for (int ctr = m_numOfColumns - 1; ctr >= 0; --ctr) {
                        temp_tuple.setNValue(ctr, tuple.getNValue(m_projectionAllTupleArray[ctr]));
                    }
                }
                else {
                    for (int ctr = m_numOfColumns - 1; ctr >= 0; --ctr) {
                        temp_tuple.setNValue(ctr, m_projectionExpressions[ctr]->eval(&tuple, NULL));
                    }
                }
                m_outputTable->insertTupleNonVirtual(temp_tuple);
            }
            else {
                //
                // Straight Insert
                //
                // Try to put the tuple into our output table
                //
                m_outputTable->insertTupleNonVirtual(tuple);
            }
            pmp.countdownProgress();
        }
    }

    VOLT_DEBUG("Index Scanned :\n %s", m_outputTable->debug().c_str());
    return true;
}
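// A rough sketch (illustrative only, not VoltDB API) of how the lookup types
// above map onto a sorted container: EQ uses an exact find, GTE/GT use
// lower_bound/upper_bound to prime the iterator, and the end expression
// decides when a range scan stops. std::map stands in for the ordered table
// index.
#include <iostream>
#include <map>
#include <string>

int main() {
    std::map<int, std::string> index = {{1, "a"}, {3, "b"}, {5, "c"}, {7, "d"}};
    int searchKey = 3;

    // INDEX_LOOKUP_TYPE_GTE analogue: moveToKeyOrGreater(&searchKey)
    auto it = index.lower_bound(searchKey);
    // INDEX_LOOKUP_TYPE_GT would instead be: auto it = index.upper_bound(searchKey);

    // End expression stand-in, e.g. "key <= 5" for a bounded range scan.
    auto endExpression = [](int key) { return key <= 5; };

    for (; it != index.end(); ++it) {
        if (!endExpression(it->first)) break;   // end expression turned false: stop scanning
        std::cout << it->first << " -> " << it->second << "\n";   // prints 3 -> b and 5 -> c
    }
    return 0;
}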
bool NestLoopExecutor::p_execute(const NValueArray &params) {
    VOLT_DEBUG("executing NestLoop...");
    NestLoopPlanNode* node = dynamic_cast<NestLoopPlanNode*>(m_abstractNode);
    assert(node);
    assert(node->getInputTables().size() == 2);

    Table* output_table_ptr = node->getOutputTable();
    assert(output_table_ptr);

    // output table must be a temp table
    TempTable* output_table = dynamic_cast<TempTable*>(output_table_ptr);
    assert(output_table);

    Table* outer_table = node->getInputTables()[0];
    assert(outer_table);
    Table* inner_table = node->getInputTables()[1];
    assert(inner_table);

    VOLT_TRACE("input table left:\n %s", outer_table->debug().c_str());
    VOLT_TRACE("input table right:\n %s", inner_table->debug().c_str());

    //
    // Pre Join Expression
    //
    AbstractExpression *preJoinPredicate = node->getPreJoinPredicate();
    if (preJoinPredicate) {
        preJoinPredicate->substitute(params);
        VOLT_TRACE("Pre Join predicate: %s", preJoinPredicate->debug(true).c_str());
    }
    //
    // Join Expression
    //
    AbstractExpression *joinPredicate = node->getJoinPredicate();
    if (joinPredicate) {
        joinPredicate->substitute(params);
        VOLT_TRACE("Join predicate: %s", joinPredicate->debug(true).c_str());
    }
    //
    // Where Expression
    //
    AbstractExpression *wherePredicate = node->getWherePredicate();
    if (wherePredicate) {
        wherePredicate->substitute(params);
        VOLT_TRACE("Where predicate: %s", wherePredicate->debug(true).c_str());
    }

    // Join type
    JoinType join_type = node->getJoinType();
    assert(join_type == JOIN_TYPE_INNER || join_type == JOIN_TYPE_LEFT);

    int outer_cols = outer_table->columnCount();
    int inner_cols = inner_table->columnCount();
    TableTuple outer_tuple(node->getInputTables()[0]->schema());
    TableTuple inner_tuple(node->getInputTables()[1]->schema());
    TableTuple &joined = output_table->tempTuple();
    TableTuple null_tuple = m_null_tuple;

    TableIterator iterator0 = outer_table->iterator();
    while (iterator0.next(outer_tuple)) {
        // did this loop body find at least one match for this tuple?
        bool match = false;
        // For outer joins, if the outer tuple fails the pre-join predicate
        // (a join expression based on the outer table only),
        // it can't match any of the inner tuples
        if (preJoinPredicate == NULL || preJoinPredicate->eval(&outer_tuple, NULL).isTrue()) {
            // Populate the output table's temp tuple with the outer table's values.
            // We probably have to do this at least once - avoid doing it many
            // times per outer tuple.
            joined.setNValues(0, outer_tuple, 0, outer_cols);

            TableIterator iterator1 = inner_table->iterator();
            while (iterator1.next(inner_tuple)) {
                // Apply the join filter to produce matches for each outer that has them,
                // then pad unmatched outers, then filter them all
                if (joinPredicate == NULL || joinPredicate->eval(&outer_tuple, &inner_tuple).isTrue()) {
                    match = true;
                    // Filter the joined tuple
                    if (wherePredicate == NULL || wherePredicate->eval(&outer_tuple, &inner_tuple).isTrue()) {
                        // Matched! Complete the joined tuple with the inner column values.
                        joined.setNValues(outer_cols, inner_tuple, 0, inner_cols);
                        output_table->insertTupleNonVirtual(joined);
                    }
                }
            }
        }
        //
        // Left Outer Join
        //
        if (join_type == JOIN_TYPE_LEFT && !match) {
            // Still needs to pass the filter
            if (wherePredicate == NULL || wherePredicate->eval(&outer_tuple, &null_tuple).isTrue()) {
                joined.setNValues(outer_cols, null_tuple, 0, inner_cols);
                output_table->insertTupleNonVirtual(joined);
            }
        }
    }
    return true;
}
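// A compact sketch (hypothetical types, not VoltDB code) of the join logic in
// NestLoopExecutor::p_execute above: for every outer row, scan all inner rows,
// emit joined pairs that satisfy the join predicate, and for a LEFT join emit
// one null-padded row when no inner row matched.
#include <iostream>
#include <optional>
#include <string>
#include <vector>

struct JoinedRow { int outer; std::optional<int> inner; };   // nullopt = null-padded

static std::vector<JoinedRow> nestLoopLeftJoin(const std::vector<int> &outer,
                                               const std::vector<int> &inner) {
    std::vector<JoinedRow> out;
    for (int o : outer) {
        bool match = false;
        for (int i : inner) {
            if (o == i) {                       // joinPredicate->eval(&outer_tuple, &inner_tuple)
                match = true;
                out.push_back({o, i});
            }
        }
        if (!match) {                           // LEFT join: pad the unmatched outer row with nulls
            out.push_back({o, std::nullopt});
        }
    }
    return out;
}

int main() {
    for (const JoinedRow &j : nestLoopLeftJoin({1, 2, 3}, {2, 3, 3})) {
        std::cout << j.outer << " | "
                  << (j.inner ? std::to_string(*j.inner) : "NULL") << "\n";
    }
    return 0;   // prints: 1|NULL, 2|2, 3|3, 3|3
}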
bool IndexCountExecutor::p_execute(const NValueArray &params) {
    assert(m_node);
    assert(m_node == dynamic_cast<IndexCountPlanNode*>(m_abstractNode));
    assert(m_outputTable);
    assert(m_outputTable == static_cast<TempTable*>(m_node->getOutputTable()));
    assert(m_targetTable);
    assert(m_targetTable == m_node->getTargetTable());
    VOLT_DEBUG("IndexCount: %s.%s\n", m_targetTable->name().c_str(), m_index->getName().c_str());

    int activeNumOfSearchKeys = m_numOfSearchkeys;
    IndexLookupType localLookupType = m_lookupType;
    bool searchKeyUnderflow = false, endKeyOverflow = false;

    // Overflow cases that can return early without accessing the index need this
    // default 0 count as their result.
    TableTuple& tmptup = m_outputTable->tempTuple();
    tmptup.setNValue(0, ValueFactory::getBigIntValue(0));

    //
    // SEARCH KEY
    //
    if (m_numOfSearchkeys != 0) {
        m_searchKey.setAllNulls();
        VOLT_DEBUG("<Index Count>Initial (all null) search key: '%s'", m_searchKey.debugNoHeader().c_str());
        for (int ctr = 0; ctr < activeNumOfSearchKeys; ctr++) {
            m_searchKeyArray[ctr]->substitute(params);
            NValue candidateValue = m_searchKeyArray[ctr]->eval(NULL, NULL);
            try {
                m_searchKey.setNValue(ctr, candidateValue);
            }
            catch (const SQLException &e) {
                // This next bit of logic handles underflow and overflow while
                // setting up the search keys.
                // e.g. TINYINT > 200 or INT <= 6000000000

                // re-throw if not an overflow or underflow
                // currently, it's expected to always be an overflow or underflow
                if ((e.getInternalFlags() & (SQLException::TYPE_OVERFLOW | SQLException::TYPE_UNDERFLOW)) == 0) {
                    throw e;
                }

                // Handle the case where this is a comparison, rather than an equality match.
                // A comparison is the only place where the executor might still return matching tuples,
                // e.g. TINYINT < 1000 should return all values.
                if ((localLookupType != INDEX_LOOKUP_TYPE_EQ) &&
                    (ctr == (activeNumOfSearchKeys - 1))) {
                    assert(localLookupType == INDEX_LOOKUP_TYPE_GT ||
                           localLookupType == INDEX_LOOKUP_TYPE_GTE);

                    if (e.getInternalFlags() & SQLException::TYPE_OVERFLOW) {
                        m_outputTable->insertTuple(tmptup);
                        return true;
                    }
                    else if (e.getInternalFlags() & SQLException::TYPE_UNDERFLOW) {
                        searchKeyUnderflow = true;
                        break;
                    }
                    else {
                        throw e;
                    }
                }
                // if an EQ comparison is out of range, then return no tuples
                else {
                    m_outputTable->insertTuple(tmptup);
                    return true;
                }
                break;
            }
        }
        VOLT_TRACE("Search key after substitutions: '%s'", m_searchKey.debugNoHeader().c_str());
    }

    if (m_numOfEndkeys != 0) {
        //
        // END KEY
        //
        m_endKey.setAllNulls();
        VOLT_DEBUG("Initial (all null) end key: '%s'", m_endKey.debugNoHeader().c_str());
        for (int ctr = 0; ctr < m_numOfEndkeys; ctr++) {
            m_endKeyArray[ctr]->substitute(params);
            NValue endKeyValue = m_endKeyArray[ctr]->eval(NULL, NULL);
            try {
                m_endKey.setNValue(ctr, endKeyValue);
            }
            catch (const SQLException &e) {
                // This next bit of logic handles underflow and overflow while
                // setting up the end keys.
                // e.g. TINYINT > 200 or INT <= 6000000000

                // re-throw if not an overflow or underflow
                // currently, it's expected to always be an overflow or underflow
                if ((e.getInternalFlags() & (SQLException::TYPE_OVERFLOW | SQLException::TYPE_UNDERFLOW)) == 0) {
                    throw e;
                }
                if (ctr == (m_numOfEndkeys - 1)) {
                    assert(m_endType == INDEX_LOOKUP_TYPE_LT || m_endType == INDEX_LOOKUP_TYPE_LTE);
                    if (e.getInternalFlags() & SQLException::TYPE_UNDERFLOW) {
                        m_outputTable->insertTuple(tmptup);
                        return true;
                    }
                    else if (e.getInternalFlags() & SQLException::TYPE_OVERFLOW) {
                        endKeyOverflow = true;
                        const ValueType type = m_endKey.getSchema()->columnType(ctr);
                        NValue tmpEndKeyValue = ValueFactory::getBigIntValue(getMaxTypeValue(type));
                        m_endKey.setNValue(ctr, tmpEndKeyValue);
                        VOLT_DEBUG("<Index count> end key out of range, MAX value: %ld...\n",
                                   (long)getMaxTypeValue(type));
                        break;
                    }
                    else {
                        throw e;
                    }
                }
                // if an EQ comparison is out of range, then return no tuples
                else {
                    m_outputTable->insertTuple(tmptup);
                    return true;
                }
                break;
            }
        }
        VOLT_TRACE("End key after substitutions: '%s'", m_endKey.debugNoHeader().c_str());
    }

    //
    // POST EXPRESSION
    //
    assert(m_node->getPredicate() == NULL);

    assert(m_index);
    assert(m_index == m_targetTable->index(m_node->getTargetIndexName()));
    assert(m_index->isCountableIndex());

    //
    // COUNT NULL EXPRESSION
    //
    AbstractExpression* countNULLExpr = m_node->getSkipNullPredicate();
    // For the reverse scan edge case of NULL values and the forward scan underflow case.
    if (countNULLExpr != NULL) {
        countNULLExpr->substitute(params);
        VOLT_DEBUG("COUNT NULL Expression:\n%s", countNULLExpr->debug(true).c_str());
    }

    bool reverseScanNullEdgeCase = false;
    bool reverseScanMovedIndexToScan = false;
    if (m_numOfSearchkeys < m_numOfEndkeys &&
        (m_endType == INDEX_LOOKUP_TYPE_LT || m_endType == INDEX_LOOKUP_TYPE_LTE)) {
        reverseScanNullEdgeCase = true;
        VOLT_DEBUG("Index count: reverse scan edge null case.");
    }

    // An index count has two cases: unique and non-unique
    int64_t rkStart = 0, rkEnd = 0, rkRes = 0;
    int leftIncluded = 0, rightIncluded = 0;

    if (m_numOfSearchkeys != 0) {
        // Deal with multi-map
        VOLT_DEBUG("INDEX_LOOKUP_TYPE(%d) m_numSearchkeys(%d) key:%s",
                   localLookupType, activeNumOfSearchKeys, m_searchKey.debugNoHeader().c_str());
        if (searchKeyUnderflow == false) {
            if (localLookupType == INDEX_LOOKUP_TYPE_GT) {
                rkStart = m_index->getCounterLET(&m_searchKey, true);
            }
            else {
                // handle start inclusive cases.
                if (m_index->hasKey(&m_searchKey)) {
                    leftIncluded = 1;
                    rkStart = m_index->getCounterLET(&m_searchKey, false);
                    if (reverseScanNullEdgeCase) {
                        m_index->moveToKeyOrGreater(&m_searchKey);
                        reverseScanMovedIndexToScan = true;
                    }
                }
                else {
                    rkStart = m_index->getCounterLET(&m_searchKey, true);
                }
            }
        }
        else {
            // Do not count null rows or columns
            m_index->moveToKeyOrGreater(&m_searchKey);
            assert(countNULLExpr);
            long numNULLs = countNulls(countNULLExpr);
            rkStart += numNULLs;
            VOLT_DEBUG("Index count[underflow case]: %ld null rows or columns are not counted.", numNULLs);
        }
    }
    if (reverseScanNullEdgeCase) {
        // reverse scan case
        if (!reverseScanMovedIndexToScan && localLookupType != INDEX_LOOKUP_TYPE_GT) {
            m_index->moveToEnd(true);
        }
        assert(countNULLExpr);
        long numNULLs = countNulls(countNULLExpr);
        rkStart += numNULLs;
        VOLT_DEBUG("Index count[reverse case]: %ld null rows or columns are not counted.", numNULLs);
    }

    if (m_numOfEndkeys != 0) {
        if (endKeyOverflow) {
            rkEnd = m_index->getCounterGET(&m_endKey, true);
        }
        else {
            IndexLookupType localEndType = m_endType;
            if (localEndType == INDEX_LOOKUP_TYPE_LT) {
                rkEnd = m_index->getCounterGET(&m_endKey, false);
            }
            else {
                if (m_index->hasKey(&m_endKey)) {
                    rightIncluded = 1;
                    rkEnd = m_index->getCounterGET(&m_endKey, true);
                }
                else {
                    rkEnd = m_index->getCounterGET(&m_endKey, false);
                }
            }
        }
    }
    else {
        rkEnd = m_index->getSize();
        rightIncluded = 1;
    }

    rkRes = rkEnd - rkStart - 1 + leftIncluded + rightIncluded;
    VOLT_DEBUG("Index Count ANSWER %ld = %ld - %ld - 1 + %d + %d\n",
               (long)rkRes, (long)rkEnd, (long)rkStart, leftIncluded, rightIncluded);
    tmptup.setNValue(0, ValueFactory::getBigIntValue(rkRes));
    m_outputTable->insertTuple(tmptup);

    VOLT_DEBUG("Index Count :\n %s", m_outputTable->debug().c_str());
    return true;
}
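// A simplified sketch of counting a key range by rank arithmetic instead of
// iterating, which is the idea behind getCounterLET/getCounterGET above. Here
// std::lower_bound/std::upper_bound supply the ranks directly; VoltDB's
// counter conventions (and its "- 1 + leftIncluded + rightIncluded"
// adjustment) differ in detail, so treat this only as an illustration of the
// principle.
#include <algorithm>
#include <iostream>
#include <vector>

static long countInRange(const std::vector<int> &sorted, int lo, bool loIncluded,
                         int hi, bool hiIncluded) {
    // rank of the first element that can be counted
    auto first = loIncluded ? std::lower_bound(sorted.begin(), sorted.end(), lo)
                            : std::upper_bound(sorted.begin(), sorted.end(), lo);
    // rank one past the last element that can be counted
    auto last  = hiIncluded ? std::upper_bound(sorted.begin(), sorted.end(), hi)
                            : std::lower_bound(sorted.begin(), sorted.end(), hi);
    std::ptrdiff_t n = std::distance(first, last);
    return n > 0 ? static_cast<long>(n) : 0L;
}

int main() {
    std::vector<int> keys = {1, 3, 3, 5, 7, 9};
    std::cout << countInRange(keys, 3, true, 7, false) << "\n";   // 3 <= k < 7  -> 3
    std::cout << countInRange(keys, 3, false, 9, true) << "\n";   // 3 <  k <= 9 -> 3
    return 0;
}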
bool NestLoopExecutor::p_execute(const NValueArray &params, ReadWriteTracker *tracker) {
    VOLT_DEBUG("executing NestLoop...");
    NestLoopPlanNode* node = dynamic_cast<NestLoopPlanNode*>(abstract_node);
    assert(node);
    assert(node->getInputTables().size() == 2);

    Table* output_table_ptr = node->getOutputTable();
    assert(output_table_ptr);

    // output table must be a temp table
    TempTable* output_table = dynamic_cast<TempTable*>(output_table_ptr);
    assert(output_table);

    Table* outer_table = node->getInputTables()[0];
    assert(outer_table);
    Table* inner_table = node->getInputTables()[1];
    assert(inner_table);

    VOLT_TRACE("input table left:\n %s", outer_table->debug().c_str());
    VOLT_TRACE("input table right:\n %s", inner_table->debug().c_str());

    //
    // Join Expression
    //
    AbstractExpression *predicate = node->getPredicate();
    if (predicate) {
        predicate->substitute(params);
        VOLT_TRACE("predicate: %s", predicate->debug(true).c_str());
    }

    int outer_cols = outer_table->columnCount();
    int inner_cols = inner_table->columnCount();
    TableTuple outer_tuple(node->getInputTables()[0]->schema());
    TableTuple inner_tuple(node->getInputTables()[1]->schema());
    TableTuple &joined = output_table->tempTuple();

    TableIterator iterator0(outer_table);
    while (iterator0.next(outer_tuple)) {
        // Populate the output table's temp tuple with the outer table's values.
        // We probably have to do this at least once - avoid doing it many
        // times per outer tuple.
        for (int col_ctr = 0; col_ctr < outer_cols; col_ctr++) {
            joined.setNValue(col_ctr, outer_tuple.getNValue(col_ctr));
        }

        TableIterator iterator1(inner_table);
        while (iterator1.next(inner_tuple)) {
            if (predicate == NULL || predicate->eval(&outer_tuple, &inner_tuple).isTrue()) {
                // Matched! Complete the joined tuple with the inner column values.
                for (int col_ctr = 0; col_ctr < inner_cols; col_ctr++) {
                    joined.setNValue(col_ctr + outer_cols, inner_tuple.getNValue(col_ctr));
                }
                output_table->insertTupleNonVirtual(joined);
            }
        }
    }
    return true;
}