// A transformation method for protecting sequence functions from being
// skipped due to short-circuit evaluation.
//
void BiLogic::protectiveSequenceFunctionTransformation(Generator *generator)
{
  // Recurse on the children
  //
  ItemExpr::protectiveSequenceFunctionTransformation(generator);

  // Remove the original value id from the node being transformed and
  // assign it a new value id.
  //
  ValueId id = getValueId();
  setValueId(NULL_VALUE_ID);
  synthTypeAndValueId(TRUE);

  // Construct the new subtree.
  //
  // AND/OR -- force right child evaluation
  //
  // LOGIC(LEFT_CHILD, RIGHT_CHILD) ==>
  //   BLOCK(RIGHT_CHILD, LOGIC(LEFT_CHILD, RIGHT_CHILD))
  //
  ItemExpr *block = new(generator->wHeap()) ItmBlockFunction(child(1), this);

  // Replace the old expression with the new expression for the
  // original value id
  //
  id.replaceItemExpr(block);

  // Run the new expression through type and value id synthesis
  //
  block->synthTypeAndValueId(TRUE);
}
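
// ----------------------------------------------------------------------
// Illustrative sketch (not part of the original source): a minimal,
// self-contained C++ program showing the problem this BLOCK
// transformation guards against. With native short-circuit evaluation,
// a stateful operand on the right-hand side of AND/OR can be skipped
// for some rows, so its running state falls behind. The names below
// (RunningCounter, evalAnd, evalAndProtected) are hypothetical and
// exist only for this illustration.
// ----------------------------------------------------------------------
#include <cstdio>

struct RunningCounter           // stands in for a stateful sequence function
{
  int calls = 0;
  bool eval() { ++calls; return true; }
};

static bool evalAnd(bool left, RunningCounter &right)
{
  return left && right.eval();  // right.eval() is skipped when left is false
}

static bool evalAndProtected(bool left, RunningCounter &right)
{
  bool r = right.eval();        // BLOCK: evaluate the right child first,
  return left && r;             // then apply the logical operator
}

int main()
{
  RunningCounter a, b;
  evalAnd(false, a);            // a.calls stays 0 -- state falls behind
  evalAndProtected(false, b);   // b.calls becomes 1 -- state stays current
  std::printf("%d %d\n", a.calls, b.calls); // prints: 0 1
  return 0;
}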
// A transformation method for protecting sequence functions from being
// skipped due to short-circuit evaluation.
//
void ItmScalarMinMax::protectiveSequenceFunctionTransformation
(Generator *generator)
{
  // Recurse on the children
  //
  ItemExpr::protectiveSequenceFunctionTransformation(generator);

  // Remove the original value id from the node being transformed.
  //
  ValueId id = getValueId();
  setValueId(NULL_VALUE_ID);
  synthTypeAndValueId(TRUE);

  // Construct the new subtree.
  //
  // SCALAR_MIN/MAX -- force evaluation of both children
  //
  // SCALAR(LEFT_CHILD, RIGHT_CHILD) ==>
  //   BLOCK(BLOCK(LEFT_CHILD, RIGHT_CHILD), 
  //         SCALAR(LEFT_CHILD, RIGHT_CHILD))
  // 
  ItemExpr *block = new(generator->wHeap()) ItmBlockFunction
    (new(generator->wHeap()) ItmBlockFunction(child(0), child(1)), this);
  
  // Replace the old expression with the new expression for the
  // original value id
  //
  id.replaceItemExpr(block);

  // Run the new expression through type and value id synthesis
  //
  block->synthTypeAndValueId(TRUE);
}
void PhysSequence::transformOlapFunctions(CollHeap *wHeap)
{

 
  for(ValueId valId = sequenceFunctions().init();
      sequenceFunctions().next(valId);
      sequenceFunctions().advance(valId)) 
  {
    
    ItemExpr * itmExpr = valId.getItemExpr();

    //NAType *itmType = itmExpr->getValueId().getType().newCopy(wHeap);

    if (itmExpr->isOlapFunction())
    {
      NAType *itmType = itmExpr->getValueId().getType().newCopy(wHeap);

      itmExpr = ((ItmSeqOlapFunction*)itmExpr)->transformOlapFunction(wHeap);

      CMPASSERT(itmExpr);
      if(itmExpr->getValueId() != valId)
      {
        itmExpr = new (wHeap) Cast(itmExpr, itmType);
        itmExpr->synthTypeAndValueId(TRUE);
        valId.replaceItemExpr(itmExpr);
        itmExpr->getValueId().changeType(itmType); // force the original type onto the new value id
      }
    }
      itmExpr->transformOlapFunctions(wHeap);
  }
}
ItemExpr *
addConvNode(ItemExpr *childExpr,
            ValueIdMap *mapping,
            CollHeap *wHeap)

{
  if(childExpr->getOperatorType() != ITM_CONVERT &&
     !childExpr->isASequenceFunction()) {

    ValueId topValue;
    mapping->mapValueIdUp(topValue,
                          childExpr->getValueId());
    if(topValue == childExpr->getValueId()) {

      // add the convert node
      ItemExpr *newChild = new(wHeap) Convert (childExpr);
      newChild->synthTypeAndValueId(TRUE);
      mapping->addMapEntry(newChild->getValueId(),
                           childExpr->getValueId());
      return newChild;
    } else {
      return topValue.getItemExpr();
    }
  }
  return childExpr;
}
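
// Hypothetical usage sketch (assumed caller context, not from the source):
// a parent sequence function can route a non-sequence child through
// addConvNode() so the child's value is materialized in the history row,
// which is how PhysSequence::getHistoryAttributes() uses it further below.
//
//   ValueIdMap *origAttributes = new (wHeap) ValueIdMap();
//   itmExpr->child(0) =
//     addConvNode(itmExpr->child(0), origAttributes, wHeap);
//   historyAttributes += itmExpr->child(0)->getValueId();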
NABoolean TableDesc::isIdentityColumnGeneratedAlways(NAString * value) const
{
    // Determine if an IDENTITY column exists and
    // has the default class of GENERATED ALWAYS AS IDENTITY.
    // Do not return TRUE, if the table type is an INDEX_TABLE.

    NABoolean result = FALSE;

    for (CollIndex j = 0; j < colList_.entries(); j++)
    {
        ValueId valId = colList_[j];
        NAColumn *column = valId.getNAColumn();

        if(column->isIdentityColumnAlways())
        {
            if (getNATable()->getObjectType() != COM_INDEX_OBJECT)
            {
                if (value != NULL)
                    *value = column->getColName();
                result = TRUE;
            }
        }
    }

    return result;
}
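
// Hypothetical usage sketch (caller and variable names assumed, for
// illustration only): the returned column name can be used to reject a
// user-supplied value for a GENERATED ALWAYS identity column.
//
//   NAString idColName;
//   if (tableDesc->isIdentityColumnGeneratedAlways(&idColName))
//   {
//     // idColName now holds the IDENTITY column name; an explicit value
//     // supplied for it in an INSERT could be flagged as an error here.
//   }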
ValueId NormWA::getEquivalentItmSequenceFunction(ValueId newSeqId)
{
  ValueId equivId = newSeqId;

  ItemExpr *newItem = newSeqId.getItemExpr();
  ItmSequenceFunction *newSeq = NULL;
  if(newItem->isASequenceFunction()) {
    newSeq = (ItmSequenceFunction *)newItem;
  }

  if(newSeq) {
    for(ValueId seqId = allSeqFunctions_.init(); allSeqFunctions_.next(seqId); 
      allSeqFunctions_.advance(seqId) ){
      ItemExpr *seq = seqId.getItemExpr();
      if(newSeq->isEquivalentForBinding(seq)){
	equivId = seqId;
	if(newSeq->origOpType() != seq->origOpType()) {
	  seq->setOrigOpType(seq->getOperatorType());
	}
	break;
      }
    }
  }

  allSeqFunctions_ += equivId;

  //
  return equivId;
}
void
RelSequence::addCancelExpr(CollHeap *wHeap)
{
  ItemExpr *cPred = NULL;

  if (this->partition().entries() > 0)
  {
    return;
  }
  if(cancelExpr().entries() > 0) 
  {
    return;
  }

  for(ValueId valId = selectionPred().init();
      selectionPred().next(valId);
      selectionPred().advance(valId)) 
  {
    ItemExpr *pred = valId.getItemExpr();

    // Look for preds that select a prefix of the sequence.
    // Rank() < const; Rank <= const; const > Rank; const >= Rank
    ItemExpr *op1 = NULL;
    ItemExpr *op2 = NULL;

    if(pred->getOperatorType() == ITM_LESS ||
       pred->getOperatorType() == ITM_LESS_EQ) 
    {
      op1 = pred->child(0);
      op2 = pred->child(1);
    }
    else if (pred->getOperatorType() == ITM_GREATER ||
             pred->getOperatorType() == ITM_GREATER_EQ) 
    {
      op1 = pred->child(1);
      op2 = pred->child(0);
    }
    NABoolean negate;
    if (op1 && op2 &&
        (op2->getOperatorType() == ITM_CONSTANT || 
         op2->getOperatorType() == ITM_DYN_PARAM)  &&
         (op1->getOperatorType() == ITM_OLAP_RANK ||
          op1->getOperatorType() == ITM_OLAP_DRANK ||
          (op1->getOperatorType() == ITM_OLAP_COUNT &&
           op1->child(0)->getOperatorType() == ITM_CONSTANT &&
           !op1->child(0)->castToConstValue(negate)->isNull())))
    {
       cPred = new(wHeap) UnLogic(ITM_NOT, pred);
       // break at first occurrence
       break;
    }
  }
  
  if(cPred) 
  {
    cPred->synthTypeAndValueId(TRUE);
    cancelExpr().insert(cPred->getValueId());
  }
}
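
// Hedged illustration (example query assumed, not from the source): the
// predicate shapes recognized above select a prefix of each sequence, e.g.
//
//   SELECT *
//   FROM (SELECT a, b, RANK() OVER (ORDER BY b) AS rnk FROM t) x
//   WHERE rnk < 10;
//
// Here "rnk < 10" matches the Rank() < const pattern, so its negation is
// inserted as a cancel expression, letting the sequence operator stop
// reading child rows once the prefix has been produced.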
// A transformation method for protecting sequence functions from being
// skipped due to short-circuit evaluation.
//
void Case::protectiveSequenceFunctionTransformation(Generator *generator)
{
  // Recurse on the children
  //
  ItemExpr::protectiveSequenceFunctionTransformation(generator);

  // Remove the original value id from the node being transformed and
  // assign it a new value id.
  //
  ValueId id = getValueId();
  setValueId(NULL_VALUE_ID);
  synthTypeAndValueId(TRUE);

  // Construct the new subtree.
  //
  // Case -- force evaluation of all the WHEN, THEN and ELSE parts
  //
  // CASE(IFE1(W1,T1,IFE2(W2,T2,IFE3(...)))) ==>
  //   BLOCK(BLOCK(BLOCK(W1,T1),BLOCK(W2,T2)), CASE(...))
  //
  // Descend the ITM_IF_THEN_ELSE tree pulling out each WHEN and THEN pair.
  // Mate each pair with a block and attach them to the protected block, 
  // which contains all of the WHEN/THEN pairs for the entire tree.
  // Also, pull out any CASE operands and attach them to the protected
  // block as well.
  //
  ItemExpr *block = NULL;
  ItemExpr *ife = child(0);
  for(; (ife != NULL) && (ife->getOperatorType() == ITM_IF_THEN_ELSE);
       ife = ife->child(2))
    {
      ItemExpr *sub = new(generator->wHeap())
	ItmBlockFunction(ife->child(0), ife->child(1));
      if(block)
	block = new(generator->wHeap()) ItmBlockFunction(sub, block);
      else
	block = sub;
    }      

  // Add the ELSE condition, if any to the protected block
  //
  if(ife)
    block = new(generator->wHeap()) ItmBlockFunction(ife, block);

  // Construct the top-level block function. The left child is the protected
  // block, which contains all of the expressions that need to be
  // pre-evaluated. The right child is the original case statement.
  //
  block = new(generator->wHeap()) ItmBlockFunction(block, this);

  // Replace the old expression with the new expression for the
  // original id
  //
  id.replaceItemExpr(block);

  // Run the new expression through type and value id synthesis
  //
  block->synthTypeAndValueId(TRUE);
}
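
// Hedged illustration (example expression assumed, not from the source):
// in an expression such as
//
//   CASE WHEN a > 0 THEN RUNNINGSUM(b) ELSE RUNNINGCOUNT(c) END
//
// only one branch would normally be evaluated per row, so the running
// state of the branch not taken would fall behind. Wrapping every
// WHEN/THEN pair (and the ELSE) in the protected block above forces all
// of them to be evaluated on every row before the CASE result is chosen.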
// -----------------------------------------------------------------------
// TableDesc::getSystemColumnList()
// -----------------------------------------------------------------------
void TableDesc::getSystemColumnList(ValueIdList &columnList) const
{
    for (CollIndex i = 0; i < colList_.entries(); i++) {
        ValueId valId = colList_[i];
        NAColumn *column = valId.getNAColumn();
        if (column->isSystemColumn())
            columnList.insert(valId);
    }
}
// PhysSequence::computeHistoryAttributes
//
// Helper function to compute the attributes for the history buffer based
// on the items projected from the child and the computed history items.
// Also adds the attribute information to the map table.
//
void
PhysSequence::computeHistoryAttributes(Generator *generator,
                                       MapTable *localMapTable, 
                                       Attributes **attrs,
                                       const ValueIdSet &historyIds) const
{
  // Get a local handle on some of the generator objects.
  //
  CollHeap *wHeap = generator->wHeap();

  // Populate the attribute vector with the flattened list of sequence 
  // functions and/or sequence function arguments that must be in the
  // history row. Add convert nodes for the items that are not sequence
  // functions to force them to be moved into the history row.
  //
  if(NOT historyIds.isEmpty())
    {
      Int32 i = 0;
      ValueId valId;

      for (valId = historyIds.init();
           historyIds.next(valId);
           historyIds.advance(valId))
        {
          // If this is not a sequence function, then insert a convert
          // node.
          //
          if(!valId.getItemExpr()->isASequenceFunction())
             {
               // Get a handle on the original expression and erase
               // the value ID.
               //
               ItemExpr *origExpr = valId.getItemExpr();
               origExpr->setValueId(NULL_VALUE_ID);
               origExpr->markAsUnBound();

               // Construct the cast expression with the original expression
               // as the child -- must have undone the child value ID to
               // avoid recursion later.
               //
               ItemExpr *castExpr = new(wHeap) 
                 Cast(origExpr, &(valId.getType()));

               // Replace the expression for the original value ID and then
               // synthesize the type and value ID for the new expression.
               //
               valId.replaceItemExpr(castExpr);
               castExpr->synthTypeAndValueId(TRUE);
             }
          attrs[i++] = (generator->addMapInfoToThis(localMapTable, valId, 0))->getAttr();
        }
    }
} // PhysSequence::computeHistoryAttributes
NABoolean TableDesc::hasIdentityColumnInClusteringKey() const
{
    ValueIdSet pKeyColumns = clusteringIndex_->getIndexKey();
    NAColumn * column = NULL;
    for(ValueId id = pKeyColumns.init(); pKeyColumns.next(id);
            pKeyColumns.advance(id))
    {
        column = id.getNAColumn();
        if (column && column->isIdentityColumn())
            return TRUE;
    }
    return FALSE;
}
// -----------------------------------------------------------------------
// TableDesc::getIdentityColumn()
// -----------------------------------------------------------------------
void TableDesc::getIdentityColumn(ValueIdList &columnList) const
{
    for (CollIndex i = 0; i < colList_.entries(); i++)
    {
        ValueId valId = colList_[i];
        NAColumn *column = valId.getNAColumn();
        if (column->isIdentityColumn())
        {
            columnList.insert(valId);
            break; // Break when you find the first,
            // as there can only be one Identity column per table.
        }
    }
}
// Here is how we detect an IDENTITY column unique index:
// 1. indexKey_.entries() == 1. (There can only be one IDENTITY column.)
// 2. Is this indexKey_ column an IDENTITY column? Look up the
//    IDENTITY property in the BaseTable's TableDesc by using
//    getPosition().
void IndexDesc::markIdentityColumnUniqueIndex(TableDesc *tdesc)
{

  if(indexKey_.entries() != 1) 
    {
      // there can be only one IDENTITY column, if any
      return;
    }
  
  // Is this indexKey_ column an IDENTITY column? 
  
  CollIndex identityColPosition = indexKey_[0].getNAColumn()->getPosition();  
  ValueId identityValueId = tdesc->getColumnList()[identityColPosition];
  if (identityValueId.getNAColumn()->isIdentityColumn())
    identityColumnUniqueIndexFlag_ = TRUE;
 
  return;
}
ex_expr *
PhysSequence::generateChildProjectExpression(Generator *generator, 
                                             MapTable *mapTable, 
                                             MapTable *localMapTable,
                                             const ValueIdSet &childProjectIds) const
{
  ex_expr * projectExpr = NULL;
  if(NOT childProjectIds.isEmpty())
    {
      // Generate the clauses for the expression
      //
      generator->getExpGenerator()->generateSetExpr(childProjectIds,
                                                    ex_expr::exp_ARITH_EXPR,
                                                    &projectExpr);

      // Add the projected values to the local map table.
      //
      ValueId valId;
      for(valId = childProjectIds.init();
          childProjectIds.next(valId);
          childProjectIds.advance(valId))
        {
          // Get the attribute information from the convert destination.
          //
          Attributes *newAttr = mapTable->getMapInfo(valId)->getAttr();

          // Add the original value to the local map table with the
          // attribute information from the convert destination.
          //
          MapInfo *mapInfo = localMapTable->addMapInfoToThis
            (valId.getItemExpr()->child(0)->getValueId(), newAttr);

          // Nothing more needs to be done for this item.
          //
          mapInfo->codeGenerated();
        }
    }

  return projectExpr;
} // PhysSequence::generateChildProjectExpression
// TableDesc::isKeyIndex()
// The parameter is a secondary index on the table. Checks whether the
// key of the secondary index is built using the primary key of the
// table. Returns TRUE if it is, FALSE otherwise.
NABoolean TableDesc::isKeyIndex(const IndexDesc * idesc) const
{
    ValueIdSet pKeyColumns = clusteringIndex_->getIndexKey();
    ValueIdSet indexColumns = idesc->getIndexKey();
    ValueIdSet basePKeys=pKeyColumns.convertToBaseIds();


    for(ValueId id = indexColumns.init(); indexColumns.next(id);
            indexColumns.advance(id))
    {
        ValueId baseId = ((BaseColumn *)(((IndexColumn *)id.getItemExpr())->
                                         getDefinition().getItemExpr()))->getValueId();
        if(NOT basePKeys.contains(baseId))
        {
            return FALSE;
        }
    }

    return TRUE;
}
void HbaseSearchSpec::addColumnNames(const ValueIdSet& vs)
{
  // TEMP TEMP. Not all needed column names are being set up.
  // for now, return without populating result.
  // that will cause all columns to be retrieved.
  //return;

   for (ValueId vid = vs.init(); vs.next(vid); vs.advance(vid)) {
      ItemExpr* ie = vid.getItemExpr();

      NAString colName;
      if ( ie->getOperatorType() == ITM_BASECOLUMN ) {
	colName = ((BaseColumn*)ie)->getColName();
      } else
	if ( ie->getOperatorType() == ITM_INDEXCOLUMN ) {
	  colName = ((IndexColumn*)ie)->getNAColumn()->getIndexColName();
	}
      
      if (NOT colNames_.contains(colName))
	colNames_.insert(colName);
   }
}
// this method sets the primary key columns. It goes through all the columns
// of the table, and collects the columns which are marked as primary keys
void TableDesc::setPrimaryKeyColumns()
{
    ValueIdSet primaryColumns;

    for ( CollIndex j = 0 ; j < colList_.entries() ; j++ )
    {

        ValueId valId = colList_[j];

        NAColumn *column = valId.getNAColumn();

        if ( column->isPrimaryKey() )
        {
            primaryColumns.insert(valId) ;
            // mark column as referenced for histogram, as we may need its histogram
            // during plan generation
            if ((column->isUserColumn() || column->isSaltColumn() ) &&
                    (column->getNATable()->getSpecialType() == ExtendedQualName::NORMAL_TABLE) )
                column->setReferencedForMultiIntHist();
        }
    }

    primaryKeyColumns_ = primaryColumns;
}
short ExpGenerator::buildKeyInfo(keyRangeGen ** keyInfo, // out -- generated object
                                 Generator * generator,
                                 const NAColumnArray & keyColumns,
                                 const ValueIdList & listOfKeyColumns,
                                 const ValueIdList & beginKeyPred,
                                 const ValueIdList & endKeyPred,
                                 const SearchKey * searchKey,
                                 const MdamKey * mdamKeyPtr,
                                 const NABoolean reverseScan,
                                 unsigned short keytag,
                                 const ExpTupleDesc::TupleDataFormat tf,
                                 // the next few parameters are here
                                 // as part of a horrible kludge for
                                 // the PartitionAccess::codeGen()
                                 // method, which lacks a SearchKey
                                 // object and therefore exposes
                                 // things like the exclusion
                                 // expressions; with luck, later work
                                 // in the Optimizer will result in a
                                 // much cleaner interface
                                 const NABoolean useTheHorribleKludge,
                                 ItemExpr * beginKeyExclusionExpr,
                                 ItemExpr * endKeyExclusionExpr,

                                 ex_expr_lean ** unique_key_expr,
                                 ULng32 *uniqueKeyLen,
                                 NABoolean doKeyEncodeOpt,
                                 Lng32 * firstKeyColOffset,
				 Int32 in_key_atp_index
                                 )

{
  Space * space = generator->getSpace();

  const Int32 work_atp = 1;
  const Int32 key_atp_index = (in_key_atp_index <= 0 ? 2 : in_key_atp_index);
  const Int32 exclude_flag_atp_index = 3;
  const Int32 data_conv_error_atp_index = 4;
  const Int32 key_column_atp_index = 5; // used only for Mdam
  const Int32 key_column2_atp_index = 6; // used only for Mdam MDAM_BETWEEN pred;
                                         //   code in BiLogic::mdamPredGenSubrange
                                         //   and MdamColumn::buildDisjunct
                                         //   requires this to be 1 more than
                                         //   key_column_atp_index
  ULng32 keyLen;

  // add an entry to the map table for work Atp
  MapTable *keyBufferPartMapTable = generator->appendAtEnd();

  // generate a temporary variable, which will be used for handling
  // data conversion errors during key building
  ValueIdList temp_varb_list;

  ItemExpr * dataConversionErrorFlag = new(generator->wHeap())
    HostVar("_sys_dataConversionErrorFlag",
            new(generator->wHeap()) SQLInt(TRUE,FALSE), // int not null
            TRUE);
  ULng32 temp_varb_tupp_len;

  dataConversionErrorFlag->bindNode(generator->getBindWA());
  temp_varb_list.insert(dataConversionErrorFlag->getValueId());
  processValIdList(temp_varb_list,
                   ExpTupleDesc::SQLARK_EXPLODED_FORMAT,
                   temp_varb_tupp_len,  // out
                   work_atp,
                   data_conv_error_atp_index);

  NABoolean doEquiKeyPredOpt = FALSE;
#ifdef _DEBUG
  if (getenv("DO_EQUI_KEY_PRED_OPT"))
    doEquiKeyPredOpt 
      = (searchKey ? searchKey->areAllChosenPredsEqualPreds() : FALSE);
#endif
  if (mdamKeyPtr == NULL)
    {
      // check to see if there is a begin key expression; if there
      // isn't, don't generate a key object
      if (beginKeyPred.entries() == 0)
        *keyInfo = 0;
      else
        {
          // For subset and range operators, generate the begin key
          // expression, end key expression, begin key exclusion expression
          // and end key exclusion expression.  For unique operators,
          // generate only the begin key expression.
          ex_expr *bk_expr = 0;
          ex_expr *ek_expr = 0;
          ex_expr *bk_excluded_expr = 0;
          ex_expr *ek_excluded_expr = 0;
          
          short bkey_excluded = 0;
          short ekey_excluded = 0;
          
          generateKeyExpr(keyColumns,
              beginKeyPred,
              work_atp,
              key_atp_index,
              dataConversionErrorFlag,
              tf,
              keyLen, // out
              &bk_expr,  // out
              doKeyEncodeOpt,
              firstKeyColOffset,
              doEquiKeyPredOpt);
              
          if (endKeyPred.entries() > 0)
             generateKeyExpr(keyColumns,
                 endKeyPred,
                 work_atp,
                 key_atp_index,
                 dataConversionErrorFlag,
                 tf,
                 keyLen, // out -- should be the same as above
                 &ek_expr,  // out
                 doKeyEncodeOpt,
                 firstKeyColOffset,
                 doEquiKeyPredOpt);
              
          if (reverseScan)
            {
              // reverse scan - swap the begin and end key predicates
              
              // Note: evidently, the Optimizer has already switched
              // the key predicates in this case, so what we are
              // really doing is switching them back.
              
              ex_expr *temp = bk_expr;
              bk_expr = ek_expr;
              ek_expr = temp;
            }
          if (searchKey)
            {
              generateExclusionExpr(searchKey->getBeginKeyExclusionExpr(),
                  work_atp,
                  exclude_flag_atp_index,
                  &bk_excluded_expr); // out
              
              bkey_excluded = (short) searchKey->isBeginKeyExclusive(); 
              
              generateExclusionExpr(searchKey->getEndKeyExclusionExpr(),
                  work_atp,
                  exclude_flag_atp_index,
                  &ek_excluded_expr); // out
              
              ekey_excluded = (short) searchKey->isEndKeyExclusive(); 

              if (reverseScan)
                {
                  NABoolean x = bkey_excluded;
                  bkey_excluded = ekey_excluded;
#pragma nowarn(1506)   // warning elimination 
                  ekey_excluded = x;
#pragma warn(1506)  // warning elimination 

                  ex_expr* temp = bk_excluded_expr;
                  bk_excluded_expr = ek_excluded_expr;
                  ek_excluded_expr = temp;
                }
            } // if searchKey
          else if (useTheHorribleKludge)
            {
              generateExclusionExpr(beginKeyExclusionExpr,
                    work_atp,
                    exclude_flag_atp_index,
                    &bk_excluded_expr); // out
              
              generateExclusionExpr(endKeyExclusionExpr,
                    work_atp,
                    exclude_flag_atp_index,
                    &ek_excluded_expr); // out
              
              // note that the old PartitionAccess::codeGen() code didn't
              // set values for bkey_excluded and ekey_excluded, so the
              // safest choice is to choose inclusion, i.e. let the flags
              // retain their initial value of 0.
            }
          
          // Build key info
          if (keytag > 0)
            keyLen += sizeof(short);

          if ((unique_key_expr == NULL) ||
              (NOT generator->genLeanExpr()))
            {
              // the work cri desc is used to build key values (entry 2) and
              // to compute the exclusion flag (entry 3) to monitor for data
              // conversion errors (entry 4) and to compute values on a column
              // basis (entry 5 - Mdam only)
              ex_cri_desc * work_cri_desc 
                        = new(space) ex_cri_desc(6, space);
              
              *keyInfo = new(space) keySingleSubsetGen(
                   keyLen,
                   work_cri_desc,
                   key_atp_index,
                   exclude_flag_atp_index,
                   data_conv_error_atp_index,
                   bk_expr,
                   ek_expr,
                   bk_excluded_expr,
                   ek_excluded_expr,
                   // static exclude flags (if exprs are NULL)
                   bkey_excluded,
                   ekey_excluded); 
              if (unique_key_expr)
                *unique_key_expr = NULL;
            }
          else
            {
              if (keyInfo)
                *keyInfo = NULL;
              *unique_key_expr = (ex_expr_lean*)bk_expr;
              *uniqueKeyLen = keyLen;
            }
        }
    }  // end of non-mdam case
  else // Mdam case
    {
      // the work cri desc is used to build key values (entry 2) and
      // to compute the exclusion flag (entry 3) to monitor for data
      // conversion errors (entry 4) and to compute values on a column
      // basis (entry 5 - Mdam only, and entry 6 - Mdam only, and only
      // for MDAM_BETWEEN predtype)
      ex_cri_desc * work_cri_desc 
            = new(space) ex_cri_desc(7, space);

      // compute the format of the key buffer -- We need this
      // so that Mdam will know, for each column, where in the buffer
      // to move a value and how many bytes that value takes.  The
      // next few lines of code result in this information being stored
      // in the attrs array.

      // Some words on the technique:  We create expressions whose
      // result datatype matches the key buffer datatypes for each key
      // column.  Then we use the datatypes of these expressions to
      // compute buffer format.  The expressions themselves are not
      // used any further; they do not result in compiled expressions
      // in the plan.  At run time we use string moves to move key
      // values instead.
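
      // Hedged illustration (hypothetical key, not from the original
      // source): for a two-column key (c1 INT NOT NULL, c2 CHAR(4) NOT
      // NULL), the per-column Cast/CompEncode expressions built below have
      // result types matching the key buffer, so after processAttributes()
      // the attrs array would describe roughly attrs[0] = {offset 0,
      // length 4} and attrs[1] = {offset 4, length 4}; at run time Mdam
      // uses exactly these offsets and lengths to string-move key values
      // into the buffer.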

 
      const CollIndex keyCount = listOfKeyColumns.entries();
      CollIndex i;

      // assert at least one column
      GenAssert(keyCount > 0,"MDAM:  at least one key column required.");

      Attributes ** attrs = new(generator->wHeap()) Attributes * [keyCount];
      
      for (i = 0; i < keyCount; i++)
        {
          ItemExpr * col_node =
              listOfKeyColumns[i].getItemExpr(); 
          ItemExpr *enode = col_node;

          if ((tf == ExpTupleDesc::SQLMX_KEY_FORMAT) &&
              (enode->getValueId().getType().getVarLenHdrSize() > 0))
            {
              // varchar keys in SQL/MP tables are converted to
              // fixed length chars in key buffers

              const CharType& char_type =
                          (CharType&)(enode->getValueId().getType());

              if (!CollationInfo::isSystemCollation(char_type.getCollation()))
                {
                  enode = new(generator->wHeap()) 
                             Cast(enode, 
                                   (new (generator->wHeap())
                                          SQLChar( CharLenInfo(char_type.getStrCharLimit(),
                                                   char_type.getDataStorageSize()),
                                          char_type.supportsSQLnull(),
                                          FALSE, FALSE, FALSE,
                                          char_type.getCharSet(),
                                          char_type.getCollation(),
                                          char_type.getCoercibility())));
                }
            }

          NABoolean desc_flag;

          if (keyColumns.isAscending(i))
            desc_flag = reverseScan;
          else
            desc_flag = !reverseScan;

#pragma nowarn(1506)   // warning elimination 
          enode = new(generator->wHeap()) CompEncode(enode,desc_flag); 
#pragma warn(1506)  // warning elimination 
          enode->bindNode(generator->getBindWA());

          attrs[i] = 
            (generator->
             addMapInfoToThis(keyBufferPartMapTable, enode->getValueId(), 0))->getAttr();
        }  // for, over keyCount

      // Compute offsets, lengths, etc. and assign them to the right
      // atp and atp index

      processAttributes((ULng32)keyCount,
                        attrs, tf,
                        keyLen,
                        work_atp, key_atp_index);

      // Now we have key column offsets and lengths stored in attrs.
 
      // Next, for each column, generate expressions to compute hi,
      // lo, non-null hi and non-null lo values, and create
      // MdamColumnGen structures.

      // Notes: In the Mdam network itself, all key values are
      // encoded.  Hence, we generate CompEncode nodes in all of the
      // expressions, regardless of tuple format.  In the Simulator
      // case, we must at run-time decode the encoded values when
      // moving them to the key buffer.  $$$ We need an expression to
      // do this.  This decoding work has not yet been done, so the
      // simulator only works correctly for columns that happen to be
      // correctly aligned and whose encoding function does not change
      // the value. $$$

      MdamColumnGen * first = 0;
      MdamColumnGen * last = 0;
      LIST(NAType *) keyTypeList(generator->wHeap());//to keep the type of the keys for later

      for (i = 0; i < keyCount; i++)
        {
          // generate expressions to compute hi, lo, non-null hi, non-null lo
          NAType * targetType = (keyColumns[i]->getType())->newCopy(generator->wHeap());

          // Genesis case 10-971031-9814 fix: desc_flag must take into account
          // both the ASC/DESC attribute of the key column and the reverseScan
          // attribute. Before this fix, it only took into account the first of
          // these.
          NABoolean desc_flag;

          if (keyColumns.isAscending(i))
            desc_flag = reverseScan;
          else
            desc_flag = !reverseScan;
          // End Genesis case 10-971031-9814 fix.

          if ((tf == ExpTupleDesc::SQLMX_KEY_FORMAT) &&
              (targetType->getVarLenHdrSize() > 0))
            {

// 5/9/98: add support for VARNCHAR
              const CharType* char_type = (CharType*)(targetType);

              if (!CollationInfo::isSystemCollation(char_type->getCollation()))
                {
                  targetType = new(generator->wHeap()) 
                                      SQLChar( CharLenInfo(char_type->getStrCharLimit(),
                                                           char_type->getDataStorageSize()),
                                      char_type -> supportsSQLnull(),
                                      FALSE, FALSE, FALSE,
                                      char_type -> getCharSet(),
                                      char_type -> getCollation(),
                                      char_type -> getCoercibility());
/*
                  targetType->getNominalSize(),
                  targetType->supportsSQLnull()
*/
                }
            }

          keyTypeList.insert(targetType);  // save in ith position for later
  
          // don't need to make copy of targetType in next call
          ItemExpr * lo = new(generator->wHeap()) ConstValue(targetType, 
                                !desc_flag, 
                                TRUE /* allow NULL */);
#pragma nowarn(1506)   // warning elimination 
          lo = new(generator->wHeap()) CompEncode(lo,desc_flag); 
#pragma warn(1506)  // warning elimination 
          lo->bindNode(generator->getBindWA());

          ValueIdList loList;
          loList.insert(lo->getValueId());

          ex_expr *loExpr = 0;
          ULng32 dataLen = 0;

          generateContiguousMoveExpr(loList,
                   0, // don't add convert nodes
                   work_atp,
                   key_column_atp_index,
                   tf,
                   dataLen,
                   &loExpr);

          ItemExpr * hi = new(generator->wHeap()) ConstValue(targetType->newCopy(generator->wHeap()),
                                desc_flag,
                                TRUE /* allow NULL */);
#pragma nowarn(1506)   // warning elimination 
          hi = new(generator->wHeap()) CompEncode(hi,desc_flag);
#pragma warn(1506)  // warning elimination 
          hi->bindNode(generator->getBindWA());

          ValueIdList hiList;
          hiList.insert(hi->getValueId());

          ex_expr *hiExpr = 0;
          
          generateContiguousMoveExpr(hiList,
                   0, // don't add convert nodes
                   work_atp,
                   key_column_atp_index,
                   tf,
                   dataLen,
                   &hiExpr);

          ex_expr *nonNullLoExpr = loExpr;
          ex_expr *nonNullHiExpr = hiExpr;

          if (targetType->supportsSQLnull())
            {
              if (desc_flag)
                {
                  ItemExpr * nonNullLo = new(generator->wHeap())
                    ConstValue(targetType->newCopy(generator->wHeap()),
                               !desc_flag, 
                               FALSE /* don't allow NULL */);
      #pragma nowarn(1506)   // warning elimination 
                  nonNullLo = new(generator->wHeap()) CompEncode(nonNullLo,desc_flag); 
      #pragma warn(1506)  // warning elimination 
                  nonNullLo->bindNode(generator->getBindWA());

                  ValueIdList nonNullLoList;
                  nonNullLoList.insert(nonNullLo->getValueId());
                  nonNullLoExpr = 0;  // so we will get an expression back

                  generateContiguousMoveExpr(nonNullLoList,
                           0, // don't add convert nodes
                           work_atp,
                           key_column_atp_index,
                           tf,
                           dataLen,
                           &nonNullLoExpr);
                }
              else
                {
                  ItemExpr * nonNullHi = new(generator->wHeap())
                    ConstValue(targetType->newCopy(generator->wHeap()),
                         desc_flag, 
                         FALSE /* don't allow NULL */);
#pragma nowarn(1506)   // warning elimination 
                  nonNullHi = new(generator->wHeap()) CompEncode(nonNullHi,desc_flag); 
#pragma warn(1506)  // warning elimination 
                  nonNullHi->bindNode(generator->getBindWA());
                  
                  ValueIdList nonNullHiList;
                  nonNullHiList.insert(nonNullHi->getValueId());
                  nonNullHiExpr = 0;  // so we will get an expression back
                  
                  generateContiguousMoveExpr(nonNullHiList,
                           0, // don't add convert nodes
                           work_atp,
                           key_column_atp_index,
                           tf,
                           dataLen,
                           &nonNullHiExpr);
                }
            }

          NABoolean useSparseProbes = mdamKeyPtr->isColumnSparse(i); 
          
          // calculate offset to the beginning of the column value
          // (including the null indicator and the varchar length
          // indicator if present)

          ULng32 column_offset = attrs[i]->getOffset();

          if (attrs[i]->getNullFlag())
            column_offset = attrs[i]->getNullIndOffset();
          else if (attrs[i]->getVCIndicatorLength() > 0)
            column_offset = attrs[i]->getVCLenIndOffset();

          last = new(space) MdamColumnGen(last,
                  dataLen,
                  column_offset,
                  useSparseProbes,
                  loExpr,
                  hiExpr,
                  nonNullLoExpr,
                  nonNullHiExpr);
          if (first == 0)
            first = last;
        }  // for over keyCount

      // generate MdamPred's and attach to MdamColumnGen's

      const ColumnOrderListPtrArray &columnOrderListPtrArray =
           mdamKeyPtr->getColumnOrderListPtrArray();

#ifdef _DEBUG
      // Debug print statements below depend on this
      // variable:
      char *ev = getenv("MDAM_PRINT");
      const NABoolean mdamPrintOn = (ev != NULL AND strcmp(ev,"ON")==0);
#endif

#ifdef _DEBUG
      if (mdamPrintOn)
        {
          fprintf(stdout, "\n\n***Generating the MDAM key for table with index"
                  " columns: ");
          listOfKeyColumns.display();
        }
#endif

      for (CollIndex n = 0; n < columnOrderListPtrArray.entries(); n++)
        {
          // get the list of key predicates associated with the n disjunct:
          const ColumnOrderList &columnOrderList = *columnOrderListPtrArray[n];

#ifdef _DEBUG
          if (mdamPrintOn)
            {
              fprintf(stdout,"\nDisjunct[%d]:----------------\n",n);
              columnOrderList.print();
            }
#endif
          MdamColumnGen * cc = first;

          CMPASSERT(keyCount == columnOrderList.entries());
          const ValueIdSet *predsPtr = NULL;
          for (i = 0; i < keyCount; i++)
            {
#ifdef _DEBUG
              if (mdamPrintOn)
                {
                  fprintf(stdout, "Column(%d) using: ", i);
                  if ( mdamKeyPtr->isColumnSparse(i) )
                    fprintf(stdout,"SPARSE probes\n");
                  else
                    fprintf(stdout, "DENSE probes\n");
                }
#endif
              // get predicates for column order i:
              predsPtr = columnOrderList[i];

              NAType * keyType = keyTypeList[i];

              NABoolean descending;

              if (keyColumns.isAscending(i))
                descending = reverseScan;
              else
                descending = !reverseScan;

              ValueId keyColumn = listOfKeyColumns[i];

              MdamCodeGenHelper mdamHelper(
                   n,
                   keyType,
                   descending,
                   work_atp,
                   key_column_atp_index,
                   tf,
                   dataConversionErrorFlag,
                   keyColumn);

              MdamPred * lastPred = cc->getLastPred();

              if (predsPtr != NULL)
                {
                  for (ValueId predId = predsPtr->init(); 
                       predsPtr->next(predId); predsPtr->advance(predId))
                    {
                      MdamPred * head = 0;  // head of generated MdamPred's
                      MdamPred * tail = 0;

                      ItemExpr * orGroup = predId.getItemExpr();

                      orGroup->mdamPredGen(generator,&head,&tail,mdamHelper,NULL);

                      if (lastPred)
                        {
                          if ( CmpCommon::getDefault(RANGESPEC_TRANSFORMATION) == DF_ON )
                            {
                              MdamPred* curr = lastPred;
                              while(curr->getNext() != NULL)
                                curr=curr->getNext();
                              curr->setNext(head);
                            }
                          else
                            lastPred->setNext(head);
                        }
                      cc->setLastPred(tail);
                      lastPred = tail;  //@ZXmdam if 1st pred has head != tail, head is lost
                    } // for over preds
                } // if (predsPtr != NULL)
              cc = cc->getNext();
            } // for every order...
        } // for every column order list in the array (of disjuncts)

      // build the Mdam key info
      if (keytag > 0)
        keyLen += sizeof(short);
      *keyInfo = new(space) keyMdamGen(keyLen,
                                       work_cri_desc,
                                       key_atp_index,
                                       exclude_flag_atp_index,
                                       data_conv_error_atp_index,
                                       key_column_atp_index,
                                       first,
                                       last,
                                       reverseScan,
                                       generator->wHeap());

    }  // end of mdam case

  if (*keyInfo)
    (*keyInfo)->setKeytag(keytag);

  // reset map table to forget about the key object's work Atp
  
  // aside: this logic is more bloody than it should be because the
  // map table implementation doesn't accurately reflect the map table
  // abstraction
  
  generator->removeAll(keyBufferPartMapTable); // deletes anything that might have been
  // added after keyBufferPartMapTable (at
  // this writing we don't expect there to
  // be anything, but we want to be safe)
  // at this point keyBufferPartMapTable should be the last map table in the
  // global map table chain
  generator->removeLast();  // unlinks keyBufferPartMapTable and deletes it
  
  return 0;
}
// computeHistoryRows
//
// Helper function that traverses the set of root sequence functions
// supplied by the compiler and dynamically determines the size
// of the history buffer.
// 
void PhysSequence::computeHistoryRows(const ValueIdSet &sequenceFunctions,//historyIds
                                      Lng32 &computedHistoryRows,
                                      Lng32 &unableToCalculate,
                                      NABoolean &unboundedFollowing, 
                                      Lng32 &minFollowingRows,
                                      const ValueIdSet &outputFromChild) 
{
  ValueIdSet children;
  ValueIdSet historyAttributes;
  Lng32 value = 0;

  for(ValueId valId = sequenceFunctions.init();
      sequenceFunctions.next(valId);
      sequenceFunctions.advance(valId)) 
  {
    if(valId.getItemExpr()->isASequenceFunction()) 
    {
      ItemExpr *itmExpr = valId.getItemExpr();

      switch(itmExpr->getOperatorType())
        {

        // THIS and NOT THIS are not dynamically computed
        //
        case ITM_THIS:
        case ITM_NOT_THIS:
          break;

        // The RUNNING functions and LastNotNull all need to go back just one row.
        //
        case ITM_RUNNING_SUM:
        case ITM_RUNNING_COUNT:
        case ITM_RUNNING_MIN:
        case ITM_RUNNING_MAX:
        case ITM_RUNNING_CHANGE:   
        case ITM_LAST_NOT_NULL:
          computedHistoryRows = MAXOF(computedHistoryRows, 2);
          break;
        // Set to 'unable to compute' for now -- will change later to compute
        // values from frameStart_ and frameEnd_.
        case ITM_OLAP_SUM:
        case ITM_OLAP_COUNT:
        case ITM_OLAP_MIN:
        case ITM_OLAP_MAX:
        case ITM_OLAP_RANK:
        case ITM_OLAP_DRANK:
        {
          if ( !outputFromChild.contains(itmExpr->getValueId()))
          {
            ItmSeqOlapFunction * olap = (ItmSeqOlapFunction*)itmExpr;

            if (olap->isFrameStartUnboundedPreceding()) //(olap->getframeStart() == - INT_MAX)
            {
              computedHistoryRows = MAXOF(computedHistoryRows, 2);
            }
            else
            {
              computedHistoryRows = MAXOF(computedHistoryRows, ABS(olap->getframeStart()) + 2);
            }
            if (!olap->isFrameEndUnboundedFollowing()) //(olap->getframeEnd() != INT_MAX)
            {
              computedHistoryRows = MAXOF(computedHistoryRows, ABS(olap->getframeEnd()) + 1);
            }

            if (olap->isFrameEndUnboundedFollowing()) //(olap->getframeEnd() == INT_MAX)
            {
              unboundedFollowing = TRUE;
              if (olap->getframeStart() > 0) 
              {
                minFollowingRows = ((minFollowingRows > olap->getframeStart()) ?  
                                    minFollowingRows : olap->getframeStart());
              }
            } else  if (olap->getframeEnd() > 0)
            {
              minFollowingRows = ((minFollowingRows > olap->getframeEnd()) ?  
                                  minFollowingRows : olap->getframeEnd());
            }
          }
        }

        break;

        // If 'rows since', we cannot determine how much history is needed.  
        case ITM_ROWS_SINCE:
          unableToCalculate = 1;
          break;

        // The MOVING and OFFSET functions need to go back as far as the value
        // of their second child.
        //
        //  The second argument can be:
        //    Constant: for these, we can use the constant value to set the upper bound
        //              for the history buffer.
        //    ItmScalarMinMax(child0, child1) (with operType = ITM_SCALAR_MIN)  
        //      - if child0 or child1 is a constant, then we can use either one
        //        to set the upper bound.
        
        case ITM_MOVING_MIN:
        case ITM_MOVING_MAX:
        case ITM_OFFSET:
         
          for(Lng32 i = 1; i < itmExpr->getArity(); i++)
          {
            if (itmExpr->child(i)->getOperatorType() != ITM_NOTCOVERED)
            {
              ItemExpr * exprPtr = itmExpr->child(i);
              NABoolean negate;
              ConstValue *cv = exprPtr->castToConstValue(negate);
              if (cv AND cv->canGetExactNumericValue())
                {
                  Lng32 scale;
                  Int64 value64 = cv->getExactNumericValue(scale);

                  if(scale == 0 && value64 >= 0 && value64 < INT_MAX) 
                    {
                      value64 = (negate ? -value64 : value64);
                      value = MAXOF((Lng32)value64, value);
                    }
                 }
              else
                {
                  if (exprPtr->getOperatorType() == ITM_SCALAR_MIN)
                    {
                      for(Lng32 j = 0; j < exprPtr->getArity(); j++)
                        {
                          if (exprPtr->child(j)->getOperatorType()
                            != ITM_NOTCOVERED)
                            {
                               ItemExpr * exprPtr1 = exprPtr->child(j);
                               NABoolean negate1;
                               ConstValue *cv1 = exprPtr1->castToConstValue(negate1);
                               if (cv1 AND cv1->canGetExactNumericValue())
                                 {
                                   Lng32 scale1;
                                   Int64 value64_1 = cv1->getExactNumericValue(scale1);

                                   if(scale1 == 0 && value64_1 >= 0 && value64_1 < INT_MAX) 
                                     {
                                       value64_1 = (negate1 ? -value64_1 : value64_1);
                                       value = MAXOF((Lng32)value64_1, value);
                                     }
                                  }
                              }
                          }   
                     }   
                }  // end of inner else
            }// end of if

          }// end of for

          // Check if the value is greater than zero. If it is, increment
          // it by one and save it as the required history size.
          // Otherwise, the offset or moving window size could not be
          // calculated.

          if (value > 0)
          {
            value++;
            computedHistoryRows = MAXOF(computedHistoryRows, value);
            value = 0;
          }
          else
            unableToCalculate = 1;

          break;

        default:
          CMPASSERT(0);
        }
    }
   
    // Gather all the children, and if not empty, recurse down to the
    // next level of the tree.
    //

    for(Lng32 i = 0; i < valId.getItemExpr()->getArity(); i++) {
      if (//valId.getItemExpr()->child(i)->getOperatorType() != ITM_NOTCOVERED //old stuff
          !outputFromChild.contains(valId.getItemExpr()->child(i)->getValueId()))
      {
        children += valId.getItemExpr()->child(i)->getValueId();
      }
    }
  }
  
  if (NOT children.isEmpty())
  {
    computeHistoryRows(children, 
                       computedHistoryRows, 
                       unableToCalculate, 
                       unboundedFollowing, 
                       minFollowingRows,
                       outputFromChild);  
  }
} // PhysSequence::computeHistoryRows
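
// Hedged worked example (hypothetical window frame, not from the source):
// for SUM(x) OVER (ORDER BY k ROWS BETWEEN 3 PRECEDING AND 2 FOLLOWING),
// getframeStart() would be -3 and getframeEnd() would be 2, so the OLAP
// case above requires
//   computedHistoryRows >= ABS(-3) + 2 = 5   (from the frame start)
//   computedHistoryRows >= ABS(2)  + 1 = 3   (from the frame end)
// i.e. at least 5 history rows; and because the frame end is bounded and
// positive, minFollowingRows is raised to at least 2.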
void PhysSequence::computeReadNReturnItems( ValueId topSeqVid,
                                            ValueId vid,
                                            const ValueIdSet &outputFromChild,
                                            CollHeap *wHeap)
{
  ItemExpr * itmExpr = vid.getItemExpr();


  if (outputFromChild.contains(vid)) 
  {
    return;
  }
  // Test for an OFFSET (and, below, a MINUS over OFFSETs) with a negative
  // offset value, i.e. a reference to a following row.
  if ( itmExpr->getOperatorType() == ITM_OFFSET &&
      ((ItmSeqOffset *)itmExpr)->getOffsetConstantValue() < 0)
  {
    readSeqFunctions() -= topSeqVid;
    returnSeqFunctions() += topSeqVid;

    readSeqFunctions() += itmExpr->child(0)->castToItemExpr()->getValueId();
    return;
  }
  
  if (itmExpr->getOperatorType() == ITM_MINUS)
  {
    ItemExpr * chld0  = itmExpr->child(0)->castToItemExpr();
    if ( chld0->getOperatorType() == ITM_OFFSET &&
        ((ItmSeqOffset *)chld0)->getOffsetConstantValue() <0)
    {
      readSeqFunctions() -= topSeqVid;
      returnSeqFunctions() += topSeqVid;

      readSeqFunctions() += chld0->child(0)->castToItemExpr()->getValueId();

      ItemExpr * chld1  = itmExpr->child(1)->castToItemExpr();
      if (chld1->getOperatorType() == ITM_OFFSET &&
          ((ItmSeqOffset *)chld1)->getOffsetConstantValue() < 0)
      {
        readSeqFunctions() += chld1->child(0)->castToItemExpr()->getValueId();
      }
      else
      {
        readSeqFunctions() += chld1->getValueId();
      }
      return;
    }
    
  }
  
  
  if (itmExpr->getOperatorType() == ITM_OLAP_MIN || 
           itmExpr->getOperatorType() == ITM_OLAP_MAX) 
  { 
    ItmSeqOlapFunction * olap = (ItmSeqOlapFunction *)itmExpr;
    if (olap->getframeEnd()>0)
    {
      readSeqFunctions() -= topSeqVid;
      returnSeqFunctions() += topSeqVid;

      ItemExpr *newChild = new(wHeap) Convert (itmExpr->child(0)->castToItemExpr());
      newChild->synthTypeAndValueId(TRUE);

      itmExpr->child(0) = newChild;

      readSeqFunctions() += newChild->getValueId();
      return;
    }
  }
  
  if (itmExpr->getOperatorType() == ITM_SCALAR_MIN || 
           itmExpr->getOperatorType() == ITM_SCALAR_MAX) 
  {
    ItemExpr * chld0  = itmExpr->child(0)->castToItemExpr();
    ItemExpr * chld1  = itmExpr->child(1)->castToItemExpr();
    if ((chld0->getOperatorType() == ITM_OLAP_MIN && chld1->getOperatorType() == ITM_OLAP_MIN )|| 
        (chld0->getOperatorType() == ITM_OLAP_MAX && chld1->getOperatorType() == ITM_OLAP_MAX ))
    {
      ItmSeqOlapFunction * olap0 = (ItmSeqOlapFunction *)chld0;
      ItmSeqOlapFunction * olap1 = (ItmSeqOlapFunction *)chld1;
      if ( olap1->getframeEnd()>0)
      { 
        CMPASSERT(olap0->getframeEnd()==0);

        readSeqFunctions() -= topSeqVid;
        returnSeqFunctions() += topSeqVid;
        readSeqFunctions() += olap0->getValueId();
        
        ItemExpr *newChild = new(wHeap) Convert (olap1->child(0)->castToItemExpr());
        newChild->synthTypeAndValueId(TRUE);

        olap1->child(0) = newChild;

        readSeqFunctions() += newChild->getValueId();
      }
      else
      {
        CMPASSERT(olap1->getframeEnd()==0);

        readSeqFunctions() -= topSeqVid;
        returnSeqFunctions() += topSeqVid;
        readSeqFunctions() += olap1->getValueId();

        ItemExpr *newChild = new(wHeap) Convert (olap0->child(0)->castToItemExpr());
        newChild->synthTypeAndValueId(TRUE);

        olap0->child(0) = newChild;

        readSeqFunctions() += newChild->getValueId();
      }
      return;
    }
  }

  for (Int32 i= 0 ; i < itmExpr->getArity(); i++)
  {
    ItemExpr * chld= itmExpr->child(i);
    computeReadNReturnItems(topSeqVid,
                            chld->getValueId(),
                            outputFromChild,
                            wHeap);
  }
} // PhysSequence::computeReadNReturnItems
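
// Hedged illustration (hypothetical expression, not from the source): for
// a negative offset such as OFFSET(x, -2), which references a following
// row, the code above moves the OFFSET itself from readSeqFunctions() to
// returnSeqFunctions() and reads only its child x; the operator can then
// buffer the following rows on the read side and apply the offset on the
// return side.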
// getHistoryAttributes
//
// Helper function that traverses the set of root sequence functions
// supplied by the compiler and constructs the set of all of the
// attributes that must be materialized in the history row.
// 
void PhysSequence::getHistoryAttributes(const ValueIdSet &sequenceFunctions,
                                        const ValueIdSet &outputFromChild,
                                        ValueIdSet &historyAttributes,
                                        NABoolean addConvNodes,
                                        CollHeap *wHeap,
                                        ValueIdMap *origAttributes) const
{
  if(addConvNodes && !origAttributes) {
    origAttributes = new (wHeap) ValueIdMap();
  }

  ValueIdSet children;
  for(ValueId valId = sequenceFunctions.init();
      sequenceFunctions.next(valId);
      sequenceFunctions.advance(valId)) {

    if(valId.getItemExpr()->isASequenceFunction()) {
      ItemExpr *itmExpr = valId.getItemExpr();

      switch(itmExpr->getOperatorType())
        {
          // The child needs to be in the history row.
          //
        case ITM_OFFSET:
        case ITM_ROWS_SINCE:
        case ITM_THIS:
        case ITM_NOT_THIS:

          // If the child needs to be in the history buffer, then
          // add a Convert node to force the value to be moved to the
          // history buffer.
          if (addConvNodes)
            {
              itmExpr->child(0) = 
                addConvNode(itmExpr->child(0), origAttributes, wHeap);
            }
          historyAttributes += itmExpr->child(0)->getValueId();
          break;

          // The sequence function needs to be in the history row.
          //
        case ITM_RUNNING_SUM:
        case ITM_RUNNING_COUNT:
        case ITM_RUNNING_MIN:
        case ITM_RUNNING_MAX:
        case ITM_LAST_NOT_NULL:
          historyAttributes += itmExpr->getValueId();
          break;
/*
        // After PhysSequence precodegen, OLAP sum and count are already
        // transformed into running; this is used during the optimization phase.
        case ITM_OLAP_SUM:
        case ITM_OLAP_COUNT:
        case ITM_OLAP_RANK:
        case ITM_OLAP_DRANK:
          if (addConvNodes)
            {
              itmExpr->child(0) = 
                addConvNode(itmExpr->child(0), origAttributes, wHeap);
            }

          historyAttributes += itmExpr->child(0)->getValueId();
          //historyAttributes += itmExpr->getValueId();	  
          break;
*/
          // The child and sequence function need to be in the history row.
          //
        case ITM_OLAP_MIN:
        case ITM_OLAP_MAX:
        case ITM_MOVING_MIN:
        case ITM_MOVING_MAX:

          // If the child needs to be in the history buffer, then
          // add a Convert node to force the value to be moved to the
          // history buffer.
          if (addConvNodes)
            {
              itmExpr->child(0) = 
                addConvNode(itmExpr->child(0), origAttributes, wHeap);
            }

          historyAttributes += itmExpr->child(0)->getValueId();
          historyAttributes += itmExpr->getValueId();	  
          break;

        case ITM_RUNNING_CHANGE:
          if (itmExpr->child(0)->getOperatorType() == ITM_ITEM_LIST)
            {
              // child is a multi-valued expression
              // 
              ExprValueId treePtr = itmExpr->child(0);

              ItemExprTreeAsList changeValues(&treePtr,
                                              ITM_ITEM_LIST,
                                              RIGHT_LINEAR_TREE);

              CollIndex nc = changeValues.entries();
              
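              // Seed the history attributes with the last change value
              // (wrapping it in a Convert when addConvNodes is set), then
              // walk the list from back to front: each value is added to
              // the history attributes and, when converting, the child is
              // rebuilt as a right-linear ItemList of the Convert nodes.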
              ItemExpr *newChild = NULL;
              if(addConvNodes) {
                newChild = addConvNode(changeValues[nc-1], origAttributes, wHeap);
                historyAttributes += newChild->getValueId();
              } else {
                historyAttributes += changeValues[nc-1]->getValueId();
              }

              // add each item in the list
              // 
              for (CollIndex i = nc; i > 0; i--)
                {
                  if(addConvNodes) {
                    ItemExpr *conv
                      = addConvNode(changeValues[i-1], origAttributes, wHeap);

                    newChild = new(wHeap) ItemList(conv, newChild);
                    newChild->synthTypeAndValueId(TRUE);
                    historyAttributes += conv->getValueId();
                  } else {
                    historyAttributes += changeValues[i-1]->getValueId();
                  }
                }

              if(addConvNodes) {
                itmExpr->child(0) = newChild;
              }
            }
          else
            {

              // If the child needs to be in the history buffer, then
              // add a Convert node to force the value to be moved to the
              // history buffer.
              if (addConvNodes)
                {
                  itmExpr->child(0) = 
                    addConvNode(itmExpr->child(0), origAttributes, wHeap);
                }

              historyAttributes += itmExpr->child(0)->getValueId();
            }

          historyAttributes += itmExpr->getValueId();  
          break;

        default:
          CMPASSERT(0);
        }
    }

    // Gather all the children, and if not empty, recurse down to the
    // next level of the tree.
    //
    for(Lng32 i = 0; i < valId.getItemExpr()->getArity(); i++) 
    {
      if (!outputFromChild.contains(valId.getItemExpr()->child(i)->getValueId()))
        //!valId.getItemExpr()->child(i)->nodeIsPreCodeGenned()) 
      {
        children += valId.getItemExpr()->child(i)->getValueId();
      }
    }
  }
  
  if (NOT children.isEmpty())
  {
    getHistoryAttributes( children,
                          outputFromChild,
                          historyAttributes, 
                          addConvNodes, 
                          wHeap, 
                          origAttributes);
  }

} // PhysSequence::getHistoryAttributes
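
// Illustrative example (not part of the original source; "colA" is a
// hypothetical column): for a root sequence function such as
//
//   ITM_RUNNING_SUM(ITM_OFFSET(colA, 2))
//
// the ITM_RUNNING_SUM case above adds the running sum's own value id to
// historyAttributes, the OFFSET child is gathered into 'children', and the
// recursive call then hits the ITM_OFFSET case, which adds (a Convert of)
// colA to historyAttributes as well -- so both the running aggregate and
// the offset's source value end up materialized in the history row.
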
// ------------------------------------------------------------------------------
// Create my colStats based on my child's output by mapping the child's
// columns to the corresponding columns of the update target.
// ------------------------------------------------------------------------------
void EstLogProp::mapOutputsForUpdate(const GenericUpdate & updateExpr, 
				     const ValueIdMap & updateSelectValueIdMap)
{

  TableDesc * updateTable = updateExpr.getTableDesc();

  for ( CollIndex i = 0; i < colStats().entries(); i++ )
  {
    ColStatDescSharedPtr colStatPtr = (colStats())[i];     
    const ValueId columnId = colStatPtr->getVEGColumn();

    ValueId updateColVEGOutputId;
    updateSelectValueIdMap.mapValueIdUp(updateColVEGOutputId, columnId);
    ValueId updateBaseColumnId;

    if (updateColVEGOutputId != columnId)
    {
      updateBaseColumnId = updateColVEGOutputId;
     
      ValueIdSet baseColumns;
      updateColVEGOutputId.getItemExpr()->findAll( ITM_BASECOLUMN, baseColumns, TRUE, TRUE );

      // from all the columns extracted, get the one for Insert table
      TableDesc * thisTable = NULL;
      for (ValueId column = baseColumns.init(); baseColumns.next(column);
	  baseColumns.advance(column) )
      {
	ItemExpr * columnExpr = column.getItemExpr();
	thisTable = ((BaseColumn *)columnExpr)->getTableDesc();
	if (thisTable == updateTable)
	{
	  // set my column as the base column
	  updateBaseColumnId = column;
	  break;
	}
       }
       
       ColStatsSharedPtr inColStats = colStatPtr->getColStats();
       ColStatsSharedPtr colStatsForUpdate(new (STMTHEAP) ColStats (*inColStats,STMTHEAP));

       colStatsForUpdate->setStatColumn(updateBaseColumnId.getNAColumn());
       // Use this ColStats object to generate a new ColStatDesc corresponding
       // to the characteristic output of the Update expression.

       ColStatDescSharedPtr colStatDescForUpdate(new (STMTHEAP) ColStatDesc(colStatsForUpdate, 
					    updateBaseColumnId,  // ValueId of the column that will be used 
							 // as a column name, VEG and mergeStats
					    STMTHEAP), STMTHEAP);
       colStatDescForUpdate->VEGColumn() = updateColVEGOutputId;
       colStatDescForUpdate->mergeState().clear() ;
       colStatDescForUpdate->mergeState().insert(updateBaseColumnId);

       // Remove the old colStat and insert this colStat into the result colStatDescList
       colStats().removeAt( i );

       colStats().insertDeepCopyAt(i, colStatDescForUpdate, // colStats to be copied
				     1,			   // scale
				     FALSE);

    }
  }
}
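
// Illustrative example (not part of the original source; T.X and S.Y are
// hypothetical columns): if updateSelectValueIdMap maps the target column
// T.X (top) to the select-side column S.Y (bottom), then for a ColStatDesc
// whose VEG column is S.Y, mapValueIdUp() yields the update-side VEG output.
// The loop above then finds the BASECOLUMN belonging to the target table
// inside that VEG, deep-copies the child's ColStats, rebinds the copy to
// T.X, and replaces the original entry in colStats().
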
// ---------------------------------------------------------------------
// Utility Routine: pickOutputs
//
// From the given ColStatDescList, populate columnStats_ with column
// descriptors that are useful based on the characteristic outputs for
// the group.
//
// Histograms that describe the input (outer) data are always included in
// the output; in addition, a histogram whose column appears in the required
// output list is a useful histogram and is included as well.
//
// ---------------------------------------------------------------------
void EstLogProp::pickOutputs( ColStatDescList & columnStats,
			      const EstLogPropSharedPtr& inputEstLogProp,
			      const ValueIdSet specifiedOutputs,
			      const ValueIdSet predSet)
{

  const ColStatDescList & outerColStatsList = inputEstLogProp->getColStats();

  ValueIdSet colsRequiringHistograms = specifiedOutputs;
  
  // (i) see if the selection predicates contain any constant value or a 
  // constant expression

// (ii) check whether any columns of this table are joined to other columns
// but do not appear as characteristic outputs. Histograms should remain
// available for these columns, as they might be needed later. This problem
// was seen for temporary tables created as normal tables by triggers.


  colsRequiringHistograms.addSet(predSet.getColumnsForHistogram());
  colStats().setMCSkewedValueLists(columnStats.getMCSkewedValueLists()) ;

  NABoolean colStatDescAdded = FALSE;

  for (CollIndex i=0; i < columnStats.entries(); i++)
    {
      // we probably don't need 'em all, but this is the easiest way to
      // grab all of the multi-column uec information we'll need later
      colStats().insertIntoUecList (columnStats.getUecList()) ;
      colStats().setScanRowCountWithoutHint(columnStats.getScanRowCountWithoutHint());
      NABoolean found = FALSE;

      // Note: The following inserts into a ColStatDescList should not
      // have to be deep copies.  From this point on, ColStatDescs that
      // describe the output of the calling operator are read-only.

      ColStatDescSharedPtr colStatDesc = columnStats[i];

      // the value-id we're looking for
      const ValueId columnId = colStatDesc->getVEGColumn() ;

      for (CollIndex j=0 ; j < outerColStatsList.entries() ; j++)
	{
	  if (columnId == outerColStatsList[j]->getVEGColumn() OR
              (CmpCommon::context()->showQueryStats()))
            {
              colStats().insert(colStatDesc) ;
              found = TRUE;
              if(!colStatDescAdded)
                colStatDescAdded = TRUE;
              break ; // jump to next ColStatDesc
            }
	}

    // OK, the valueid doesn't match directly -- but there are still a
    // couple of things to check in order to verify whether or not we're
    // interested in keeping the i'th ColStatDesc ...

	ValueId throwaway ; // used by the second clause below

    if ( NOT found  AND
	 (columnId != NULL_VALUE_ID) AND
         (colsRequiringHistograms.contains (columnId) OR
          colsRequiringHistograms.referencesTheGivenValue (columnId, throwaway) OR
	  columnId.isInvolvedInJoinAndConst() OR
          CmpCommon::context()->showQueryStats() )
	)
	{
	  colStats().insert(colStatDesc);
	  found = TRUE;
	  if(!colStatDescAdded)
	    colStatDescAdded = TRUE;
	}
	
	if (CURRSTMT_OPTDEFAULTS->incorporateSkewInCosting())
	{
	  // If the column is referenced for histograms but is not needed
	  // beyond this point, save its max frequency; it may be used later
	  // in costing if this column is part of the partitioning key.

	  ColStatsSharedPtr stat = colStatDesc->getColStats();
	  if (!(stat->isVirtualColForHist() ) && NOT found &&
                    !(stat->isOrigFakeHist() ) )
	  {
            const ValueId col = colStatDesc->getColumn();
            ColAnalysis * colAnalysis = col.colAnalysis();
            if (colAnalysis)
            {
              NAColumn * column = stat->getStatColumns()[0];

              if (column->isReferencedForHistogram())
              {
                CostScalar maxFreq = columnStats.getMaxFreq(columnId);
                colAnalysis->setMaxFreq(maxFreq);
                colAnalysis->setFinalUec(stat->getTotalUec());
                colAnalysis->setFinalRC(stat->getRowcount());
              }
            }
          }
	}
    } // for columnStats.entries()

  if (!colStatDescAdded && columnStats.entries() > 0)
    colStats().insert(columnStats[0]);
} // pickOutputs
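
// Illustrative example (not part of the original source; T.A and T.B are
// hypothetical columns): if T.A appears in specifiedOutputs and T.B is
// involved in a join with a constant, the ColStatDescs for both are kept
// (T.A via colsRequiringHistograms, T.B via isInvolvedInJoinAndConst()).
// A ColStatDesc that is not kept is simply dropped from the output; however,
// when incorporateSkewInCosting() is on and its column is referenced for
// histograms, its max frequency, final UEC and row count are still recorded
// on the column's ColAnalysis for possible use later in costing.
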
// -----------------------------------------------------------------------
// make an IndexDesc from an existing TableDesc and an NAFileSet
// -----------------------------------------------------------------------
IndexDesc::IndexDesc(TableDesc *tdesc, 
                     NAFileSet *fileSet, 
                     CmpContext* cmpContext)
     : tableDesc_(tdesc), clusteringIndexFlag_(FALSE), 
       identityColumnUniqueIndexFlag_(FALSE), partFunc_(NULL),
       fileSet_(fileSet), cmpContext_(cmpContext), scanBasicCosts_(NULL)
{
  DCMPASSERT( tdesc != NULL AND fileSet != NULL );

  Lng32 ixColNumber;
  ValueId keyValueId;
  ValueId baseValueId;

  const NATable *naTable = tdesc->getNATable();

  indexLevels_ = fileSet_->getIndexLevels();

  // ---------------------------------------------------------------------
  // Make the column list for the index or vertical partition.
  // Any reference to index also holds for vertical partitions.
  // ---------------------------------------------------------------------
  const NAColumnArray & allColumns = fileSet_->getAllColumns();

  // any index gets a new set of IndexColumn
  // item expressions and new value ids
  CollIndex i = 0;
  for (i = 0; i < allColumns.entries(); i++)
    {
      ItemExpr *baseItemExpr = NULL;

      // make a new IndexColumn item expression, indicate how it is
      // defined (in terms of base table columns) and give a value
      // id to the new IndexColumn expression
      if (allColumns[i]->getPosition() >= 0)
	{
	  baseValueId =
	    tdesc->getColumnList()[allColumns[i]->getPosition()];
	  baseItemExpr = baseValueId.getItemExpr();
	}
      else
	{
	  // this column doesn't exist in the base table.
	  // This is the KEYTAG column of sql/mp indices.
	  ItemExpr * keytag = new(wHeap())
            NATypeToItem((NAType *)(allColumns[i]->getType()));
	  keytag->synthTypeAndValueId();
	  baseValueId = keytag->getValueId();

	  baseItemExpr = NULL;
	}

#pragma nowarn(1506)   // warning elimination 
      IndexColumn *ixcol = new(wHeap()) IndexColumn(fileSet_,i,baseValueId);
#pragma warn(1506)  // warning elimination 
      ixcol->synthTypeAndValueId();

      // add the newly obtained value id to the index column list
      indexColumns_.insert(ixcol->getValueId());

      // if the index column is defined as a 1:1 copy of a base
      // column, add it as an equivalent index column (EIC) to the
      // base column item expression
      if ((baseItemExpr) &&
	  (baseItemExpr->getOperatorType() == ITM_BASECOLUMN))
	((BaseColumn *) baseItemExpr)->addEIC(ixcol->getValueId());
    }

  // ---------------------------------------------------------------------
  // make the list of access key columns in the index and make a list
  // of the order that the index provides
  // ---------------------------------------------------------------------
  const NAColumnArray & indexKeyColumns = fileSet_->getIndexKeyColumns();
  for (i = 0; i < indexKeyColumns.entries(); i++)
    {
      // which column of the index is this (usually this will be == i)
#pragma nowarn(1506)   // warning elimination 

      if ( !naTable->isHbaseTable() )
         ixColNumber = allColumns.index(indexKeyColumns[i]);
      else {
         // For Hbase tables, a new NAColumn is created for every column
         // in an index. The above pointer-based lookup for the key column
         // in base table will only find the index column itself. The
         // fix is to lookup by the column name and type as is 
         // implemented by the getColumnPosition() method.
         ixColNumber = allColumns.getColumnPosition(*indexKeyColumns[i]);
         CMPASSERT(ixColNumber >= 0);
      }

#pragma warn(1506)  // warning elimination 

      // insert the value id of the index column into the key column
      // value id list
      keyValueId = indexColumns_[ixColNumber];
      indexKey_.insert(keyValueId);

      // insert the same value id into the order list, if the column
      // is in ascending order, otherwise insert the inverse of the
      // column
      if (indexKeyColumns.isAscending(i))
	{
	  orderOfKeyValues_.insert(keyValueId);
	}
      else
	{
	  InverseOrder *invExpr = new(wHeap())
	    InverseOrder(keyValueId.getItemExpr());
	  invExpr->synthTypeAndValueId();
	  orderOfKeyValues_.insert(invExpr->getValueId());
	}
    }

  markIdentityColumnUniqueIndex(tdesc);

  // ---------------------------------------------------------------------
  // Find the clustering key columns in the index and store their value
  // ids in clusteringKey_
  // ---------------------------------------------------------------------
  NABoolean found = TRUE;
  const NAColumnArray & clustKeyColumns =
                      naTable->getClusteringIndex()->getIndexKeyColumns();

  for (i = 0; i < clustKeyColumns.entries() AND found; i++)
    {
      // which column of the index is this?
#pragma nowarn(1506)   // warning elimination 
      ixColNumber = allColumns.index(clustKeyColumns[i]);
#pragma warn(1506)  // warning elimination 

      found = (ixColNumber != NULL_COLL_INDEX);

      if (found)
	{
	  // insert the value id of the index column into the clustering key
	  // value id list
	  keyValueId = indexColumns_[ixColNumber];
	  clusteringKey_.insert(keyValueId);
	}
      else
	{
	  // clustering key isn't contained in this index, clear the
	  // list that is supposed to indicate the clustering key
	  clusteringKey_.clear();
	}
    }

  // ---------------------------------------------------------------------
  // make the list of partitioning key columns in the index and make a list
  // of the order that the partitioning provides
  // ---------------------------------------------------------------------
  const NAColumnArray & partitioningKeyColumns 
                                    = fileSet_->getPartitioningKeyColumns();
  for (i = 0; i < partitioningKeyColumns.entries(); i++)
    {
      // which column of the index is this 
#pragma nowarn(1506)   // warning elimination 
      ixColNumber = allColumns.index(partitioningKeyColumns[i]);
#pragma warn(1506)  // warning elimination 

      // insert the value id of the index column into the partitioning key
      // column value id list
      keyValueId = indexColumns_[ixColNumber];
      partitioningKey_.insert(keyValueId);

      // insert the same value id into the order list, if the column
      // is in ascending order, otherwise insert the inverse of the
      // column
      if (partitioningKeyColumns.isAscending(i))
	{
	  orderOfPartitioningKeyValues_.insert(keyValueId);
	}
      else
	{
	  InverseOrder *invExpr = new(wHeap())
	    InverseOrder(keyValueId.getItemExpr());
	  invExpr->synthTypeAndValueId();
	  orderOfPartitioningKeyValues_.insert(invExpr->getValueId());
	}
    }

  // ---------------------------------------------------------------------
  // If this index is partitioned, find the partitioning key columns
  // and build a partitioning function.
  // ---------------------------------------------------------------------
  if ((fileSet_->getCountOfFiles() > 1) ||
      (fileSet_->getPartitioningFunction() &&
       fileSet_->getPartitioningFunction()->
       isARoundRobinPartitioningFunction()))
    partFunc_ = fileSet_->getPartitioningFunction()->
      createPartitioningFunctionForIndexDesc(this);
  
} // IndexDesc::IndexDesc()
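
// Illustrative example (not part of the original source; A and B are
// hypothetical columns): for an index keyed on (A ASC, B DESC), the loop
// over getIndexKeyColumns() inserts A's IndexColumn value id directly into
// orderOfKeyValues_, while B's is first wrapped in an InverseOrder item
// expression, so the sort order provided by the index is recorded as
// (A, INVERSE(B)).  The partitioning key columns receive the same
// ASC/DESC treatment when orderOfPartitioningKeyValues_ is built.
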
short
PhysSample::codeGen(Generator *generator) 
{  
  // Get a local handle on some of the generator objects.
  //
  CollHeap *wHeap = generator->wHeap();
  Space *space = generator->getSpace();
  MapTable *mapTable = generator->getMapTable();
  ExpGenerator *expGen = generator->getExpGenerator();

  // Allocate a new map table for this node. This must be done
  // before generating the code for my child so that this local
  // map table will be sandwiched between the map tables already
  // generated and the map tables generated by my offspring.
  //
  // Only the items available as output from this node will be put in the
  // local map table. Before exiting this function, all of my offspring's
  // map tables will be removed. Thus, none of the outputs from nodes below
  // this node will be visible to nodes above it, except those placed in the
  // local map table and those that already exist in my ancestors' map
  // tables. This is the standard mechanism used in the generator for
  // managing access to item expressions.
  //
  MapTable *localMapTable = generator->appendAtEnd();

  // Since this operation doesn't modify the row on the way down the tree,
  // go ahead and generate the child subtree. Capture the given composite row
  // descriptor and the child's returned TDB and composite row descriptor.
  //
  ex_cri_desc * givenCriDesc = generator->getCriDesc(Generator::DOWN);
  child(0)->codeGen(generator);
  ComTdb *childTdb = (ComTdb*)generator->getGenObj();
  ex_cri_desc * childCriDesc = generator->getCriDesc(Generator::UP);
  ExplainTuple *childExplainTuple = generator->getExplainTuple();

  // Generate the sampling expression.
  //
  ex_expr *balExpr = NULL;
  Int32 returnFactorOffset = 0;
  ValueId val;
  val = balanceExpr().init();
  if(balanceExpr().next(val))
    expGen->generateSamplingExpr(val, &balExpr, returnFactorOffset);

  // Alias the sampleColumns() so that they reference the underlying
  // expressions directly. This is done to avoid having to generate and
  // execute a project expression that simply moves the columns from
  // one tupp to another to reflect the application of the sampledCol
  // function.
  //
//   ValueId valId;
//   for(valId = sampledColumns().init();
//       sampledColumns().next(valId);
//       sampledColumns().advance(valId))
//     {
//       MapInfo *mapInfoChild = localMapTable->getMapInfoAsIs
// 	(valId.getItemExpr()->child(0)->castToItemExpr()->getValueId());
//       GenAssert(mapInfoChild, "Sample::codeGen -- no child map info.");
//       Attributes *attr = mapInfoChild->getAttr();
//       MapInfo *mapInfo = localMapTable->addMapInfoToThis(valId, attr);
//       mapInfo->codeGenerated();
//     }
  // Check whether any of the sampled columns are LOB columns. If so,
  // raise an error.
  ValueId valId;
  for(valId = sampledColumns().init();
      sampledColumns().next(valId);
      sampledColumns().advance(valId))
    {
      const NAType &colType = valId.getType();
      if ((colType.getFSDatatype() == REC_BLOB) ||
	  (colType.getFSDatatype() == REC_CLOB))
	{
	   *CmpCommon::diags() << DgSqlCode(-4322);
	   GenExit();
	}
    }
  // Now, remove all attributes from the map table except the stuff in the
  // local map table -- the result of this node.
  //
//  localMapTable->removeAll();

  // Generate the expression to evaluate predicate on the sampled row.
  //
  ex_expr *postPred = 0;
  if (!selectionPred().isEmpty()) {
    ItemExpr * newPredTree 
      = selectionPred().rebuildExprTree(ITM_AND,TRUE,TRUE);

    expGen->generateExpr(newPredTree->getValueId(), ex_expr::exp_SCAN_PRED,
			 &postPred);
  }

  // Construct the Sample TDB.
  //
  ComTdbSample *sampleTdb
    = new(space) ComTdbSample(NULL,
			      balExpr,
			      returnFactorOffset,
			      postPred,
			      childTdb,
			      givenCriDesc,
			      childCriDesc,
			      (queue_index)getDefault(GEN_SAMPLE_SIZE_DOWN),
			      (queue_index)getDefault(GEN_SAMPLE_SIZE_UP));
  generator->initTdbFields(sampleTdb);

  if(!generator->explainDisabled()) {
    generator->
      setExplainTuple(addExplainInfo(sampleTdb,
                                     childExplainTuple,
                                     0,
                                     generator));
  }

  generator->setCriDesc(givenCriDesc, Generator::DOWN);
  generator->setCriDesc(childCriDesc, Generator::UP);
  generator->setGenObj(this, sampleTdb);

  return 0;
}
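
// Illustrative sketch (not part of the original source): the TDB fragment
// produced by this codeGen() places the ComTdbSample directly on top of the
// child's TDB.  At run time, balExpr (the sampling/balance expression) is
// evaluated to decide which child rows are returned, and rows that survive
// sampling are then filtered by postPred, the selection predicate generated
// above.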