Example #1
// --------------------------------------------------------------------- //
//THINK: is this specific to PC and DC?
void DecompVarPool::reExpand(const DecompConstraintSet& modelCore,
                             const double                tolZero)
{
   //THIS IS WRONG...
   //  In masterSI the row order is:    A'', convexity, cuts.
   //  In modelCore.M the row order is: A'', cuts.
   //  So the sparseCol built at the end of this loop has its entries in the
   //  wrong order: A'', cuts, convexity.
   double* denseCol = new double[modelCore.getNumRows() + 1];
   vector<DecompWaitingCol>::iterator vi;

   for (vi = begin(); vi != end(); vi++) {
      // ---
      // --- get dense column = A''s, append convexity constraint on end
      // ---
      modelCore.M->times((*vi).getVarPtr()->m_s, denseCol);
      denseCol[modelCore.getNumRows()] = 1.0;
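      // denseCol now holds [A'' s ; 1.0] for this variable's solution s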
      // ---
      // --- create a sparse column from the dense column
      // ---
      CoinPackedVector* sparseCol =
         UtilPackedVectorFromDense(modelCore.getNumRows() + 1,
                                   denseCol, tolZero);
      (*vi).deleteCol();
      (*vi).setCol(sparseCol);
   }

   setColsAreValid(true);
   UTIL_DELARR(denseCol);
}
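
The conversion above goes through UtilPackedVectorFromDense. Below is a minimal sketch of what such a helper is assumed to do -- pack every entry of a dense array whose magnitude exceeds tolZero into a new CoinPackedVector -- using only the CoinUtils API. This is an illustration of the technique, not the library's actual implementation; the name packVectorFromDense is hypothetical.

#include <cmath>
#include "CoinPackedVector.hpp"

// Illustrative sketch: build a sparse CoinPackedVector from a dense array,
// dropping entries whose magnitude is within the zero tolerance.
// The caller owns the returned vector.
static CoinPackedVector* packVectorFromDense(const int     len,
                                             const double* dense,
                                             const double  tolZero)
{
   CoinPackedVector* v = new CoinPackedVector();
   v->reserve(len);
   for (int i = 0; i < len; i++) {
      if (std::fabs(dense[i]) > tolZero) {
         v->insert(i, dense[i]);
      }
   }
   return v;
}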
Example #2
//===========================================================================//
void DecompAlgoRC::phaseDone()
{
   //take the current set of variables and solve the DW master to get a primal
   //TODO: right now we create the master from scratch each time -- we really
   //      need to append columns and warm start, especially if doing a lot
   //      of branching
   //delete m_masterSI;
   //m_masterSI = NULL;
   //m_masterSI = new OsiLpSolverInterface();
   //CoinAssertHint(m_masterSI, "Error: Out of Memory");
   //m_masterSI->messageHandler()->setLogLevel(m_param.LogLpLevel);
   DecompConstraintSet*           modelCore   = m_modelCore.getModel();
#if 0
   //---
   //--- Initialize the solver interface for the master problem.
   //--- PC: min c(s lam)
   //---     A''s   lam   >= b'',
   //---     sum{s} lam_s  = 1  ,
   //---            lam_s >= 0  , s in F'[0]
   //--- modelCore contains [A'', b''], from the original model in
   //--- terms of x. In this function we create the DW-LP in terms of
   //--- lambda, [[A''s, 1], [b'', 1]] and load that into the OSI
   //--- interface m_masterSI.
   //---
   //--- NOTE: if 0 is feasible to subproblem, we can relax convexity to <= 1
   //---
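   //---
   //--- Concretely, each variable s contributes one master column whose
   //--- objective coefficient is (c . s) and whose constraint column is
   //--- [A'' s ; 1], i.e. its A''-image with a single 1.0 appended for the
   //--- convexity row -- exactly the denseCol assembled in the loop below.
   //---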
   UtilPrintFuncBegin(m_osLog, m_classTag,
                      "phaseDone()", m_param.LogDebugLevel, 2);
   CoinAssert(m_vars.size() > 0);
   int nColsCore = modelCore->getNumCols();
   int nRowsCore = modelCore->getNumRows();
   modelCore->nBaseRowsOrig = modelCore->nBaseRows;
   modelCore->nBaseRows     = nRowsCore;
   //THINK? should initVars be in pool and do addVarsFromPool here?
   CoinPackedMatrix* M = new CoinPackedMatrix(true, 0, 0);
   M->setDimensions(nRowsCore + 1, 0);
   const int n_cols  = static_cast<int>(m_vars.size());
   double* colLB    = new double[n_cols];
   double* colUB    = new double[n_cols];
   double* obj      = new double[n_cols];
   double* denseCol = new double[nRowsCore + 1];
   CoinAssertHint(colLB && colUB && obj && denseCol, "Error: Out of Memory");
   int col_index     = 0;
   DecompVarList::iterator li;

   for (li = m_vars.begin(); li != m_vars.end(); li++) {
      UTIL_DEBUG(m_param.LogDebugLevel, 5,
                 (*li)->print(m_osLog, m_app);
                );
      //---
      //--- get dense column = A''s, append convexity constraint on end
      //---
      modelCore->M->times((*li)->m_s, denseCol);
      denseCol[nRowsCore] = 1.0;
      //---
      //--- create a sparse column from the dense column
      //---
      // THINK: do i need a DecompCol?
      // THINK: does this allocate memory for coinpackedvec twice?
      CoinPackedVector* sparseCol =
         UtilPackedVectorFromDense(nRowsCore + 1,
                                   denseCol, m_param.TolZero);
      UTIL_DEBUG(m_param.LogDebugLevel, 5,
                 (*m_osLog) << "\nSparse Col: \n";
                 UtilPrintPackedVector(*sparseCol, m_osLog);
                );
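
For reference, here is a hedged, self-contained sketch of how sparse columns like the ones built above are typically collected into a column-ordered CoinPackedMatrix and loaded into an OSI LP interface. This is illustrative only: OsiClpSolverInterface is just one possible backend, and loadMasterSketch is a hypothetical helper, not code from this file.

#include <vector>
#include "CoinPackedMatrix.hpp"
#include "CoinPackedVector.hpp"
#include "OsiClpSolverInterface.hpp"

// Illustrative sketch: append pre-built sparse columns to a column-ordered
// CoinPackedMatrix and load the resulting LP into an OSI solver interface.
static void loadMasterSketch(const std::vector<CoinPackedVector*>& cols,
                             const std::vector<double>&            obj,
                             const std::vector<double>&            colLB,
                             const std::vector<double>&            colUB,
                             const std::vector<double>&            rowLB,
                             const std::vector<double>&            rowUB)
{
   const int nRows = static_cast<int>(rowLB.size());
   CoinPackedMatrix M(true, 0, 0);   // column-ordered, as in phaseDone above
   M.setDimensions(nRows, 0);
   for (size_t j = 0; j < cols.size(); j++) {
      M.appendCol(*cols[j]);         // one column per lambda variable
   }
   OsiClpSolverInterface si;
   si.loadProblem(M, &colLB[0], &colUB[0], &obj[0], &rowLB[0], &rowUB[0]);
   si.initialSolve();
}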