Example #1
void peanoclaw::mappings::Cleanup::mergeWithWorker(
    peanoclaw::Cell&           localCell,
    const peanoclaw::Cell&     receivedMasterCell,
    const tarch::la::Vector<DIMENSIONS,double>&  cellCentre,
    const tarch::la::Vector<DIMENSIONS,double>&  cellSize,
    int                                          level
) {
    logTraceInWith2Arguments( "mergeWithWorker(...)", localCell.toString(), receivedMasterCell.toString() );
    // @todo Insert your code here
    logTraceOutWith1Argument( "mergeWithWorker(...)", localCell.toString() );
}
Example #2
void peanoclaw::mappings::ValidateGrid::mergeWithWorker(
  peanoclaw::Cell&           localCell, 
  const peanoclaw::Cell&     receivedMasterCell,
  const tarch::la::Vector<DIMENSIONS,double>&  cellCentre,
  const tarch::la::Vector<DIMENSIONS,double>&  cellSize,
  int                                          level
) {
  logTraceInWith2Arguments( "mergeWithWorker(...)", localCell.toString(), receivedMasterCell.toString() );
  // @todo Insert your code here

  logInfo("", "ValidateGrid Receive on rank " << tarch::parallel::Node::getInstance().getRank());

  logTraceOutWith1Argument( "mergeWithWorker(...)", localCell.toString() );
}
Example #3
void peanoclaw::mappings::Cleanup::ascend(
    peanoclaw::Cell * const    fineGridCells,
    peanoclaw::Vertex * const  fineGridVertices,
    const peano::grid::VertexEnumerator&          fineGridVerticesEnumerator,
    peanoclaw::Vertex * const  coarseGridVertices,
    const peano::grid::VertexEnumerator&          coarseGridVerticesEnumerator,
    peanoclaw::Cell&           coarseGridCell
) {
    logTraceInWith2Arguments( "ascend(...)", coarseGridCell.toString(), coarseGridVerticesEnumerator.toString() );
    // @todo Insert your code here
    logTraceOut( "ascend(...)" );
}
Example #4
bool peanoclaw::ParallelSubgrid::isAdjacentToLocalSubdomain(
  const peanoclaw::Cell&               coarseGridCell,
  peanoclaw::Vertex * const            fineGridVertices,
  const peano::grid::VertexEnumerator& fineGridVerticesEnumerator
) {
  #ifdef Parallel
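  // The cell counts as adjacent to the local subdomain if the coarse cell itself
  // is held locally, or if any of the 2^d adjacent fine-grid vertices touches the
  // domain of the local rank.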
  bool isAdjacentToLocalDomain = !coarseGridCell.isAssignedToRemoteRank();
  for(int i = 0; i < TWO_POWER_D; i++) {
    isAdjacentToLocalDomain |= fineGridVertices[fineGridVerticesEnumerator(i)].isAdjacentToDomainOf(
        tarch::parallel::Node::getInstance().getRank()
      );
  }
  return isAdjacentToLocalDomain;
  #else
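  // Serial build: there is only one rank, so everything is local.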
  return true;
  #endif
}
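A hedged usage sketch, not part of the original sources: a mapping event could use isAdjacentToLocalSubdomain to skip subgrids that do not touch the local rank's domain. The enterCell event shown here is modeled on the createCell signature from Example #5, and calling the predicate as a static member of peanoclaw::ParallelSubgrid is an assumption.
void peanoclaw::mappings::Cleanup::enterCell(
  peanoclaw::Cell&                     fineGridCell,
  peanoclaw::Vertex * const            fineGridVertices,
  const peano::grid::VertexEnumerator& fineGridVerticesEnumerator,
  peanoclaw::Vertex * const            coarseGridVertices,
  const peano::grid::VertexEnumerator& coarseGridVerticesEnumerator,
  peanoclaw::Cell&                     coarseGridCell,
  const tarch::la::Vector<DIMENSIONS,int>& fineGridPositionOfCell
) {
  // Hypothetical guard: skip all work for subgrids that are not adjacent to the
  // local subdomain. In a non-parallel build the predicate always returns true.
  if (!peanoclaw::ParallelSubgrid::isAdjacentToLocalSubdomain(
        coarseGridCell, fineGridVertices, fineGridVerticesEnumerator)) {
    return;
  }
  // ... per-cell work for local subgrids goes here ...
}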
Example #5
void peanoclaw::mappings::ValidateGrid::createCell(
      peanoclaw::Cell&                 fineGridCell,
      peanoclaw::Vertex * const        fineGridVertices,
      const peano::grid::VertexEnumerator&                fineGridVerticesEnumerator,
      peanoclaw::Vertex * const        coarseGridVertices,
      const peano::grid::VertexEnumerator&                coarseGridVerticesEnumerator,
      peanoclaw::Cell&                 coarseGridCell,
      const tarch::la::Vector<DIMENSIONS,int>&                             fineGridPositionOfCell
) {
  logTraceInWith4Arguments( "createCell(...)", fineGridCell, fineGridVerticesEnumerator.toString(), coarseGridCell, fineGridPositionOfCell );

  //TODO unterweg debug
  logInfo("", "Creating cell " << fineGridVerticesEnumerator.getVertexPosition(0) << ", "
      << fineGridVerticesEnumerator.getCellSize()
      << ", index=" << fineGridCell.getCellDescriptionIndex()
      << ",level=" << fineGridVerticesEnumerator.getLevel()
      );

  logTraceOutWith1Argument( "createCell(...)", fineGridCell );
}
Example #6
void peanoclaw::parallel::MasterWorkerAndForkJoinCommunicator::mergeCellDuringForkOrJoin(
  peanoclaw::Cell&                      localCell,
  const peanoclaw::Cell&                remoteCell,
  tarch::la::Vector<DIMENSIONS, double> cellSize,
  const peanoclaw::State&               state
) {
  #ifdef Parallel
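  // Forking (as opposed to joining) if this node is not the global master and the
  // communication partner is the node pool's master rank.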
  bool isForking = !tarch::parallel::Node::getInstance().isGlobalMaster()
                   && _remoteRank == tarch::parallel::NodePool::getInstance().getMasterRank();

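  // Merge only inside cells that take part in this fork or join: when forking, the
  // remote cell must not already be assigned to another rank; when joining, it must
  // belong to the rank we are merging with.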
  if(localCell.isInside()
      && (
           ( isForking && !remoteCell.isAssignedToRemoteRank())
        || (!isForking && remoteCell.getRankOfRemoteNode() == _remoteRank)
      )
    ) {
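    // Remote case: the incoming patch still has to be received, but it is only
    // buffered in a temporary patch and deleted again right afterwards.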
    if(localCell.isRemote(state, false, false)) {
      if(tarch::parallel::NodePool::getInstance().getMasterRank() != 0) {
        assertionEquals2(localCell.getCellDescriptionIndex(), -2, _position, _level);
      }
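      // Build a throwaway patch at the cell's position, used purely as a receive
      // buffer.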
      Patch temporaryPatch(
        _position - cellSize * 0.5,
        cellSize,
        0,
        0,
        0,
        1,
        1,
        0.0,
        _level
      );

      receivePatch(temporaryPatch.getCellDescriptionIndex());
      temporaryPatch.reloadCellDescription();

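      // Tear the received patch down step by step: a leaf becomes virtual, a
      // virtual patch becomes non-virtual, then the cell description is deleted.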
      if(temporaryPatch.isLeaf()) {
        temporaryPatch.switchToVirtual();
      }
      if(temporaryPatch.isVirtual()) {
        temporaryPatch.switchToNonVirtual();
      }
      CellDescriptionHeap::getInstance().deleteData(temporaryPatch.getCellDescriptionIndex());
    } else {
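      // Local case: receive the incoming patch data directly into the local patch
      // and keep it.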
      assertion2(localCell.getCellDescriptionIndex() != -1, _position, _level);

      Patch localPatch(localCell);

      assertion2(
        (!localPatch.isLeaf() && !localPatch.isVirtual())
        || localPatch.getUIndex() >= 0,
        _position,
        _level
      );

      receivePatch(localPatch.getCellDescriptionIndex());
      localPatch.loadCellDescription(localCell.getCellDescriptionIndex());

      assertion1(!localPatch.isRemote(), localPatch);

      //TODO unterweg dissertation: If the adjacency information on the vertices of
      // the new node is not yet set correctly, we cannot advance right away. We may
      // even need two iterations without any action... (because of the back-and-forth
      // movement).
      localPatch.setSkipNextGridIteration(2);

      assertionEquals1(localPatch.getLevel(), _level, localPatch);
    }
  }
  #endif
}