Example #1
static PetscErrorCode  PCRedundantGetKSP_Redundant(PC pc,KSP *innerksp)
{
  PetscErrorCode ierr;
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  MPI_Comm       comm,subcomm;
  const char     *prefix;

  PetscFunctionBegin;
  if (!red->psubcomm) {
    ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
    ierr = PetscSubcommCreate(comm,&red->psubcomm);CHKERRQ(ierr);
    ierr = PetscSubcommSetNumber(red->psubcomm,red->nsubcomm);CHKERRQ(ierr);
    ierr = PetscSubcommSetType(red->psubcomm,PETSC_SUBCOMM_INTERLACED);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory((PetscObject)pc,sizeof(PetscSubcomm));CHKERRQ(ierr);

    /* create a new KSP that processes in each subcomm have a copy of */
    subcomm = PetscSubcommChild(red->psubcomm);

    ierr = KSPCreate(subcomm,&red->ksp);CHKERRQ(ierr);
    ierr = KSPSetErrorIfNotConverged(red->ksp,pc->erroriffailure);CHKERRQ(ierr);
    ierr = PetscObjectIncrementTabLevel((PetscObject)red->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)red->ksp);CHKERRQ(ierr);
    ierr = KSPSetType(red->ksp,KSPPREONLY);CHKERRQ(ierr);
    ierr = KSPGetPC(red->ksp,&red->pc);CHKERRQ(ierr);
    ierr = PCSetType(red->pc,PCLU);CHKERRQ(ierr);

    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    ierr = KSPSetOptionsPrefix(red->ksp,prefix);CHKERRQ(ierr);
    ierr = KSPAppendOptionsPrefix(red->ksp,"redundant_");CHKERRQ(ierr);
  }
  *innerksp = red->ksp;
  PetscFunctionReturn(0);
}
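Every example on this page follows one pattern: the outer PC's prefix is read back with PCGetOptionsPrefix(), copied onto the inner KSP with KSPSetOptionsPrefix(), and a fixed suffix is appended with KSPAppendOptionsPrefix(), so the inner solver's options compose behind the outer prefix. A minimal sketch of the caller's side; the "outer_" prefix and the option values are invented for illustration, not taken from the source:

KSP            ksp,inner;
PC             pc;
PetscErrorCode ierr;

ierr = KSPCreate(PETSC_COMM_WORLD,&ksp);CHKERRQ(ierr);
ierr = KSPSetOptionsPrefix(ksp,"outer_");CHKERRQ(ierr);  /* hypothetical outer prefix */
ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
ierr = PCSetType(pc,PCREDUNDANT);CHKERRQ(ierr);
ierr = PCRedundantGetKSP(pc,&inner);CHKERRQ(ierr);       /* dispatches to the routine above */
/* the inner solver is now configurable as, e.g.:
   -outer_redundant_ksp_type gmres -outer_redundant_pc_type ilu */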
Example #2
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCCreate_Redistribute"
PetscErrorCode  PCCreate_Redistribute(PC pc)
{
  PetscErrorCode  ierr;
  PC_Redistribute *red;
  const char      *prefix;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,PC_Redistribute,&red);CHKERRQ(ierr);
  pc->data            = (void*)red;

  pc->ops->apply           = PCApply_Redistribute;
  pc->ops->applytranspose  = 0;
  pc->ops->setup           = PCSetUp_Redistribute;
  pc->ops->destroy         = PCDestroy_Redistribute;
  pc->ops->setfromoptions  = PCSetFromOptions_Redistribute;
  pc->ops->view            = PCView_Redistribute;

  ierr = KSPCreate(((PetscObject)pc)->comm,&red->ksp);CHKERRQ(ierr);
  ierr = PetscObjectIncrementTabLevel((PetscObject)red->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(pc,red->ksp);CHKERRQ(ierr);
  ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
  ierr = KSPSetOptionsPrefix(red->ksp,prefix);CHKERRQ(ierr);
  ierr = KSPAppendOptionsPrefix(red->ksp,"redistribute_");CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
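Unlike the lazy creation in Example #1, this constructor creates the inner KSP eagerly and appends "redistribute_". A hedged fragment showing how a caller reaches it; the option values are illustrative:

PC             pc;    /* assumed obtained elsewhere, e.g. via KSPGetPC() */
KSP            inner;
PetscErrorCode ierr;

ierr = PCSetType(pc,PCREDISTRIBUTE);CHKERRQ(ierr);
ierr = PCRedistributeGetKSP(pc,&inner);CHKERRQ(ierr);
/* with no outer prefix set, the inner solver answers to:
   -redistribute_ksp_type bcgs -redistribute_pc_type jacobi */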
Example #3
EXTERN_C_BEGIN
#undef __FUNCT__  
#define __FUNCT__ "PCRedundantGetPC_Redundant"
PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantGetPC_Redundant(PC pc,PC *innerpc)
{
  PetscErrorCode ierr;
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  MPI_Comm       comm,subcomm;
  const char     *prefix;

  PetscFunctionBegin;
  if (!red->psubcomm) {
    ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
    ierr = PetscSubcommCreate(comm,red->nsubcomm,&red->psubcomm);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory(pc,sizeof(PetscSubcomm));CHKERRQ(ierr);

    /* create a new KSP that processes in each subcomm have a copy of */
    subcomm = red->psubcomm->comm;
    ierr = KSPCreate(subcomm,&red->ksp);CHKERRQ(ierr);
    ierr = PetscObjectIncrementTabLevel((PetscObject)red->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(pc,red->ksp);CHKERRQ(ierr);
    ierr = KSPSetType(red->ksp,KSPPREONLY);CHKERRQ(ierr);
    ierr = KSPGetPC(red->ksp,&red->pc);CHKERRQ(ierr);
    ierr = PCSetType(red->pc,PCLU);CHKERRQ(ierr);

    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    ierr = KSPSetOptionsPrefix(red->ksp,prefix);CHKERRQ(ierr); 
    ierr = KSPAppendOptionsPrefix(red->ksp,"redundant_");CHKERRQ(ierr); 
  }

  ierr = KSPGetPC(red->ksp,innerpc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
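In contrast to PCRedundantGetKSP() in Example #1, this accessor hands back the PC sitting inside the inner KSP rather than the KSP itself. A short sketch; the PCCHOLESKY override is purely illustrative:

PC             pc,innerpc; /* pc assumed to already be of type PCREDUNDANT */
PetscErrorCode ierr;

ierr = PCRedundantGetPC(pc,&innerpc);CHKERRQ(ierr); /* dispatches to the routine above */
ierr = PCSetType(innerpc,PCCHOLESKY);CHKERRQ(ierr); /* replace the PCLU default */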
Example #4
static PetscErrorCode PETSCKSP_DLLEXPORT PCKSPCreateKSP_KSP(PC pc)
{
  PetscErrorCode ierr;
  const char     *prefix;
  PC_KSP         *jac = (PC_KSP *)pc->data;

  PetscFunctionBegin;
  ierr = KSPCreate(((PetscObject)pc)->comm,&jac->ksp);CHKERRQ(ierr);
  ierr = PetscObjectIncrementTabLevel((PetscObject)jac->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
  ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
  ierr = KSPSetOptionsPrefix(jac->ksp,prefix);CHKERRQ(ierr);
  ierr = KSPAppendOptionsPrefix(jac->ksp,"ksp_");CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
Example #5
static PetscErrorCode  PCKSPCreateKSP_KSP(PC pc)
{
  PetscErrorCode ierr;
  const char     *prefix;
  PC_KSP         *jac = (PC_KSP*)pc->data;

  PetscFunctionBegin;
  ierr = KSPCreate(PetscObjectComm((PetscObject)pc),&jac->ksp);CHKERRQ(ierr);
  ierr = KSPSetErrorIfNotConverged(jac->ksp,pc->erroriffailure);CHKERRQ(ierr);
  ierr = PetscObjectIncrementTabLevel((PetscObject)jac->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
  ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
  ierr = KSPSetOptionsPrefix(jac->ksp,prefix);CHKERRQ(ierr);
  ierr = KSPAppendOptionsPrefix(jac->ksp,"ksp_");CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
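Because PCKSP appends "ksp_" to whatever prefix the outer solver already carries, its options use a doubled token when the outer KSP has the empty prefix. A sketch with invented values:

PC             pc; /* assumed obtained from the outer KSP via KSPGetPC() */
PetscErrorCode ierr;

ierr = PCSetType(pc,PCKSP);CHKERRQ(ierr);
/* inner-solver options then read "<outer prefix>ksp_" plus the usual names:
   -ksp_ksp_type richardson -ksp_pc_type sor -ksp_ksp_max_it 5 */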
Example #6
PetscErrorCode PCSetFromOptions_Exotic(PetscOptions *PetscOptionsObject,PC pc)
{
  PetscErrorCode ierr;
  PetscBool      flg;
  PC_MG          *mg = (PC_MG*)pc->data;
  PCExoticType   mgctype;
  PC_Exotic      *ctx = (PC_Exotic*) mg->innerctx;

  PetscFunctionBegin;
  ierr = PetscOptionsHead(PetscOptionsObject,"Exotic coarse space options");CHKERRQ(ierr);
  ierr = PetscOptionsEnum("-pc_exotic_type","face or wirebasket","PCExoticSetType",PCExoticTypes,(PetscEnum)ctx->type,(PetscEnum*)&mgctype,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PCExoticSetType(pc,mgctype);CHKERRQ(ierr);
  }
  ierr = PetscOptionsBool("-pc_exotic_direct_solver","use direct solver to construct interpolation","None",ctx->directSolve,&ctx->directSolve,NULL);CHKERRQ(ierr);
  if (!ctx->directSolve) {
    if (!ctx->ksp) {
      const char *prefix;
      ierr = KSPCreate(PETSC_COMM_SELF,&ctx->ksp);CHKERRQ(ierr);
      ierr = KSPSetErrorIfNotConverged(ctx->ksp,pc->erroriffailure);CHKERRQ(ierr);
      ierr = PetscObjectIncrementTabLevel((PetscObject)ctx->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
      ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)ctx->ksp);CHKERRQ(ierr);
      ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
      ierr = KSPSetOptionsPrefix(ctx->ksp,prefix);CHKERRQ(ierr);
      ierr = KSPAppendOptionsPrefix(ctx->ksp,"exotic_");CHKERRQ(ierr);
    }
    ierr = KSPSetFromOptions(ctx->ksp);CHKERRQ(ierr);
  }
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
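Note that the inner KSP here lives on PETSC_COMM_SELF and is only created when the direct coarse-space construction is disabled; it sits behind the appended "exotic_" prefix. A sketch with invented values:

PC             pc; /* assumed to belong to a DMDA-based solver */
PetscErrorCode ierr;

ierr = PCSetType(pc,PCEXOTIC);CHKERRQ(ierr);
/* command line (illustrative):
   -pc_exotic_type wirebasket -pc_exotic_direct_solver false -exotic_ksp_type cg */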
Example #7
static PetscErrorCode  PCCompositeAddPC_Composite(PC pc,PCType type)
{
  PC_Composite     *jac;
  PC_CompositeLink next,ilink;
  PetscErrorCode   ierr;
  PetscInt         cnt = 0;
  const char       *prefix;
  char             newprefix[20]; /* large enough for "sub_<idx>_" with multi-digit indices */

  PetscFunctionBegin;
  ierr        = PetscNewLog(pc,&ilink);CHKERRQ(ierr);
  ilink->next = 0;
  ierr        = PCCreate(PetscObjectComm((PetscObject)pc),&ilink->pc);CHKERRQ(ierr);
  ierr        = PetscLogObjectParent((PetscObject)pc,(PetscObject)ilink->pc);CHKERRQ(ierr);

  jac  = (PC_Composite*)pc->data;
  next = jac->head;
  if (!next) {
    jac->head       = ilink;
    ilink->previous = NULL;
  } else {
    cnt++;
    while (next->next) {
      next = next->next;
      cnt++;
    }
    next->next      = ilink;
    ilink->previous = next;
  }
  ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
  ierr = PCSetOptionsPrefix(ilink->pc,prefix);CHKERRQ(ierr);
  ierr = PetscSNPrintf(newprefix,sizeof(newprefix),"sub_%d_",(int)cnt);CHKERRQ(ierr); /* bounded, unlike sprintf() */
  ierr = PCAppendOptionsPrefix(ilink->pc,newprefix);CHKERRQ(ierr);
  /* type is set after prefix, because some methods may modify prefix, e.g. pcksp */
  ierr = PCSetType(ilink->pc,type);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
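Each sub-PC gets "sub_<index>_" appended after the outer prefix, so the sub-solvers are configured independently. A sketch; in the PETSc vintage of this snippet the public entry point is PCCompositeAddPC() (newer releases renamed it PCCompositeAddPCType()), and the option values are invented:

PC             pc;
PetscErrorCode ierr;

ierr = PCSetType(pc,PCCOMPOSITE);CHKERRQ(ierr);
ierr = PCCompositeAddPC(pc,PCILU);CHKERRQ(ierr); /* configurable as -sub_0_... */
ierr = PCCompositeAddPC(pc,PCSOR);CHKERRQ(ierr); /* configurable as -sub_1_... */
/* with an empty outer prefix, e.g.:
   -sub_0_pc_factor_levels 1 -sub_1_pc_sor_omega 1.5 */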
Example #8
File: mg.c Project: ziolai/petsc
/*@C
   PCMGSetLevels - Sets the number of levels to use with MG.
   Must be called before any other MG routine.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  levels - the number of levels
-  comms - optional communicators for each level; this is to allow solving the coarser problems
           on smaller sets of processors. Use PETSC_NULL_OBJECT for the default in Fortran

   Level: intermediate

   Notes:
     If the number of levels is one then the multigrid uses the -mg_levels prefix
     for setting the level options rather than the -mg_coarse prefix.

.keywords: MG, set, levels, multigrid

.seealso: PCMGSetType(), PCMGGetLevels()
@*/
PetscErrorCode  PCMGSetLevels(PC pc,PetscInt levels,MPI_Comm *comms)
{
  PetscErrorCode ierr;
  PC_MG          *mg        = (PC_MG*)pc->data;
  MPI_Comm       comm;
  PC_MG_Levels   **mglevels = mg->levels;
  PCMGType       mgtype     = mg->am;
  PetscInt       mgctype    = (PetscInt) PC_MG_CYCLE_V;
  PetscInt       i;
  PetscMPIInt    size;
  const char     *prefix;
  PC             ipc;
  PetscInt       n;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveInt(pc,levels,2);
  ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
  if (mg->nlevels == levels) PetscFunctionReturn(0);
  if (mglevels) {
    mgctype = mglevels[0]->cycles;
    /* changing the number of levels so free up the previous stuff */
    ierr = PCReset_MG(pc);CHKERRQ(ierr);
    n    = mglevels[0]->levels;
    for (i=0; i<n; i++) {
      if (mglevels[i]->smoothd != mglevels[i]->smoothu) {
        ierr = KSPDestroy(&mglevels[i]->smoothd);CHKERRQ(ierr);
      }
      ierr = KSPDestroy(&mglevels[i]->smoothu);CHKERRQ(ierr);
      ierr = PetscFree(mglevels[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree(mg->levels);CHKERRQ(ierr);
  }

  mg->nlevels = levels;

  ierr = PetscMalloc1(levels,&mglevels);CHKERRQ(ierr);
  ierr = PetscLogObjectMemory((PetscObject)pc,levels*(sizeof(PC_MG*)));CHKERRQ(ierr);

  ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);

  mg->stageApply = 0;
  for (i=0; i<levels; i++) {
    ierr = PetscNewLog(pc,&mglevels[i]);CHKERRQ(ierr);

    mglevels[i]->level               = i;
    mglevels[i]->levels              = levels;
    mglevels[i]->cycles              = mgctype;
    mg->default_smoothu              = 2;
    mg->default_smoothd              = 2;
    mglevels[i]->eventsmoothsetup    = 0;
    mglevels[i]->eventsmoothsolve    = 0;
    mglevels[i]->eventresidual       = 0;
    mglevels[i]->eventinterprestrict = 0;

    if (comms) comm = comms[i];
    ierr = KSPCreate(comm,&mglevels[i]->smoothd);CHKERRQ(ierr);
    ierr = KSPSetErrorIfNotConverged(mglevels[i]->smoothd,pc->erroriffailure);CHKERRQ(ierr);
    ierr = PetscObjectIncrementTabLevel((PetscObject)mglevels[i]->smoothd,(PetscObject)pc,levels-i);CHKERRQ(ierr);
    ierr = KSPSetOptionsPrefix(mglevels[i]->smoothd,prefix);CHKERRQ(ierr);
    ierr = PetscObjectComposedDataSetInt((PetscObject) mglevels[i]->smoothd, PetscMGLevelId, mglevels[i]->level);CHKERRQ(ierr);
    if (i || levels == 1) {
      char tprefix[128];

      ierr = KSPSetType(mglevels[i]->smoothd,KSPCHEBYSHEV);CHKERRQ(ierr);
      ierr = KSPSetConvergenceTest(mglevels[i]->smoothd,KSPConvergedSkip,NULL,NULL);CHKERRQ(ierr);
      ierr = KSPSetNormType(mglevels[i]->smoothd,KSP_NORM_NONE);CHKERRQ(ierr);
      ierr = KSPGetPC(mglevels[i]->smoothd,&ipc);CHKERRQ(ierr);
      ierr = PCSetType(ipc,PCSOR);CHKERRQ(ierr);
      ierr = KSPSetTolerances(mglevels[i]->smoothd,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT, mg->default_smoothd);CHKERRQ(ierr);

      sprintf(tprefix,"mg_levels_%d_",(int)i);
      ierr = KSPAppendOptionsPrefix(mglevels[i]->smoothd,tprefix);CHKERRQ(ierr);
    } else {
      ierr = KSPAppendOptionsPrefix(mglevels[0]->smoothd,"mg_coarse_");CHKERRQ(ierr);

      /* coarse solve is (redundant) LU by default; set shifttype NONZERO to avoid annoying zero-pivot in LU preconditioner */
      ierr = KSPSetType(mglevels[0]->smoothd,KSPPREONLY);CHKERRQ(ierr);
      ierr = KSPGetPC(mglevels[0]->smoothd,&ipc);CHKERRQ(ierr);
      ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
      if (size > 1) {
        ierr = PCSetType(ipc,PCREDUNDANT);CHKERRQ(ierr);
      } else {
        ierr = PCSetType(ipc,PCLU);CHKERRQ(ierr);
      }
      ierr = PCFactorSetShiftType(ipc,MAT_SHIFT_INBLOCKS);CHKERRQ(ierr);
    }
    ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)mglevels[i]->smoothd);CHKERRQ(ierr);

    mglevels[i]->smoothu = mglevels[i]->smoothd;
    mg->rtol             = 0.0;
    mg->abstol           = 0.0;
    mg->dtol             = 0.0;
    mg->ttol             = 0.0;
    mg->cyclesperpcapply = 1;
  }
  mg->levels = mglevels;
  ierr = PCMGSetType(pc,mgtype);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
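The loop above gives the coarse level the "mg_coarse_" prefix and every finer level "mg_levels_<i>_", so each smoother is addressable individually. A sketch; the level count and option values are illustrative:

PC             pc;
PetscErrorCode ierr;

ierr = PCSetType(pc,PCMG);CHKERRQ(ierr);
ierr = PCMGSetLevels(pc,3,NULL);CHKERRQ(ierr);
/* per-level options then read, e.g.:
   -mg_coarse_pc_type lu -mg_levels_1_ksp_type chebyshev -mg_levels_2_ksp_max_it 4 */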
Example #9
static PetscErrorCode PCSetUp_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscInt       mstart,mend,mlocal,M;
  PetscMPIInt    size;
  MPI_Comm       comm,subcomm;
  Vec            x;
  const char     *prefix;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
  
  /* if pmatrix set by user is sequential then we do not need to gather the parallel matrix */
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  if (size == 1) red->useparallelmat = PETSC_FALSE;

  if (!pc->setupcalled) {
    PetscInt mloc_sub;
    if (!red->psubcomm) {
      ierr = PetscSubcommCreate(comm,&red->psubcomm);CHKERRQ(ierr);
      ierr = PetscSubcommSetNumber(red->psubcomm,red->nsubcomm);CHKERRQ(ierr);
      ierr = PetscSubcommSetType(red->psubcomm,PETSC_SUBCOMM_CONTIGUOUS);CHKERRQ(ierr);
      /* enable runtime switch of psubcomm type, e.g., '-psubcomm_type interlaced' */
      ierr = PetscSubcommSetFromOptions(red->psubcomm);CHKERRQ(ierr);
      ierr = PetscLogObjectMemory((PetscObject)pc,sizeof(PetscSubcomm));CHKERRQ(ierr);

      /* create a new KSP that processes in each subcomm have a copy of */
      subcomm = PetscSubcommChild(red->psubcomm);

      ierr = KSPCreate(subcomm,&red->ksp);CHKERRQ(ierr);
      ierr = KSPSetErrorIfNotConverged(red->ksp,pc->erroriffailure);CHKERRQ(ierr);
      ierr = PetscObjectIncrementTabLevel((PetscObject)red->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
      ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)red->ksp);CHKERRQ(ierr);
      ierr = KSPSetType(red->ksp,KSPPREONLY);CHKERRQ(ierr);
      ierr = KSPGetPC(red->ksp,&red->pc);CHKERRQ(ierr);
      ierr = PCSetType(red->pc,PCLU);CHKERRQ(ierr);

      ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
      ierr = KSPSetOptionsPrefix(red->ksp,prefix);CHKERRQ(ierr);
      ierr = KSPAppendOptionsPrefix(red->ksp,"redundant_");CHKERRQ(ierr);
    } else {
      subcomm = PetscSubcommChild(red->psubcomm);
    }

    if (red->useparallelmat) {
      /* grab the parallel matrix and put it into processors of a subcommunicator */
      ierr = MatCreateRedundantMatrix(pc->pmat,red->psubcomm->n,subcomm,MAT_INITIAL_MATRIX,&red->pmats);CHKERRQ(ierr);
      ierr = KSPSetOperators(red->ksp,red->pmats,red->pmats);CHKERRQ(ierr);
       
      /* get working vectors xsub and ysub */
      ierr = MatCreateVecs(red->pmats,&red->xsub,&red->ysub);CHKERRQ(ierr);

      /* create working vectors xdup and ydup.
       xdup concatenates all xsub's contiguously to form an MPI vector over dupcomm (see PetscSubcommCreate_interlaced()).
       ydup concatenates all ysub and has empty local arrays because ysub's arrays will be placed into it.
       Note: we use communicator dupcomm, not PetscObjectComm((PetscObject)pc)! */
      ierr = MatGetLocalSize(red->pmats,&mloc_sub,NULL);CHKERRQ(ierr);
      ierr = VecCreateMPI(PetscSubcommContiguousParent(red->psubcomm),mloc_sub,PETSC_DECIDE,&red->xdup);CHKERRQ(ierr);
      ierr = VecCreateMPIWithArray(PetscSubcommContiguousParent(red->psubcomm),1,mloc_sub,PETSC_DECIDE,NULL,&red->ydup);CHKERRQ(ierr);

      /* create vecscatters */
      if (!red->scatterin) { /* efficiency of scatterin is independent of psubcomm_type! */
        IS       is1,is2;
        PetscInt *idx1,*idx2,i,j,k;

        ierr = MatCreateVecs(pc->pmat,&x,0);CHKERRQ(ierr);
        ierr = VecGetSize(x,&M);CHKERRQ(ierr);
        ierr = VecGetOwnershipRange(x,&mstart,&mend);CHKERRQ(ierr);
        mlocal = mend - mstart;
        ierr = PetscMalloc2(red->psubcomm->n*mlocal,&idx1,red->psubcomm->n*mlocal,&idx2);CHKERRQ(ierr);
        j    = 0;
        for (k=0; k<red->psubcomm->n; k++) {
          for (i=mstart; i<mend; i++) {
            idx1[j]   = i;
            idx2[j++] = i + M*k;
          }
        }
        ierr = ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx1,PETSC_COPY_VALUES,&is1);CHKERRQ(ierr);
        ierr = ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx2,PETSC_COPY_VALUES,&is2);CHKERRQ(ierr);
        ierr = VecScatterCreate(x,is1,red->xdup,is2,&red->scatterin);CHKERRQ(ierr);
        ierr = ISDestroy(&is1);CHKERRQ(ierr);
        ierr = ISDestroy(&is2);CHKERRQ(ierr);

        /* Impl below is good for PETSC_SUBCOMM_INTERLACED (no inter-process communication) and PETSC_SUBCOMM_CONTIGUOUS (communication within subcomm) */
        ierr = ISCreateStride(comm,mlocal,mstart+ red->psubcomm->color*M,1,&is1);CHKERRQ(ierr);
        ierr = ISCreateStride(comm,mlocal,mstart,1,&is2);CHKERRQ(ierr);
        ierr = VecScatterCreate(red->xdup,is1,x,is2,&red->scatterout);CHKERRQ(ierr);
        ierr = ISDestroy(&is1);CHKERRQ(ierr);
        ierr = ISDestroy(&is2);CHKERRQ(ierr);
        ierr = PetscFree2(idx1,idx2);CHKERRQ(ierr);
        ierr = VecDestroy(&x);CHKERRQ(ierr);
      }
    } else { /* !red->useparallelmat */
      ierr = KSPSetOperators(red->ksp,pc->mat,pc->pmat);CHKERRQ(ierr);
    }
  } else { /* pc->setupcalled */
    if (red->useparallelmat) {
      MatReuse       reuse;
      /* grab the parallel matrix and put it into processors of a subcommunicator */
      /*--------------------------------------------------------------------------*/
      if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
        /* destroy old matrices */
        ierr  = MatDestroy(&red->pmats);CHKERRQ(ierr);
        reuse = MAT_INITIAL_MATRIX;
      } else {
        reuse = MAT_REUSE_MATRIX;
      }
      ierr = MatCreateRedundantMatrix(pc->pmat,red->psubcomm->n,PetscSubcommChild(red->psubcomm),reuse,&red->pmats);CHKERRQ(ierr);
      ierr = KSPSetOperators(red->ksp,red->pmats,red->pmats);CHKERRQ(ierr);
    } else { /* !red->useparallelmat */
      ierr = KSPSetOperators(red->ksp,pc->mat,pc->pmat);CHKERRQ(ierr);
    }
  }

  if (pc->setfromoptionscalled) {
    ierr = KSPSetFromOptions(red->ksp);CHKERRQ(ierr);
  }
  ierr = KSPSetUp(red->ksp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
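The scatter setup is the least obvious step: every process contributes its owned rows once per redundant copy, and copy k lands in the duplicated vector xdup at offset M*k. A plain-C toy (no PETSc; all numbers invented) that prints the idx1/idx2 mapping built above:

#include <stdio.h>

int main(void)
{
  /* global vector size M = 4, duplicated over n = 2 subcomms; this rank owns rows [1,3) */
  int M = 4, n = 2, mstart = 1, mend = 3;
  int i, j = 0, k;

  for (k = 0; k < n; k++) {
    for (i = mstart; i < mend; i++, j++) {
      printf("idx1[%d] = %d  ->  idx2[%d] = %d\n", j, i, j, i + M*k);
    }
  }
  return 0;
}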
Example #10
/*
Note: I should not modify pc->setupcalled here!!
That flag is handled by PETSc itself.
*/
PetscErrorCode BSSCR_PCSetUp_GtKG( PC pc )
{
	PC_GtKG    ctx = (PC_GtKG)pc->data;
	PetscReal  fill;
	Mat        Ident;
	Vec        diag;
	PetscInt   M,N, m,n;
	MPI_Comm   comm;
	PetscInt   nnz_I, nnz_G;
	const MatType mtype;
	const char *prefix;
	PetscTruth wasSetup;
	
	
	
	if( ctx->K == PETSC_NULL ) {
		Stg_SETERRQ( PETSC_ERR_SUP, "gtkg: K not set" );
	}
	if( ctx->G == PETSC_NULL ) {
		Stg_SETERRQ( PETSC_ERR_SUP, "gtkg: G not set" );
	}
	
	PetscObjectGetComm( (PetscObject)ctx->K, &comm ); 
	
	
	/* Check for existence of objects and trash any which exist */
	if( ctx->form_GtG == PETSC_TRUE && ctx->GtG != PETSC_NULL ) {
		Stg_MatDestroy(&ctx->GtG );
		ctx->GtG = PETSC_NULL;
	}
	
	if( ctx->s != PETSC_NULL ) {
		Stg_VecDestroy(&ctx->s );
		ctx->s = PETSC_NULL;
	}
	if( ctx->X != PETSC_NULL ) {
		Stg_VecDestroy(&ctx->X );
		ctx->X = PETSC_NULL;
	}
	if( ctx->t != PETSC_NULL ) {
		Stg_VecDestroy(&ctx->t );
		ctx->t = PETSC_NULL;
	}
	if( ctx->inv_diag_M != PETSC_NULL ) {
		Stg_VecDestroy(&ctx->inv_diag_M );
		ctx->inv_diag_M = PETSC_NULL;
	}
	
	
	
	/* Create vectors */
	MatGetVecs( ctx->K, &ctx->s, &ctx->X );
	MatGetVecs( ctx->G, &ctx->t, PETSC_NULL );
	
	if( ctx->M != PETSC_NULL ) {
		MatGetVecs( ctx->K, &ctx->inv_diag_M, PETSC_NULL );
		MatGetDiagonal( ctx->M, ctx->inv_diag_M );
		VecReciprocal( ctx->inv_diag_M );
		
		/* change the pc_apply routines */
		pc->ops->apply          = BSSCR_BSSCR_PCApply_GtKG_diagonal_scaling;
		pc->ops->applytranspose = BSSCR_BSSCR_PCApplyTranspose_GtKG_diagonal_scaling;
	}
	
	
	/* Assemble GtG */
	MatGetSize( ctx->G, &M, &N );
	MatGetLocalSize( ctx->G, &m, &n );
	
	MatGetVecs( ctx->G, PETSC_NULL, &diag );
	VecSet( diag, 1.0 );
	
	MatCreate( comm, &Ident );
	MatSetSizes( Ident, m,m , M, M );
#if (((PETSC_VERSION_MAJOR==3) && (PETSC_VERSION_MINOR>=3)) || (PETSC_VERSION_MAJOR>3) )
        MatSetUp(Ident);
#endif

	MatGetType( ctx->G, &mtype );
	MatSetType( Ident, mtype );
	
	if( ctx->M == PETSC_NULL ) {
		MatDiagonalSet( Ident, diag, INSERT_VALUES );
	}
	else {
		MatDiagonalSet( Ident, ctx->inv_diag_M, INSERT_VALUES );
	}
	
	BSSCR_get_number_nonzeros_AIJ( Ident, &nnz_I );
	BSSCR_get_number_nonzeros_AIJ( ctx->G, &nnz_G );
	//fill = 1.0;
	/* 
	Not sure the best way to estimate the fill factor.
	GtG is a laplacian on the pressure space. 
	This might tell us something useful...
	*/
	fill = (PetscReal)(nnz_G)/(PetscReal)( nnz_I );
	MatPtAP( Ident, ctx->G, MAT_INITIAL_MATRIX, fill, &ctx->GtG );
	
	Stg_MatDestroy(&Ident);
	Stg_VecDestroy(&diag );
	
	
	Stg_KSPSetOperators( ctx->ksp, ctx->GtG, ctx->GtG, SAME_NONZERO_PATTERN );
	
	if (!pc->setupcalled) {	
		wasSetup = PETSC_FALSE;
		
		PCGetOptionsPrefix( pc,&prefix );
		KSPSetOptionsPrefix( ctx->ksp, prefix );
		KSPAppendOptionsPrefix( ctx->ksp, "pc_gtkg_" ); /* -pc_gtkg_ksp_type <type>, -pc_gtkg_pc_type <type> */
	}
	else {
		wasSetup = PETSC_TRUE;
	}	
	
	
//	if (!wasSetup && pc->setfromoptionscalled) {
	if (!wasSetup) {
		KSPSetFromOptions(ctx->ksp);
	}
	
	
	PetscFunctionReturn(0);
}
Example #11
static PetscErrorCode PCSetUp_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscInt       mstart,mend,mlocal,m,mlocal_sub,rstart_sub,rend_sub,mloc_sub;
  PetscMPIInt    size;
  MatReuse       reuse = MAT_INITIAL_MATRIX;
  MatStructure   str = DIFFERENT_NONZERO_PATTERN;
  MPI_Comm       comm = ((PetscObject)pc)->comm,subcomm;
  Vec            vec;
  PetscMPIInt    subsize,subrank;
  const char     *prefix;
  const PetscInt *range;

  PetscFunctionBegin;
  ierr = MatGetVecs(pc->pmat,&vec,0);CHKERRQ(ierr);
  ierr = VecGetSize(vec,&m);CHKERRQ(ierr);

  if (!pc->setupcalled) {
    if (!red->psubcomm) {
      ierr = PetscSubcommCreate(comm,red->nsubcomm,&red->psubcomm);CHKERRQ(ierr);
      ierr = PetscLogObjectMemory(pc,sizeof(PetscSubcomm));CHKERRQ(ierr);

      /* create a new KSP that processes in each subcomm have a copy of */
      subcomm = red->psubcomm->comm;
      ierr = KSPCreate(subcomm,&red->ksp);CHKERRQ(ierr);
      ierr = PetscObjectIncrementTabLevel((PetscObject)red->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
      ierr = PetscLogObjectParent(pc,red->ksp);CHKERRQ(ierr);
      ierr = KSPSetType(red->ksp,KSPPREONLY);CHKERRQ(ierr);
      ierr = KSPGetPC(red->ksp,&red->pc);CHKERRQ(ierr);
      ierr = PCSetType(red->pc,PCLU);CHKERRQ(ierr);

      ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
      ierr = KSPSetOptionsPrefix(red->ksp,prefix);CHKERRQ(ierr); 
      ierr = KSPAppendOptionsPrefix(red->ksp,"redundant_");CHKERRQ(ierr); 
    } else {
       subcomm = red->psubcomm->comm;
    }

    /* create working vectors xsub/ysub and xdup/ydup */
    ierr = VecGetLocalSize(vec,&mlocal);CHKERRQ(ierr);  
    ierr = VecGetOwnershipRange(vec,&mstart,&mend);CHKERRQ(ierr);

    /* get local size of xsub/ysub */    
    ierr = MPI_Comm_size(subcomm,&subsize);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(subcomm,&subrank);CHKERRQ(ierr);
    ierr = MatGetOwnershipRanges(pc->pmat,&range);CHKERRQ(ierr);
    rstart_sub = range[red->psubcomm->n*subrank]; /* rstart in xsub/ysub */    
    if (subrank+1 < subsize){
      rend_sub = range[red->psubcomm->n*(subrank+1)];
    } else {
      rend_sub = m; 
    }
    mloc_sub = rend_sub - rstart_sub;
    ierr = VecCreateMPI(subcomm,mloc_sub,PETSC_DECIDE,&red->ysub);CHKERRQ(ierr);
    /* create xsub with empty local arrays, because xdup's arrays will be placed into it */
    ierr = VecCreateMPIWithArray(subcomm,mloc_sub,PETSC_DECIDE,PETSC_NULL,&red->xsub);CHKERRQ(ierr);

    /* create xdup and ydup. ydup has empty local arrays because ysub's arrays will be placed into it.
       Note: we use communicator dupcomm, not ((PetscObject)pc)->comm! */      
    ierr = VecCreateMPI(red->psubcomm->dupparent,mloc_sub,PETSC_DECIDE,&red->xdup);CHKERRQ(ierr);
    ierr = VecCreateMPIWithArray(red->psubcomm->dupparent,mloc_sub,PETSC_DECIDE,PETSC_NULL,&red->ydup);CHKERRQ(ierr);
  
    /* create vec scatters */
    if (!red->scatterin){
      IS       is1,is2;
      PetscInt *idx1,*idx2,i,j,k; 

      ierr = PetscMalloc2(red->psubcomm->n*mlocal,PetscInt,&idx1,red->psubcomm->n*mlocal,PetscInt,&idx2);CHKERRQ(ierr);
      j = 0;
      for (k=0; k<red->psubcomm->n; k++){
        for (i=mstart; i<mend; i++){
          idx1[j]   = i;
          idx2[j++] = i + m*k;
        }
      }
      ierr = ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx1,&is1);CHKERRQ(ierr);
      ierr = ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx2,&is2);CHKERRQ(ierr);      
      ierr = VecScatterCreate(vec,is1,red->xdup,is2,&red->scatterin);CHKERRQ(ierr);
      ierr = ISDestroy(is1);CHKERRQ(ierr);
      ierr = ISDestroy(is2);CHKERRQ(ierr);

      ierr = ISCreateStride(comm,mlocal,mstart+ red->psubcomm->color*m,1,&is1);CHKERRQ(ierr);
      ierr = ISCreateStride(comm,mlocal,mstart,1,&is2);CHKERRQ(ierr);
      ierr = VecScatterCreate(red->xdup,is1,vec,is2,&red->scatterout);CHKERRQ(ierr);      
      ierr = ISDestroy(is1);CHKERRQ(ierr);
      ierr = ISDestroy(is2);CHKERRQ(ierr);
      ierr = PetscFree2(idx1,idx2);CHKERRQ(ierr);
    }
  }
  ierr = VecDestroy(vec);CHKERRQ(ierr);

  /* if pmatrix set by user is sequential then we do not need to gather the parallel matrix */
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  if (size == 1) {
    red->useparallelmat = PETSC_FALSE;
  }

  if (red->useparallelmat) {
    if (pc->setupcalled == 1 && pc->flag == DIFFERENT_NONZERO_PATTERN) {
      /* destroy old matrices */
      if (red->pmats) {
        ierr = MatDestroy(red->pmats);CHKERRQ(ierr);
      }
    } else if (pc->setupcalled == 1) {
      reuse = MAT_REUSE_MATRIX;
      str   = SAME_NONZERO_PATTERN;
    }
       
    /* grab the parallel matrix and put it into processors of a subcommunicator */
    /*--------------------------------------------------------------------------*/
    ierr = VecGetLocalSize(red->ysub,&mlocal_sub);CHKERRQ(ierr);  
    ierr = MatGetRedundantMatrix(pc->pmat,red->psubcomm->n,red->psubcomm->comm,mlocal_sub,reuse,&red->pmats);CHKERRQ(ierr);
    /* tell PC of the subcommunicator its operators */
    ierr = KSPSetOperators(red->ksp,red->pmats,red->pmats,str);CHKERRQ(ierr);
  } else {
    ierr = KSPSetOperators(red->ksp,pc->mat,pc->pmat,pc->flag);CHKERRQ(ierr);
  }
  if (pc->setfromoptionscalled){
    ierr = KSPSetFromOptions(red->ksp);CHKERRQ(ierr); 
  }
  ierr = KSPSetUp(red->ksp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
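The ownership arithmetic here differs from Example #9: each subcomm rank absorbs the rows of n consecutive parent ranks, read off the parent's ownership-range array. A plain-C toy (no PETSc; the range values are invented) that reproduces the rstart_sub/rend_sub computation:

#include <stdio.h>

int main(void)
{
  int range[] = {0,2,5,7,9}; /* ownership ranges of a 4-rank parent comm; m = 9 global rows */
  int n = 2, subsize = 2, m = 9;
  int subrank, rstart_sub, rend_sub;

  for (subrank = 0; subrank < subsize; subrank++) {
    rstart_sub = range[n*subrank];
    rend_sub   = (subrank+1 < subsize) ? range[n*(subrank+1)] : m;
    printf("subrank %d owns rows [%d,%d)\n", subrank, rstart_sub, rend_sub);
  }
  return 0;
}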
Example #12
/*
Builds
  B Y1 X1 Bt
and creates a KSP when C = 0; otherwise it builds
  B Y1 X1 Bt - C
*/
PetscErrorCode BSSCR_PCScGtKGUseStandardBBtOperator( PC pc ) 
{
	PC_SC_GtKG ctx = (PC_SC_GtKG)pc->data;
	PetscReal  fill;
	Mat        diag_mat,C;
	Vec        diag;
	PetscInt   M,N, m,n;
	MPI_Comm   comm;
	PetscInt   nnz_I, nnz_G;
	MatType    mtype;
	const char *prefix;
	Mat BBt;
	KSP ksp;
	PetscTruth ivalue, flg, has_cnst_nullsp;
	
	
	BSSCR_BSSCR_pc_error_ScGtKG( pc, __func__ );
	
	/* Assemble BBt */
	MatGetSize( ctx->Bt, &M, &N );
	MatGetLocalSize( ctx->Bt, &m, &n );
	
	MatGetVecs( ctx->Bt, PETSC_NULL, &diag );
	
	/* Define diagonal matrix Y1 X1 */
	VecPointwiseMult( diag, ctx->Y1, ctx->X1 );
	
	PetscObjectGetComm( (PetscObject)ctx->F, &comm ); 
	MatCreate( comm, &diag_mat );
	MatSetSizes( diag_mat, m,m , M, M );
#if (((PETSC_VERSION_MAJOR==3) && (PETSC_VERSION_MINOR>=3)) || (PETSC_VERSION_MAJOR>3) )
        MatSetUp(diag_mat);
#endif
	MatGetType( ctx->Bt, &mtype );
	MatSetType( diag_mat, mtype );
	
	MatDiagonalSet( diag_mat, diag, INSERT_VALUES );
	
	/* Build operator B Y1 X1 Bt */
	BSSCR_BSSCR_get_number_nonzeros_AIJ_ScGtKG( diag_mat, &nnz_I );
	BSSCR_BSSCR_get_number_nonzeros_AIJ_ScGtKG( ctx->Bt, &nnz_G );
	/* 
	Not sure the best way to estimate the fill factor.
	BBt is a laplacian on the pressure space. 
	This might tell us something useful...
	*/
	fill = (PetscReal)(nnz_G)/(PetscReal)( nnz_I );
	MatPtAP( diag_mat, ctx->Bt, MAT_INITIAL_MATRIX, fill, &BBt );
	
	Stg_MatDestroy(&diag_mat );
	Stg_VecDestroy(&diag );
	
	
	C = ctx->C;
	if( C !=PETSC_NULL ) {
		MatAXPY( BBt, -1.0, C, DIFFERENT_NONZERO_PATTERN );
	}
	
	
	/* Build the solver */
	KSPCreate( ((PetscObject)pc)->comm, &ksp );
	
	Stg_KSPSetOperators( ksp, BBt, BBt, SAME_NONZERO_PATTERN );
	
	PCGetOptionsPrefix( pc,&prefix );
	KSPSetOptionsPrefix( ksp, prefix );
	KSPAppendOptionsPrefix( ksp, "pc_gtkg_" ); /* -pc_gtkg_ksp_type <type>, -pc_gtkg_pc_type <type> */
	
	BSSCR_PCScGtKGSetKSP( pc, ksp );
	
	BSSCR_MatContainsConstNullSpace( BBt, NULL, &has_cnst_nullsp );
	if( has_cnst_nullsp == PETSC_TRUE ) {
		BSSCR_PCScGtKGAttachNullSpace( pc );
	}
	
	PetscOptionsGetTruth( PETSC_NULL, "-pc_gtkg_monitor", &ivalue, &flg );
	BSSCR_PCScGtKGSetSubKSPMonitor( pc, ivalue );
	
	Stg_KSPDestroy(&ksp);
	Stg_MatDestroy(&BBt);
	
	PetscFunctionReturn(0);
}
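On the fill guess the author is unsure about: PETSc documents the `fill` argument of MatPtAP() as the expected ratio nnz(C)/(nnz(A)+nnz(P)), so an estimate anchored to that definition would divide a guess for the product's nonzeros by the combined inputs. A purely illustrative fragment, where nnz_BBt_guess is a hypothetical placeholder, not a value from the source:

PetscInt  nnz_BBt_guess = 5*nnz_G; /* hypothetical: assume the product is a few times denser than Bt */
PetscReal fill_est      = ((PetscReal)nnz_BBt_guess) / ((PetscReal)(nnz_I + nnz_G));
/* then: MatPtAP( diag_mat, ctx->Bt, MAT_INITIAL_MATRIX, fill_est, &BBt ); */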