Code Example #1
File: hierarchical.c  Project: mchandra/petsc
PetscErrorCode MatPartitioningHierarchical_ReassembleFineparts(Mat adj, IS fineparts, ISLocalToGlobalMapping mapping, IS *sfineparts)
{
  PetscInt            *local_indices, *global_indices,*owners,*sfineparts_indices,localsize,i;
  const PetscInt      *ranges,*fineparts_indices;
  PetscMPIInt         rank;
  MPI_Comm            comm;
  PetscLayout         rmap;
  PetscSFNode        *remote;
  PetscSF             sf;
  PetscErrorCode      ierr;

  PetscFunctionBegin;
  /* get the communicator */
  ierr = PetscObjectGetComm((PetscObject)adj,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = MatGetLayouts(adj,&rmap,PETSC_NULL);CHKERRQ(ierr);
  ierr = ISGetLocalSize(fineparts,&localsize);CHKERRQ(ierr);
  ierr = PetscCalloc2(localsize,&global_indices,localsize,&local_indices);CHKERRQ(ierr);
  for (i=0; i<localsize; i++) {
    local_indices[i] = i;
  }
  /* map the local indices to global indices */
  ierr = ISLocalToGlobalMappingApply(mapping,localsize,local_indices,global_indices);CHKERRQ(ierr);
  ierr = PetscCalloc1(localsize,&owners);CHKERRQ(ierr);
  /* find owners for the global indices */
  for (i=0; i<localsize; i++) {
    ierr = PetscLayoutFindOwner(rmap,global_indices[i],&owners[i]);CHKERRQ(ierr);
  }
  /* ownership ranges */
  ierr = PetscLayoutGetRanges(rmap,&ranges);CHKERRQ(ierr);
  ierr = PetscCalloc1(ranges[rank+1]-ranges[rank],&sfineparts_indices);CHKERRQ(ierr);
  ierr = ISGetIndices(fineparts,&fineparts_indices);CHKERRQ(ierr);
  /* create an SF to exchange data */
  ierr = PetscSFCreate(comm,&sf);CHKERRQ(ierr);
  ierr = PetscCalloc1(localsize,&remote);CHKERRQ(ierr);
  for (i=0; i<localsize; i++) {
    remote[i].rank  = owners[i];
    remote[i].index = global_indices[i]-ranges[owners[i]];
  }
  ierr = PetscSFSetType(sf,PETSCSFBASIC);CHKERRQ(ierr);
  /* not sure how to add a prefix to the sf */
  ierr = PetscSFSetFromOptions(sf);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(sf,localsize,localsize,PETSC_NULL,PETSC_OWN_POINTER,remote,PETSC_OWN_POINTER);CHKERRQ(ierr);
  ierr = PetscSFReduceBegin(sf,MPIU_INT,fineparts_indices,sfineparts_indices,MPIU_REPLACE);CHKERRQ(ierr);
  ierr = PetscSFReduceEnd(sf,MPIU_INT,fineparts_indices,sfineparts_indices,MPIU_REPLACE);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
  ierr = ISRestoreIndices(fineparts,&fineparts_indices);CHKERRQ(ierr);
  /* create the reassembled fineparts IS over the original communicator */
  ierr = ISCreateGeneral(comm,ranges[rank+1]-ranges[rank],sfineparts_indices,PETSC_OWN_POINTER,sfineparts);CHKERRQ(ierr);
  ierr = PetscFree2(global_indices,local_indices);CHKERRQ(ierr);
  ierr = PetscFree(owners);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
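
Example #1 is built on PetscLayout ownership queries. Below is a minimal, hypothetical sketch (not part of the PETSc sources listed here) of the PetscLayoutFindOwner pattern in isolation: it creates a layout with four entries per process and asks which rank owns the first entry of each block. It assumes the same API era as the examples above, where PetscLayoutFindOwner takes a PetscInt* owner argument (newer releases use PetscMPIInt*).

#include <petscis.h>

int main(int argc,char **argv)
{
  PetscLayout    map;
  PetscInt       idx,owner,nlocal = 4,N;
  PetscMPIInt    rank;
  PetscErrorCode ierr;

  ierr = PetscInitialize(&argc,&argv,NULL,NULL);if (ierr) return ierr;
  ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);
  /* a layout with 4 entries per process, like the one MatGetLayouts() hands back */
  ierr = PetscLayoutCreate(PETSC_COMM_WORLD,&map);CHKERRQ(ierr);
  ierr = PetscLayoutSetLocalSize(map,nlocal);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(map);CHKERRQ(ierr);
  ierr = PetscLayoutGetSize(map,&N);CHKERRQ(ierr);
  /* ask which rank owns the first entry of every block */
  for (idx=0; idx<N; idx+=nlocal) {
    ierr = PetscLayoutFindOwner(map,idx,&owner);CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_SELF,"[%d] global index %D owned by rank %D\n",rank,idx,owner);CHKERRQ(ierr);
  }
  ierr = PetscLayoutDestroy(&map);CHKERRQ(ierr);
  ierr = PetscFinalize();
  return ierr;
}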
Code Example #2
File: zerorows.c  Project: firedrakeproject/petsc
/* this function maps global rows to indices of locally owned rows */
PETSC_INTERN PetscErrorCode MatZeroRowsMapLocal_Private(Mat A,PetscInt N,const PetscInt *rows,PetscInt *nr,PetscInt **olrows)
{
  PetscInt      *owners = A->rmap->range;
  PetscInt       n      = A->rmap->n;
  PetscSF        sf;
  PetscInt      *lrows;
  PetscSFNode   *rrows;
  PetscMPIInt    rank;
  PetscInt       r, p = 0, len = 0;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Create SF where leaves are input rows and roots are owned rows */
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)A),&rank);CHKERRQ(ierr);
  ierr = PetscMalloc1(n, &lrows);CHKERRQ(ierr);
  for (r = 0; r < n; ++r) lrows[r] = -1;
  if (!A->nooffproczerorows) {ierr = PetscMalloc1(N, &rrows);CHKERRQ(ierr);}
  for (r = 0; r < N; ++r) {
    const PetscInt idx   = rows[r];
    if (idx < 0 || A->rmap->N <= idx) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row %D out of range [0,%D)",idx,A->rmap->N);
    if (idx < owners[p] || owners[p+1] <= idx) { /* short-circuit the search if the last p owns this row too */
      ierr = PetscLayoutFindOwner(A->rmap,idx,&p);CHKERRQ(ierr);
    }
    if (A->nooffproczerorows) {
      if (p != rank) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"MAT_NO_OFF_PROC_ZERO_ROWS set, but row %D is not owned by rank %d",idx,rank);
      lrows[len++] = idx - owners[p];
    } else {
      rrows[r].rank = p;
      rrows[r].index = rows[r] - owners[p];
    }
  }
  if (!A->nooffproczerorows) {
    ierr = PetscSFCreate(PetscObjectComm((PetscObject) A), &sf);CHKERRQ(ierr);
    ierr = PetscSFSetGraph(sf, n, N, NULL, PETSC_OWN_POINTER, rrows, PETSC_OWN_POINTER);CHKERRQ(ierr);
    /* Collect flags for rows to be zeroed */
    ierr = PetscSFReduceBegin(sf, MPIU_INT, (PetscInt*)rows, lrows, MPI_LOR);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(sf, MPIU_INT, (PetscInt*)rows, lrows, MPI_LOR);CHKERRQ(ierr);
    ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
    /* Compress and put in row numbers */
    for (r = 0; r < n; ++r) if (lrows[r] >= 0) lrows[len++] = r;
  }
  if (nr) *nr = len;
  if (olrows) *olrows = lrows;
  PetscFunctionReturn(0);
}
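
MatZeroRowsMapLocal_Private is marked PETSC_INTERN, so user code reaches it through public entry points such as MatZeroRows(). Below is a minimal, hypothetical driver (not from the PETSc sources above) in the same API era: every rank requests global row 0, which is off-process everywhere except on rank 0, so the request must be mapped to the owning rank as in the helper above.

#include <petscmat.h>

int main(int argc,char **argv)
{
  Mat            A;
  PetscInt       row = 0,i,rstart,rend;
  PetscErrorCode ierr;

  ierr = PetscInitialize(&argc,&argv,NULL,NULL);if (ierr) return ierr;
  /* a diagonal AIJ matrix with 2 rows per process */
  ierr = MatCreateAIJ(PETSC_COMM_WORLD,2,2,PETSC_DETERMINE,PETSC_DETERMINE,1,NULL,0,NULL,&A);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
  for (i=rstart; i<rend; i++) {ierr = MatSetValue(A,i,i,1.0,INSERT_VALUES);CHKERRQ(ierr);}
  ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  /* every rank asks for global row 0; internally the requests are mapped
     onto the owning rank before the row is zeroed and its diagonal set to 2 */
  ierr = MatZeroRows(A,1,&row,2.0,NULL,NULL);CHKERRQ(ierr);
  ierr = MatView(A,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  ierr = MatDestroy(&A);CHKERRQ(ierr);
  ierr = PetscFinalize();
  return ierr;
}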
Code Example #3
File: pmap.c  Project: feelpp/debian-petsc
/*@C
   PetscSFSetGraphLayout - Set a parallel star forest via global indices and a PetscLayout

   Collective

   Input Arguments:
+  sf - star forest
.  layout - PetscLayout defining the global space
.  nleaves - number of leaf vertices on the current process, each of these references a root on any process
.  ilocal - locations of leaves in leafdata buffers, pass NULL for contiguous storage
.  localmode - copy mode for ilocal
-  iremote - remote locations of root vertices for each leaf on the current process

   Level: intermediate

.seealso: PetscSFCreate(), PetscSFView(), PetscSFSetGraph(), PetscSFGetGraph()
@*/
PetscErrorCode PetscSFSetGraphLayout(PetscSF sf,PetscLayout layout,PetscInt nleaves,const PetscInt *ilocal,PetscCopyMode localmode,const PetscInt *iremote)
{
  PetscErrorCode ierr;
  PetscInt       i,nroots;
  PetscSFNode    *remote;

  PetscFunctionBegin;
  ierr = PetscLayoutGetLocalSize(layout,&nroots);CHKERRQ(ierr);
  ierr = PetscMalloc1(nleaves,&remote);CHKERRQ(ierr);
  for (i=0; i<nleaves; i++) {
    PetscInt owner = -1;
    ierr = PetscLayoutFindOwner(layout,iremote[i],&owner);CHKERRQ(ierr);
    remote[i].rank  = owner;
    remote[i].index = iremote[i] - layout->range[owner];
  }
  ierr = PetscSFSetGraph(sf,nroots,nleaves,ilocal,localmode,remote,PETSC_OWN_POINTER);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
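
A hypothetical usage sketch for PetscSFSetGraphLayout (not from the PETSc sources above): each rank's leaves reference the next global entry of a PetscLayout, and a broadcast pulls each root's data to the referencing leaves. It assumes the same-era four-argument PetscSFBcastBegin/End seen in the other examples (newer releases add an MPI_Op argument).

#include <petscis.h>
#include <petscsf.h>

int main(int argc,char **argv)
{
  PetscSF        sf;
  PetscLayout    layout;
  PetscInt       nlocal = 2,gidx[2],rootdata[2],leafdata[2],rstart,N,i;
  PetscErrorCode ierr;

  ierr = PetscInitialize(&argc,&argv,NULL,NULL);if (ierr) return ierr;
  ierr = PetscLayoutCreate(PETSC_COMM_WORLD,&layout);CHKERRQ(ierr);
  ierr = PetscLayoutSetLocalSize(layout,nlocal);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr);
  ierr = PetscLayoutGetRange(layout,&rstart,NULL);CHKERRQ(ierr);
  ierr = PetscLayoutGetSize(layout,&N);CHKERRQ(ierr);
  /* every leaf references the next global index, wrapping around */
  for (i=0; i<nlocal; i++) gidx[i] = (rstart+i+1)%N;
  ierr = PetscSFCreate(PETSC_COMM_WORLD,&sf);CHKERRQ(ierr);
  ierr = PetscSFSetGraphLayout(sf,layout,nlocal,NULL,PETSC_COPY_VALUES,gidx);CHKERRQ(ierr);
  for (i=0; i<nlocal; i++) rootdata[i] = rstart+i; /* each root holds its global index */
  ierr = PetscSFBcastBegin(sf,MPIU_INT,rootdata,leafdata);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf,MPIU_INT,rootdata,leafdata);CHKERRQ(ierr);
  /* leafdata[i] now holds (rstart+i+1)%N, fetched from its owner */
  ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
  ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
  ierr = PetscFinalize();
  return ierr;
}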
Code Example #4
File: mpiadj.c  Project: pombredanne/petsc
/*
 * The interface should be easy to use for both MatGetSubMatrix (parallel sub-matrix)
 * and MatGetSubMatrices (sequential sub-matrices).
 */
static PetscErrorCode MatGetSubMatrix_MPIAdj_data(Mat adj,IS irows, IS icols, PetscInt **sadj_xadj,PetscInt **sadj_adjncy,PetscInt **sadj_values)
{
  PetscInt           nlrows_is,icols_n,i,j,nroots,nleaves,owner,rlocalindex,*ncols_send,*ncols_recv;
  PetscInt           nlrows_mat,*adjncy_recv,Ncols_recv,Ncols_send,*xadj_recv,*values_recv;
  PetscInt          *ncols_recv_offsets,loc,rnclos,*sadjncy,*sxadj,*svalues,isvalue;
  const PetscInt    *irows_indices,*icols_indices,*xadj, *adjncy;
  Mat_MPIAdj        *a = (Mat_MPIAdj*)adj->data;
  PetscLayout        rmap;
  MPI_Comm           comm;
  PetscSF            sf;
  PetscSFNode       *iremote;
  PetscBool          done;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  /* communicator */
  ierr = PetscObjectGetComm((PetscObject)adj,&comm);CHKERRQ(ierr);
  /* Layouts */
  ierr = MatGetLayouts(adj,&rmap,PETSC_NULL);CHKERRQ(ierr);
  /* get rows information */
  ierr = ISGetLocalSize(irows,&nlrows_is);CHKERRQ(ierr);
  ierr = ISGetIndices(irows,&irows_indices);CHKERRQ(ierr);
  ierr = PetscCalloc1(nlrows_is,&iremote);CHKERRQ(ierr);
  /* construct the SF graph */
  nleaves = nlrows_is;
  for (i=0; i<nlrows_is; i++) {
    owner       = -1;
    rlocalindex = -1;
    ierr = PetscLayoutFindOwnerIndex(rmap,irows_indices[i],&owner,&rlocalindex);CHKERRQ(ierr);
    iremote[i].rank  = owner;
    iremote[i].index = rlocalindex;
  }
  ierr = MatGetRowIJ(adj,0,PETSC_FALSE,PETSC_FALSE,&nlrows_mat,&xadj,&adjncy,&done);CHKERRQ(ierr);
  ierr = PetscCalloc4(nlrows_mat,&ncols_send,nlrows_is,&xadj_recv,nlrows_is+1,&ncols_recv_offsets,nlrows_is,&ncols_recv);CHKERRQ(ierr);
  nroots = nlrows_mat;
  for (i=0; i<nlrows_mat; i++) {
    ncols_send[i] = xadj[i+1]-xadj[i];
  }
  ierr = PetscSFCreate(comm,&sf);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(sf,nroots,nleaves,PETSC_NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);CHKERRQ(ierr);
  ierr = PetscSFSetType(sf,PETSCSFBASIC);CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(sf);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(sf,MPIU_INT,ncols_send,ncols_recv);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf,MPIU_INT,ncols_send,ncols_recv);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(sf,MPIU_INT,xadj,xadj_recv);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf,MPIU_INT,xadj,xadj_recv);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
  Ncols_recv = 0;
  for (i=0; i<nlrows_is; i++) {
    Ncols_recv             += ncols_recv[i];
    ncols_recv_offsets[i+1] = ncols_recv[i]+ncols_recv_offsets[i];
  }
  Ncols_send = 0;
  for (i=0; i<nlrows_mat; i++) {
    Ncols_send += ncols_send[i];
  }
  ierr = PetscCalloc1(Ncols_recv,&iremote);CHKERRQ(ierr);
  ierr = PetscCalloc1(Ncols_recv,&adjncy_recv);CHKERRQ(ierr);
  nleaves = Ncols_recv;
  Ncols_recv = 0;
  for(i=0; i<nlrows_is; i++){
    ierr = PetscLayoutFindOwner(rmap,irows_indices[i],&owner);CHKERRQ(ierr);
    for(j=0; j<ncols_recv[i]; j++){
      iremote[Ncols_recv].rank    = owner;
      iremote[Ncols_recv++].index = xadj_recv[i]+j;
    }
  }
  ierr = ISRestoreIndices(irows,&irows_indices);CHKERRQ(ierr);
  /* do we need to deal with edge weights? */
  isvalue = a->values ? 1 : 0;
  /* deciding this globally would involve a reduction over the communicator */
  /*ierr = MPI_Allreduce(&isvalue,&isvalue,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);*/
  if (isvalue) {ierr = PetscCalloc1(Ncols_recv,&values_recv);CHKERRQ(ierr);}
  nroots = Ncols_send;
  ierr = PetscSFCreate(comm,&sf);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(sf,nroots,nleaves,PETSC_NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);CHKERRQ(ierr);
  ierr = PetscSFSetType(sf,PETSCSFBASIC);CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(sf);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(sf,MPIU_INT,adjncy,adjncy_recv);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf,MPIU_INT,adjncy,adjncy_recv);CHKERRQ(ierr);
  if (isvalue) {
    ierr = PetscSFBcastBegin(sf,MPIU_INT,a->values,values_recv);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(sf,MPIU_INT,a->values,values_recv);CHKERRQ(ierr);
  }
  ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
  ierr = MatRestoreRowIJ(adj,0,PETSC_FALSE,PETSC_FALSE,&nlrows_mat,&xadj,&adjncy,&done);CHKERRQ(ierr);
  ierr = ISGetLocalSize(icols,&icols_n);CHKERRQ(ierr);
  ierr = ISGetIndices(icols,&icols_indices);CHKERRQ(ierr);
  rnclos = 0;
  for (i=0; i<nlrows_is; i++) {
    for (j=ncols_recv_offsets[i]; j<ncols_recv_offsets[i+1]; j++) {
      ierr = PetscFindInt(adjncy_recv[j],icols_n,icols_indices,&loc);CHKERRQ(ierr);
      if (loc<0) {
        adjncy_recv[j] = -1;
        if (isvalue) values_recv[j] = -1;
        ncols_recv[i]--;
      } else {
        rnclos++;
      }
    }
  }
  ierr = ISRestoreIndices(icols,&icols_indices);CHKERRQ(ierr);
  ierr = PetscCalloc1(rnclos,&sadjncy);CHKERRQ(ierr);
  if (isvalue) {ierr = PetscCalloc1(rnclos,&svalues);CHKERRQ(ierr);}
  ierr = PetscCalloc1(nlrows_is+1,&sxadj);CHKERRQ(ierr);
  rnclos = 0;
  for (i=0; i<nlrows_is; i++) {
    for (j=ncols_recv_offsets[i]; j<ncols_recv_offsets[i+1]; j++) {
      if (adjncy_recv[j]<0) continue;
      sadjncy[rnclos] = adjncy_recv[j];
      if (isvalue) svalues[rnclos] = values_recv[j];
      rnclos++;
    }
  }
  for (i=0; i<nlrows_is; i++) {
    sxadj[i+1] = sxadj[i]+ncols_recv[i];
  }
  if (sadj_xadj)   {*sadj_xadj = sxadj;}     else {ierr = PetscFree(sxadj);CHKERRQ(ierr);}
  if (sadj_adjncy) {*sadj_adjncy = sadjncy;} else {ierr = PetscFree(sadjncy);CHKERRQ(ierr);}
  if (sadj_values) {
    if (isvalue) *sadj_values = svalues; else *sadj_values = 0;
  } else {
    if (isvalue) {ierr = PetscFree(svalues);CHKERRQ(ierr);}
  }
  ierr = PetscFree4(ncols_send,xadj_recv,ncols_recv_offsets,ncols_recv);CHKERRQ(ierr);
  ierr = PetscFree(adjncy_recv);CHKERRQ(ierr);
  if (isvalue) {ierr = PetscFree(values_recv);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
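
The routine above is built around a two-round star-forest pattern: the first SF broadcasts per-row lengths and offsets so receivers can size their buffers, and a second SF is then built whose leaves address the individual entries. Below is a minimal, self-contained sketch of just that pattern on hypothetical data (each rank fetches the one "row" owned by the next rank), again assuming the same-era four-argument PetscSFBcastBegin/End.

#include <petscsf.h>

int main(int argc,char **argv)
{
  PetscSF        sf;
  PetscSFNode   *remote;
  PetscMPIInt    rank,size,next;
  PetscInt       nloc = 3,rowlen = 3,rowoff = 0;   /* each rank owns one row of 3 entries */
  PetscInt       len[1],off[1],data[3],recv[3],i;
  PetscErrorCode ierr;

  ierr = PetscInitialize(&argc,&argv,NULL,NULL);if (ierr) return ierr;
  ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PETSC_COMM_WORLD,&size);CHKERRQ(ierr);
  next = (PetscMPIInt)((rank+1)%size);
  for (i=0; i<nloc; i++) data[i] = rank*10+i;      /* this rank's row entries */
  /* round 1: one leaf per remote row fetches the row length and offset */
  ierr = PetscSFCreate(PETSC_COMM_WORLD,&sf);CHKERRQ(ierr);
  ierr = PetscMalloc1(1,&remote);CHKERRQ(ierr);
  remote[0].rank = next; remote[0].index = 0;
  ierr = PetscSFSetGraph(sf,1,1,NULL,PETSC_COPY_VALUES,remote,PETSC_OWN_POINTER);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(sf,MPIU_INT,&rowlen,len);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf,MPIU_INT,&rowlen,len);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(sf,MPIU_INT,&rowoff,off);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf,MPIU_INT,&rowoff,off);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
  /* round 2: one leaf per remote entry, addressed as offset+i on the owner
     (len[0] is 3 here, matching the fixed-size recv buffer) */
  ierr = PetscSFCreate(PETSC_COMM_WORLD,&sf);CHKERRQ(ierr);
  ierr = PetscMalloc1(len[0],&remote);CHKERRQ(ierr);
  for (i=0; i<len[0]; i++) {remote[i].rank = next; remote[i].index = off[0]+i;}
  ierr = PetscSFSetGraph(sf,nloc,len[0],NULL,PETSC_COPY_VALUES,remote,PETSC_OWN_POINTER);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(sf,MPIU_INT,data,recv);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf,MPIU_INT,data,recv);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
  ierr = PetscFinalize();
  return ierr;
}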