Example #1
File: zmpiadjf.c  Project: 00liujj/petsc
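/* Fortran stub for MatCreateMPIAdj(): MPI_Comm_f2c() converts the Fortran communicator handle,
   and CHKFORTRANNULLINTEGER() turns a PETSC_NULL_INTEGER passed for values into a C NULL pointer. */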
PETSC_EXTERN void PETSC_STDCALL matcreatempiadj_(MPI_Comm *comm,PetscInt *m,PetscInt *n,PetscInt *i,PetscInt *j,PetscInt *values,Mat *A,PetscErrorCode *ierr)
{
  Mat_MPIAdj *adj;

  CHKFORTRANNULLINTEGER(values);
  *ierr        = MatCreateMPIAdj(MPI_Comm_f2c(*(MPI_Fint*)&*comm),*m,*n,i,j,values,A);
  adj          = (Mat_MPIAdj*)(*A)->data;
  adj->freeaij = PETSC_FALSE;
}
Example #2
File: mpiadj.c  Project: pombredanne/petsc
static PetscErrorCode MatMPIAdjCreateNonemptySubcommMat_MPIAdj(Mat A,Mat *B)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;
  const PetscInt *ranges;
  MPI_Comm       acomm,bcomm;
  MPI_Group      agroup,bgroup;
  PetscMPIInt    i,rank,size,nranks,*ranks;

  PetscFunctionBegin;
  *B    = NULL;
  ierr  = PetscObjectGetComm((PetscObject)A,&acomm);CHKERRQ(ierr);
  ierr  = MPI_Comm_size(acomm,&size);CHKERRQ(ierr);
  ierr  = MPI_Comm_rank(acomm,&rank);CHKERRQ(ierr);
  ierr  = MatGetOwnershipRanges(A,&ranges);CHKERRQ(ierr);
  for (i=0,nranks=0; i<size; i++) {
    if (ranges[i+1] - ranges[i] > 0) nranks++;
  }
  if (nranks == size) {         /* All ranks have a positive number of rows, so we do not need to create a subcomm; */
    ierr = PetscObjectReference((PetscObject)A);CHKERRQ(ierr);
    *B   = A;
    PetscFunctionReturn(0);
  }

  ierr = PetscMalloc1(nranks,&ranks);CHKERRQ(ierr);
  for (i=0,nranks=0; i<size; i++) {
    if (ranges[i+1] - ranges[i] > 0) ranks[nranks++] = i;
  }
  ierr = MPI_Comm_group(acomm,&agroup);CHKERRQ(ierr);
  ierr = MPI_Group_incl(agroup,nranks,ranks,&bgroup);CHKERRQ(ierr);
  ierr = PetscFree(ranks);CHKERRQ(ierr);
  ierr = MPI_Comm_create(acomm,bgroup,&bcomm);CHKERRQ(ierr);
  ierr = MPI_Group_free(&agroup);CHKERRQ(ierr);
  ierr = MPI_Group_free(&bgroup);CHKERRQ(ierr);
  if (bcomm != MPI_COMM_NULL) {
    PetscInt   m,N;
    Mat_MPIAdj *b;
    ierr       = MatGetLocalSize(A,&m,NULL);CHKERRQ(ierr);
    ierr       = MatGetSize(A,NULL,&N);CHKERRQ(ierr);
    ierr       = MatCreateMPIAdj(bcomm,m,N,a->i,a->j,a->values,B);CHKERRQ(ierr);
    b          = (Mat_MPIAdj*)(*B)->data;
    b->freeaij = PETSC_FALSE;
    ierr       = MPI_Comm_free(&bcomm);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
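The static routine above is the MPIAdj implementation behind PETSc's MatMPIAdjCreateNonemptySubcommMat(). Below is a minimal sketch of driving it through that public name; it assumes the public entry point dispatches to the routine above, and the sizes and names (A, Asub) are illustrative only.

#include <petscmat.h>

int main(int argc,char **argv)
{
  Mat            A,Asub = NULL;
  PetscInt       *ii,*jj,nlocal;
  PetscMPIInt    rank;
  PetscErrorCode ierr;

  ierr = PetscInitialize(&argc,&argv,NULL,NULL);if (ierr) return ierr;
  ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);

  nlocal = rank ? 0 : 2;                               /* only rank 0 owns rows */
  ierr  = PetscMalloc1(nlocal+1,&ii);CHKERRQ(ierr);
  ierr  = PetscMalloc1(2,&jj);CHKERRQ(ierr);
  ii[0] = 0;
  if (!rank) { ii[1] = 1; ii[2] = 2; jj[0] = 1; jj[1] = 0; }  /* 2-vertex graph with edge 0-1 */

  /* MatCreateMPIAdj() takes ownership of ii and jj; do not free them here */
  ierr = MatCreateMPIAdj(PETSC_COMM_WORLD,nlocal,2,ii,jj,NULL,&A);CHKERRQ(ierr);
  ierr = MatMPIAdjCreateNonemptySubcommMat(A,&Asub);CHKERRQ(ierr); /* assumed public entry point */
  /* Asub is NULL on ranks that own no rows; on the others it lives on the smaller communicator */
  ierr = MatDestroy(&Asub);CHKERRQ(ierr);
  ierr = MatDestroy(&A);CHKERRQ(ierr);
  ierr = PetscFinalize();
  return 0;
}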
Example #3
File: ex11.c  Project: feelpp/debian-petsc
int main(int argc,char **args)
{
  Mat             mesh,dual;
  PetscErrorCode  ierr;
  PetscInt        Nvertices = 6;       /* total number of vertices */
  PetscInt        ncells    = 2;       /* number cells on this process */
  PetscInt        *ii,*jj;
  PetscMPIInt     size,rank;
  MatPartitioning part;
  IS              is;

  PetscInitialize(&argc,&args,(char*)0,help);
  ierr = MPI_Comm_size(MPI_COMM_WORLD,&size);CHKERRQ(ierr);
  if (size != 2) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_SUP,"This example is for exactly two processes");
  ierr = MPI_Comm_rank(MPI_COMM_WORLD,&rank);CHKERRQ(ierr);

  ierr  = PetscMalloc(3*sizeof(PetscInt),&ii);CHKERRQ(ierr);
  ierr  = PetscMalloc(6*sizeof(PetscInt),&jj);CHKERRQ(ierr);
  ii[0] = 0; ii[1] = 3; ii[2] = 6;
  if (!rank) {
    jj[0] = 0; jj[1] = 1; jj[2] = 2; jj[3] = 1; jj[4] = 3; jj[5] = 2;
  } else {
    jj[0] = 1; jj[1] = 4; jj[2] = 5; jj[3] = 1; jj[4] = 5; jj[5] = 3;
  }
  ierr = MatCreateMPIAdj(MPI_COMM_WORLD,ncells,Nvertices,ii,jj,NULL,&mesh);CHKERRQ(ierr);
  ierr = MatMeshToCellGraph(mesh,2,&dual);CHKERRQ(ierr);
  ierr = MatView(dual,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);

  ierr = MatPartitioningCreate(MPI_COMM_WORLD,&part);CHKERRQ(ierr);
  ierr = MatPartitioningSetAdjacency(part,dual);CHKERRQ(ierr);
  ierr = MatPartitioningSetFromOptions(part);CHKERRQ(ierr);
  ierr = MatPartitioningApply(part,&is);CHKERRQ(ierr);
  ierr = ISView(is,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  ierr = ISDestroy(&is);CHKERRQ(ierr);
  ierr = MatPartitioningDestroy(&part);CHKERRQ(ierr);

  ierr = MatDestroy(&mesh);CHKERRQ(ierr);
  ierr = MatDestroy(&dual);CHKERRQ(ierr);
  ierr = PetscFinalize();
  return 0;
}
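Example #3 produces one part per rank, the MatPartitioning default. Below is a minimal sketch of two related follow-up steps, assuming the standard PETSc partitioning API: MatPartitioningSetNParts() to request a specific number of parts and ISPartitioningCount() to count how many cells were assigned to each part. The function name PartitionDualIntoNParts and the choice of nparts are hypothetical.

#include <petscmat.h>

/* Partition a dual graph into nparts parts and print how many cells each part received. */
static PetscErrorCode PartitionDualIntoNParts(Mat dual,PetscInt nparts)
{
  MatPartitioning part;
  IS              is;
  PetscInt        *count;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = MatPartitioningCreate(PetscObjectComm((PetscObject)dual),&part);CHKERRQ(ierr);
  ierr = MatPartitioningSetAdjacency(part,dual);CHKERRQ(ierr);
  ierr = MatPartitioningSetNParts(part,nparts);CHKERRQ(ierr);  /* overrides the one-part-per-rank default */
  ierr = MatPartitioningSetFromOptions(part);CHKERRQ(ierr);
  ierr = MatPartitioningApply(part,&is);CHKERRQ(ierr);

  ierr = PetscCalloc1(nparts,&count);CHKERRQ(ierr);
  ierr = ISPartitioningCount(is,nparts,count);CHKERRQ(ierr);   /* count[p] = cells assigned to part p */
  ierr = PetscIntView(nparts,count,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);

  ierr = PetscFree(count);CHKERRQ(ierr);
  ierr = ISDestroy(&is);CHKERRQ(ierr);
  ierr = MatPartitioningDestroy(&part);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}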
Example #4
File: pmetis.c  Project: erdc-cm/petsc-dev
/*@
     MatMeshToCellGraph -   Uses the ParMETIS package to convert a Mat that represents a mesh to a Mat that represents the graph of the coupling
                       between cells (the "dual" graph) and is suitable for partitioning with the MatPartitioning object. Use this to partition
                       cells of a mesh.

   Collective on Mat

   Input Parameters:
+     mesh - the graph that represents the mesh
-     ncommonnodes - mesh elements that share this number of common nodes are considered neighbors; use 2 for triangles and
                     quadrilaterals, 3 for tetrahedra, and 4 for hexahedra

   Output Parameter:
.     dual - the dual graph

   Notes:
     Currently requires ParMetis to be installed and uses ParMETIS_V3_Mesh2Dual()

     The columns of each row of the Mat mesh are the global vertex numbers of the vertices of that row's cell. The number of rows in mesh is the
     number of cells, and the number of columns is the number of vertices.


   Level: advanced

.seealso: MatMeshToVertexGraph(), MatCreateMPIAdj(), MatPartitioningCreate()


@*/
PetscErrorCode MatMeshToCellGraph(Mat mesh,PetscInt ncommonnodes,Mat *dual)
{
  PetscErrorCode           ierr;
  PetscInt                 *newxadj,*newadjncy;
  PetscInt                 numflag=0;
  Mat_MPIAdj               *adj = (Mat_MPIAdj *)mesh->data,*newadj;
  PetscBool                flg;
  int                      status;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)mesh,MATMPIADJ,&flg);CHKERRQ(ierr);
  if (!flg) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Must use MPIAdj matrix type");

  CHKMEMQ;
  status = ParMETIS_V3_Mesh2Dual(mesh->rmap->range,adj->i,adj->j,&numflag,&ncommonnodes,&newxadj,&newadjncy,&((PetscObject)mesh)->comm);CHKERRQPARMETIS(status);
  CHKMEMQ;
  ierr = MatCreateMPIAdj(((PetscObject)mesh)->comm,mesh->rmap->n,mesh->rmap->N,newxadj,newadjncy,PETSC_NULL,dual);CHKERRQ(ierr);
  newadj = (Mat_MPIAdj *)(*dual)->data;
  newadj->freeaijwithfree = PETSC_TRUE; /* signal the matrix should be freed with system free since space was allocated by ParMETIS */
  PetscFunctionReturn(0);
}
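To make the Notes above concrete, here is a minimal sketch that builds the mesh Mat for two triangles sharing an edge and converts it to the dual graph, assuming a single MPI rank and a ParMETIS-enabled PETSc build; the vertex numbering is illustrative.

#include <petscmat.h>

int main(int argc,char **argv)
{
  Mat            mesh,dual;
  PetscInt       *ii,*jj;
  PetscErrorCode ierr;

  ierr = PetscInitialize(&argc,&argv,NULL,NULL);if (ierr) return ierr;

  ierr  = PetscMalloc1(3,&ii);CHKERRQ(ierr);  /* 2 cells + 1 */
  ierr  = PetscMalloc1(6,&jj);CHKERRQ(ierr);  /* 3 vertices per triangle */
  ii[0] = 0; ii[1] = 3; ii[2] = 6;
  jj[0] = 0; jj[1] = 1; jj[2] = 2;            /* triangle (0,1,2) */
  jj[3] = 1; jj[4] = 3; jj[5] = 2;            /* triangle (1,3,2), shares edge 1-2 */

  ierr = MatCreateMPIAdj(PETSC_COMM_WORLD,2,4,ii,jj,NULL,&mesh);CHKERRQ(ierr); /* 2 cells, 4 vertices */
  ierr = MatMeshToCellGraph(mesh,2,&dual);CHKERRQ(ierr);  /* ncommonnodes = 2: triangles sharing an edge are neighbors */
  ierr = MatView(dual,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);

  ierr = MatDestroy(&dual);CHKERRQ(ierr);
  ierr = MatDestroy(&mesh);CHKERRQ(ierr);
  ierr = PetscFinalize();
  return 0;
}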
Example #5
/*@
     MatMeshToCellGraph -   Uses the ParMETIS package to convert a Mat that represents a mesh to a Mat that represents the graph of the coupling
                       between cells (the "dual" graph) and is suitable for partitioning with the MatPartitioning object. Use this to partition
                       cells of a mesh.

   Collective on Mat

   Input Parameters:
+     mesh - the graph that represents the mesh
-     ncommonnodes - mesh elements that share this number of common nodes are considered neighbors; use 2 for triangles and
                     quadrilaterals, 3 for tetrahedra, and 4 for hexahedra

   Output Parameter:
.     dual - the dual graph

   Notes:
     Currently requires ParMetis to be installed and uses ParMETIS_V3_Mesh2Dual()

$     Each row of the mesh object represents a single cell in the mesh. For triangles it has 3 entries, quadrilaterals 4 entries,
$         tetrahedra 4 entries, and hexahedra 8 entries. You can mix triangles and quadrilaterals in the same mesh, but cannot
$         mix tetrahedra and hexahedra.
$     The columns of each row of the Mat mesh are the global vertex numbers of the vertices of that row's cell.
$     The number of rows in mesh is the number of cells; the number of columns is the number of vertices.


   Level: advanced

.seealso: MatMeshToVertexGraph(), MatCreateMPIAdj(), MatPartitioningCreate()


@*/
PetscErrorCode MatMeshToCellGraph(Mat mesh,PetscInt ncommonnodes,Mat *dual)
{
  PetscErrorCode ierr;
  PetscInt       *newxadj,*newadjncy;
  PetscInt       numflag=0;
  Mat_MPIAdj     *adj   = (Mat_MPIAdj*)mesh->data,*newadj;
  PetscBool      flg;
  int            status;
  MPI_Comm       comm;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)mesh,MATMPIADJ,&flg);CHKERRQ(ierr);
  if (!flg) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Must use MPIAdj matrix type");

  ierr = PetscObjectGetComm((PetscObject)mesh,&comm);CHKERRQ(ierr);
  PetscStackCallParmetis(ParMETIS_V3_Mesh2Dual,((idx_t*)mesh->rmap->range,(idx_t*)adj->i,(idx_t*)adj->j,(idx_t*)&numflag,(idx_t*)&ncommonnodes,(idx_t**)&newxadj,(idx_t**)&newadjncy,&comm));
  ierr   = MatCreateMPIAdj(PetscObjectComm((PetscObject)mesh),mesh->rmap->n,mesh->rmap->N,newxadj,newadjncy,NULL,dual);CHKERRQ(ierr);
  newadj = (Mat_MPIAdj*)(*dual)->data;

  newadj->freeaijwithfree = PETSC_TRUE; /* signal the matrix should be freed with system free since space was allocated by ParMETIS */
  PetscFunctionReturn(0);
}
Example #6
File: ex80.c  Project: 00liujj/petsc
int main(int argc,char **args)
{
  Mat             A;
  PetscErrorCode  ierr;
  PetscMPIInt     rank,size;
  PetscInt        *ia,*ja;
  MatPartitioning part;
  IS              is,isn;

  PetscInitialize(&argc,&args,(char*)0,help);
  ierr = MPI_Comm_size(PETSC_COMM_WORLD,&size);CHKERRQ(ierr);
  if (size != 4) SETERRQ(PETSC_COMM_WORLD,1,"Must run with 4 processors");
  ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);

  ierr = PetscMalloc1(5,&ia);CHKERRQ(ierr);
  ierr = PetscMalloc1(16,&ja);CHKERRQ(ierr);
  if (!rank) {
    ja[0] = 1; ja[1] = 4; ja[2] = 0; ja[3] = 2; ja[4] = 5; ja[5] = 1; ja[6] = 3; ja[7] = 6;
    ja[8] = 2; ja[9] = 7;
    ia[0] = 0; ia[1] = 2; ia[2] = 5; ia[3] = 8; ia[4] = 10;
  } else if (rank == 1) {
    ja[0] = 0; ja[1] = 5; ja[2] = 8; ja[3] = 1; ja[4] = 4; ja[5] = 6; ja[6] = 9; ja[7] = 2;
    ja[8] = 5; ja[9] = 7; ja[10] = 10; ja[11] = 3; ja[12] = 6; ja[13] = 11;
    ia[0] = 0; ia[1] = 3; ia[2] = 7; ia[3] = 11; ia[4] = 14;
  } else if (rank == 2) {
    ja[0] = 4; ja[1] = 9; ja[2] = 12; ja[3] = 5; ja[4] = 8; ja[5] = 10; ja[6] = 13; ja[7] = 6;
    ja[8] = 9; ja[9] = 11; ja[10] = 14; ja[11] = 7; ja[12] = 10; ja[13] = 15;
    ia[0] = 0; ia[1] = 3; ia[2] = 7; ia[3] = 11; ia[4] = 14;
  } else {
    ja[0] = 8; ja[1] = 13; ja[2] = 9; ja[3] = 12; ja[4] = 14; ja[5] = 10; ja[6] = 13; ja[7] = 15;
    ja[8] = 11; ja[9] = 14;
    ia[0] = 0; ia[1] = 2; ia[2] = 5; ia[3] = 8; ia[4] = 10;
  }

  ierr = MatCreateMPIAdj(PETSC_COMM_WORLD,4,16,ia,ja,NULL,&A);CHKERRQ(ierr);
  ierr = MatView(A,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);

  /*
       Partition the graph of the matrix
  */
  ierr = MatPartitioningCreate(PETSC_COMM_WORLD,&part);CHKERRQ(ierr);
  ierr = MatPartitioningSetAdjacency(part,A);CHKERRQ(ierr);
  ierr = MatPartitioningSetFromOptions(part);CHKERRQ(ierr);
  /* get new processor owner number of each vertex */
  ierr = MatPartitioningApply(part,&is);CHKERRQ(ierr);
  /* get new global number of each old global number */
  ierr = ISPartitioningToNumbering(is,&isn);CHKERRQ(ierr);
  ierr = ISView(isn,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  ierr = ISDestroy(&is);CHKERRQ(ierr);

  ierr = ISDestroy(&isn);CHKERRQ(ierr);
  ierr = MatPartitioningDestroy(&part);CHKERRQ(ierr);

  /*
       Free work space.  All PETSc objects should be destroyed when they
       are no longer needed.
  */
  ierr = MatDestroy(&A);CHKERRQ(ierr);


  ierr = PetscFinalize();
  return 0;
}
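ISPartitioningToNumbering() in ex80.c yields, for each old global row, its new global number. A minimal sketch of a possible follow-up step, assuming the standard IS API: ISAllGather() concatenates the distributed numbering so that every rank holds the complete old-to-new map (handy for building scatters or for debugging). The helper name ViewFullNumbering is hypothetical.

#include <petscmat.h>

/* Gather the full old-to-new numbering onto every rank and print it. */
static PetscErrorCode ViewFullNumbering(IS isn)
{
  IS             isall;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = ISAllGather(isn,&isall);CHKERRQ(ierr);               /* sequential IS with all entries */
  ierr = ISView(isall,PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr);
  ierr = ISDestroy(&isall);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}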
Example #7
File: mpiadj.c  Project: pombredanne/petsc
static PetscErrorCode MatGetSubMatrices_MPIAdj_Private(Mat mat,PetscInt n,const IS irow[],const IS icol[],PetscBool subcomm,MatReuse scall,Mat *submat[])
{
  PetscInt           i,irow_n,icol_n,*sxadj,*sadjncy,*svalues;
  PetscInt          *indices,nindx,j,k,loc;
  PetscMPIInt        issame;
  const PetscInt    *irow_indices,*icol_indices;
  MPI_Comm           scomm_row,scomm_col,scomm_mat;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  nindx = 0;
  /*
   * Estimate a maximum number for allocating memory
   */
  for (i=0; i<n; i++) {
    ierr  = ISGetLocalSize(irow[i],&irow_n);CHKERRQ(ierr);
    ierr  = ISGetLocalSize(icol[i],&icol_n);CHKERRQ(ierr);
    nindx = nindx > (irow_n+icol_n) ? nindx : (irow_n+icol_n);
  }
  ierr = PetscCalloc1(nindx,&indices);CHKERRQ(ierr);
  /* construct a submat */
  for (i=0; i<n; i++) {
    /* comms */
    if (subcomm) {
      ierr = PetscObjectGetComm((PetscObject)irow[i],&scomm_row);CHKERRQ(ierr);
      ierr = PetscObjectGetComm((PetscObject)icol[i],&scomm_col);CHKERRQ(ierr);
      ierr = MPI_Comm_compare(scomm_row,scomm_col,&issame);CHKERRQ(ierr);
      if (issame != MPI_IDENT) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"row index set must have the same comm as the col index set\n");
      ierr = MPI_Comm_compare(scomm_row,PETSC_COMM_SELF,&issame);CHKERRQ(ierr);
      if (issame == MPI_IDENT) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"can not use PETSC_COMM_SELF as comm when extracting a parallel submatrix\n");
    } else {
      scomm_row = PETSC_COMM_SELF;
    }
    /* get sub-matrix data */
    sxadj = NULL; sadjncy = NULL; svalues = NULL;
    ierr = MatGetSubMatrix_MPIAdj_data(mat,irow[i],icol[i],&sxadj,&sadjncy,&svalues);CHKERRQ(ierr);
    ierr = ISGetLocalSize(irow[i],&irow_n);CHKERRQ(ierr);
    ierr = ISGetLocalSize(icol[i],&icol_n);CHKERRQ(ierr);
    ierr = ISGetIndices(irow[i],&irow_indices);CHKERRQ(ierr);
    ierr = PetscMemcpy(indices,irow_indices,sizeof(PetscInt)*irow_n);CHKERRQ(ierr);
    ierr = ISRestoreIndices(irow[i],&irow_indices);CHKERRQ(ierr);
    ierr = ISGetIndices(icol[i],&icol_indices);CHKERRQ(ierr);
    ierr = PetscMemcpy(indices+irow_n,icol_indices,sizeof(PetscInt)*icol_n);CHKERRQ(ierr);
    ierr = ISRestoreIndices(icol[i],&icol_indices);CHKERRQ(ierr);
    nindx = irow_n+icol_n;
    ierr = PetscSortRemoveDupsInt(&nindx,indices);CHKERRQ(ierr);
    /* renumber columns */
    for (j=0; j<irow_n; j++) {
      for (k=sxadj[j]; k<sxadj[j+1]; k++) {
        ierr = PetscFindInt(sadjncy[k],nindx,indices,&loc);CHKERRQ(ierr);
#if PETSC_USE_DEBUG
        if (loc<0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"can not find col %d \n",sadjncy[k]);
#endif
        sadjncy[k] = loc;
      }
    }
    if (scall == MAT_INITIAL_MATRIX) {
      ierr = MatCreateMPIAdj(scomm_row,irow_n,icol_n,sxadj,sadjncy,svalues,submat[i]);CHKERRQ(ierr);
    } else {
      Mat        sadj = *(submat[i]);
      Mat_MPIAdj *sa  = (Mat_MPIAdj*)((sadj)->data);
      ierr = PetscObjectGetComm((PetscObject)sadj,&scomm_mat);CHKERRQ(ierr);
      ierr = MPI_Comm_compare(scomm_row,scomm_mat,&issame);CHKERRQ(ierr);
      if (issame != MPI_IDENT) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"submatrix must have the same comm as the col index set\n");
      ierr = PetscMemcpy(sa->i,sxadj,sizeof(PetscInt)*(irow_n+1));CHKERRQ(ierr);
      ierr = PetscMemcpy(sa->j,sadjncy,sizeof(PetscInt)*sxadj[irow_n]);CHKERRQ(ierr);
      if (svalues) {ierr = PetscMemcpy(sa->values,svalues,sizeof(PetscInt)*sxadj[irow_n]);CHKERRQ(ierr);}
      ierr = PetscFree(sxadj);CHKERRQ(ierr);
      ierr = PetscFree(sadjncy);CHKERRQ(ierr);
      if (svalues) {ierr = PetscFree(svalues);CHKERRQ(ierr);}
    }
  }
  ierr = PetscFree(indices);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
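The private routine above is normally reached through PETSc's generic submatrix extraction rather than called directly. Below is a minimal sketch of driving it that way; it assumes the public entry point in this PETSc version is MatGetSubMatrices() (renamed MatCreateSubMatrices() in later releases) and that it is wired to the MPIAdj implementation. The helper name ExtractLocalBlock is hypothetical.

#include <petscmat.h>

/* Extract the locally owned rows of an MPIAdj matrix A (all columns) as a sequential submatrix. */
static PetscErrorCode ExtractLocalBlock(Mat A)
{
  IS             irow,icol;
  Mat            *submat;
  PetscInt       rstart,rend,N;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
  ierr = MatGetSize(A,NULL,&N);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,rend-rstart,rstart,1,&irow);CHKERRQ(ierr); /* locally owned rows */
  ierr = ISCreateStride(PETSC_COMM_SELF,N,0,1,&icol);CHKERRQ(ierr);                /* all columns */
  ierr = MatGetSubMatrices(A,1,&irow,&icol,MAT_INITIAL_MATRIX,&submat);CHKERRQ(ierr);
  ierr = MatView(submat[0],PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr);
  ierr = MatDestroyMatrices(1,&submat);CHKERRQ(ierr);
  ierr = ISDestroy(&irow);CHKERRQ(ierr);
  ierr = ISDestroy(&icol);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}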