/*
   DMCreateMatrix_Composite - Creates the Jacobian matrix for a composite DM,
   dispatching to the MATNEST implementation when that type is requested and to
   the monolithic AIJ implementation otherwise.

   Input Parameters:
+  dm    - the composite DM
-  mtype - requested matrix type; may be NULL, in which case MATAIJ is used

   Output Parameter:
.  J - the created matrix, with its (block) local-to-global mappings attached
*/
PetscErrorCode DMCreateMatrix_Composite(DM dm,MatType mtype,Mat *J)
{
  PetscErrorCode         ierr;
  PetscBool              usenest;
  ISLocalToGlobalMapping ltogmap,ltogmapb;

  PetscFunctionBegin;
  ierr = DMSetUp(dm);CHKERRQ(ierr);
  ierr = PetscStrcmp(mtype,MATNEST,&usenest);CHKERRQ(ierr);
  if (usenest) {
    ierr = DMCreateMatrix_Composite_Nest(dm,mtype,J);CHKERRQ(ierr);
  } else {
    /* NULL mtype falls back to the default AIJ format */
    ierr = DMCreateMatrix_Composite_AIJ(dm,mtype ? mtype : MATAIJ,J);CHKERRQ(ierr);
  }
  /* Fix: the source had "dm,<ogmap" — the '&' of "&ltogmap" was lost to HTML
     entity mangling ("&lt" rendered as '<'); restore the address-of operators. */
  ierr = DMGetLocalToGlobalMapping(dm,&ltogmap);CHKERRQ(ierr);
  ierr = DMGetLocalToGlobalMappingBlock(dm,&ltogmapb);CHKERRQ(ierr);
  /* Same mapping for rows and columns: the Jacobian is square on this DM */
  ierr = MatSetLocalToGlobalMapping(*J,ltogmap,ltogmap);CHKERRQ(ierr);
  ierr = MatSetLocalToGlobalMappingBlock(*J,ltogmapb,ltogmapb);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
/*
   DMCreateMatrix_Redundant - Creates the (dense-pattern) Jacobian for a redundant DM.

   Every locally owned row is preallocated and explicitly zeroed for all N global
   columns, since a redundant DM couples each local row to the entire system.

   Input Parameter:
.  dm - the redundant DM (data cast to DM_Redundant: n local / N global rows)

   Output Parameter:
.  J - assembled matrix of zeros with full per-row preallocation
*/
static PetscErrorCode DMCreateMatrix_Redundant(DM dm,Mat *J)
{
  DM_Redundant           *red = (DM_Redundant*)dm->data;
  PetscErrorCode         ierr;
  ISLocalToGlobalMapping ltog,ltogb;
  PetscInt               i,rstart,rend,*cols;
  PetscScalar            *vals;

  PetscFunctionBegin;
  ierr = MatCreate(PetscObjectComm((PetscObject)dm),J);CHKERRQ(ierr);
  ierr = MatSetSizes(*J,red->n,red->n,red->N,red->N);CHKERRQ(ierr);
  ierr = MatSetType(*J,dm->mattype);CHKERRQ(ierr);
  /* Preallocate for all supported formats; only the call matching the actual
     matrix type takes effect, the others are no-ops. Diagonal part holds the
     n local columns, off-diagonal part the remaining N-n. */
  ierr = MatSeqAIJSetPreallocation(*J,red->n,NULL);CHKERRQ(ierr);
  ierr = MatSeqBAIJSetPreallocation(*J,1,red->n,NULL);CHKERRQ(ierr);
  ierr = MatMPIAIJSetPreallocation(*J,red->n,NULL,red->N-red->n,NULL);CHKERRQ(ierr);
  ierr = MatMPIBAIJSetPreallocation(*J,1,red->n,NULL,red->N-red->n,NULL);CHKERRQ(ierr);
  /* Fix: the source had "dm,<og" / "dm,<ogb" — the '&' of "&ltog"/"&ltogb" was
     lost to HTML entity mangling ("&lt" rendered as '<'); restore it. */
  ierr = DMGetLocalToGlobalMapping(dm,&ltog);CHKERRQ(ierr);
  ierr = DMGetLocalToGlobalMappingBlock(dm,&ltogb);CHKERRQ(ierr);
  ierr = MatSetLocalToGlobalMapping(*J,ltog,ltog);CHKERRQ(ierr);
  ierr = MatSetLocalToGlobalMappingBlock(*J,ltogb,ltogb);CHKERRQ(ierr);
  /* Insert explicit zeros in every owned row across all N columns so the
     nonzero pattern is fully populated before assembly. */
  ierr = PetscMalloc2(red->N,&cols,red->N,&vals);CHKERRQ(ierr);
  for (i=0; i<red->N; i++) {
    cols[i] = i;
    vals[i] = 0.0;
  }
  ierr = MatGetOwnershipRange(*J,&rstart,&rend);CHKERRQ(ierr);
  for (i=rstart; i<rend; i++) {
    ierr = MatSetValues(*J,1,&i,red->N,cols,vals,INSERT_VALUES);CHKERRQ(ierr);
  }
  ierr = PetscFree2(cols,vals);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(*J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
/*@C
    SlicedGetMatrix - Creates a matrix with the correct parallel layout required for
      computing the Jacobian on a function defined using the information in Sliced.

    Collective on Sliced

    Input Parameter:
+   slice - the slice object
-   mtype - Supported types are MATSEQAIJ, MATMPIAIJ, MATSEQBAIJ, MATMPIBAIJ, MATSEQSBAIJ,
            MATMPISBAIJ, or any type which inherits from one of these (such as MATAIJ, MATLUSOL, etc.).

    Output Parameters:
.   J  - matrix with the correct nonzero preallocation
         (obviously without the correct Jacobian values)

    Level: advanced

    Notes: This properly preallocates the number of nonzeros in the sparse matrix so you
       do not need to do it yourself.

.seealso ISColoringView(), ISColoringGetIS(), MatFDColoringCreate(), DASetBlockFills()

@*/
PetscErrorCode PETSCDM_DLLEXPORT SlicedGetMatrix(Sliced slice, const MatType mtype,Mat *J)
{
  PetscErrorCode         ierr;
  PetscInt               *globals,*sd_nnz,*so_nnz,rstart,bs,i;
  ISLocalToGlobalMapping lmap,blmap;
  void                   (*aij)(void) = PETSC_NULL;

  PetscFunctionBegin;
  bs = slice->bs;
  ierr = MatCreate(((PetscObject)slice)->comm,J);CHKERRQ(ierr);
  ierr = MatSetSizes(*J,slice->n*bs,slice->n*bs,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
  ierr = MatSetType(*J,mtype);CHKERRQ(ierr);
  /* Blocked preallocation: these calls are no-ops unless *J is actually a (SEQ/MPI)BAIJ matrix */
  ierr = MatSeqBAIJSetPreallocation(*J,bs,slice->d_nz,slice->d_nnz);CHKERRQ(ierr);
  ierr = MatMPIBAIJSetPreallocation(*J,bs,slice->d_nz,slice->d_nnz,slice->o_nz,slice->o_nnz);CHKERRQ(ierr);
  /* In general, we have to do extra work to preallocate for scalar (AIJ) matrices so we check whether it will do any
   * good before going on with it. */
  ierr = PetscObjectQueryFunction((PetscObject)*J,"MatMPIAIJSetPreallocation_C",&aij);CHKERRQ(ierr);
  if (!aij) {
    ierr = PetscObjectQueryFunction((PetscObject)*J,"MatSeqAIJSetPreallocation_C",&aij);CHKERRQ(ierr);
  }
  if (aij) {
    if (bs == 1) {
      /* Block size 1: the block-row counts are already per-scalar-row, use them directly */
      ierr = MatSeqAIJSetPreallocation(*J,slice->d_nz,slice->d_nnz);CHKERRQ(ierr);
      ierr = MatMPIAIJSetPreallocation(*J,slice->d_nz,slice->d_nnz,slice->o_nz,slice->o_nnz);CHKERRQ(ierr);
    } else if (!slice->d_nnz) {
      /* No per-row counts supplied: scale the flat per-block-row estimate by the block size */
      ierr = MatSeqAIJSetPreallocation(*J,slice->d_nz*bs,PETSC_NULL);CHKERRQ(ierr);
      ierr = MatMPIAIJSetPreallocation(*J,slice->d_nz*bs,PETSC_NULL,slice->o_nz*bs,PETSC_NULL);CHKERRQ(ierr);
    } else {
      /* The user has provided preallocation per block-row, convert it to per scalar-row respecting SlicedSetBlockFills() if applicable */
      /* so_nnz is only allocated (size > 0) when o_nnz was provided; note the (!!slice->o_nnz) factor */
      ierr = PetscMalloc2(slice->n*bs,PetscInt,&sd_nnz,(!!slice->o_nnz)*slice->n*bs,PetscInt,&so_nnz);CHKERRQ(ierr);
      for (i=0; i<slice->n*bs; i++) {
        /* Per scalar-row i: (d_nnz-1) neighbor blocks contribute at the off-diagonal fill
           density for this row-within-block (i%bs), plus one block at the diagonal fill
           density; a missing fill pattern means a dense bs-wide block.
           NOTE(review): this presumes d_nnz[] counts include the diagonal block itself
           (hence the -1) — confirm against the Sliced preallocation convention. */
        sd_nnz[i] = (slice->d_nnz[i/bs]-1) * (slice->ofill ? slice->ofill->i[i%bs+1]-slice->ofill->i[i%bs] : bs)
                    + (slice->dfill ? slice->dfill->i[i%bs+1]-slice->dfill->i[i%bs] : bs);
        if (so_nnz) {
          so_nnz[i] = slice->o_nnz[i/bs] * (slice->ofill ? slice->ofill->i[i%bs+1]-slice->ofill->i[i%bs] : bs);
        }
      }
      ierr = MatSeqAIJSetPreallocation(*J,slice->d_nz*bs,sd_nnz);CHKERRQ(ierr);
      ierr = MatMPIAIJSetPreallocation(*J,slice->d_nz*bs,sd_nnz,slice->o_nz*bs,so_nnz);CHKERRQ(ierr);
      ierr = PetscFree2(sd_nnz,so_nnz);CHKERRQ(ierr);
    }
  }
  ierr = MatSetBlockSize(*J,bs);CHKERRQ(ierr);

  /* Set up the local to global map.  For the scalar map, we have to translate to entry-wise indexing instead of block-wise. */
  ierr = PetscMalloc((slice->n+slice->Nghosts)*bs*sizeof(PetscInt),&globals);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(*J,&rstart,PETSC_NULL);CHKERRQ(ierr);
  /* Owned entries map contiguously starting at this process's first global row */
  for (i=0; i<slice->n*bs; i++) {
    globals[i] = rstart + i;
  }
  /* Ghost entries: expand each ghost block index into bs consecutive scalar indices */
  for (i=0; i<slice->Nghosts*bs; i++) {
    globals[slice->n*bs+i] = slice->ghosts[i/bs]*bs + i%bs;
  }
  ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,(slice->n+slice->Nghosts)*bs,globals,&lmap);CHKERRQ(ierr);
  ierr = PetscFree(globals);CHKERRQ(ierr);
  /* Derive the block mapping from the scalar one rather than building it separately */
  ierr = ISLocalToGlobalMappingBlock(lmap,bs,&blmap);CHKERRQ(ierr);
  ierr = MatSetLocalToGlobalMapping(*J,lmap);CHKERRQ(ierr);
  ierr = MatSetLocalToGlobalMappingBlock(*J,blmap);CHKERRQ(ierr);
  /* The matrix holds its own references; drop ours (old-style destroy without '&'
     matches this API generation used throughout the function) */
  ierr = ISLocalToGlobalMappingDestroy(lmap);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(blmap);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
/*@
   MatCreateLocalRef - Gets a logical reference to a local submatrix, for use in assembly

   Not Collective

   Input Arguments:
+  A - Full matrix, generally parallel
.  isrow - Local index set for the rows
-  iscol - Local index set for the columns

   Output Arguments:
.  newmat - New serial Mat

   Level: developer

   Notes:
   Most will use MatGetLocalSubMatrix() which returns a real matrix corresponding to the local
   block if it available, such as with matrix formats that store these blocks separately.

   The new matrix forwards MatSetValuesLocal() and MatSetValuesBlockedLocal() to the global system.
   In general, it does not define MatMult() or any other functions.  Local submatrices can be nested.

.seealso: MatSetValuesLocal(), MatSetValuesBlockedLocal(), MatGetLocalSubMatrix(), MatCreateSubMatrix()
@*/
PetscErrorCode MatCreateLocalRef(Mat A,IS isrow,IS iscol,Mat *newmat)
{
  PetscErrorCode ierr;
  Mat_LocalRef   *lr;
  Mat            B;
  PetscInt       m,n;
  PetscBool      islr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidHeaderSpecific(isrow,IS_CLASSID,2);
  PetscValidHeaderSpecific(iscol,IS_CLASSID,3);
  PetscValidPointer(newmat,4);
  *newmat = 0;

  /* The "matrix" is serial and sized by the local index sets; it stores no entries itself */
  ierr = MatCreate(PETSC_COMM_SELF,&B);CHKERRQ(ierr);
  ierr = ISGetLocalSize(isrow,&m);CHKERRQ(ierr);
  ierr = ISGetLocalSize(iscol,&n);CHKERRQ(ierr);
  ierr = MatSetSizes(B,m,n,m,n);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)B,MATLOCALREF);CHKERRQ(ierr);
  ierr = MatSetUp(B);CHKERRQ(ierr);

  B->ops->destroy = MatDestroy_LocalRef;

  ierr = PetscNewLog(B,Mat_LocalRef,&lr);CHKERRQ(ierr);
  B->data = (void*)lr;

  /* If A is itself a LocalRef, chain straight to its top-level parent so nested
     refs all forward to the same real matrix */
  ierr = PetscObjectTypeCompare((PetscObject)A,MATLOCALREF,&islr);CHKERRQ(ierr);
  if (islr) {
    Mat_LocalRef *alr = (Mat_LocalRef*)A->data;
    lr->Top = alr->Top;
  } else {
    /* This does not increase the reference count because MatLocalRef is not allowed to live longer than its parent */
    lr->Top = A;
  }
  {
    ISLocalToGlobalMapping rltog,cltog;
    PetscInt               abs,rbs,cbs;

    /* We will translate directly to global indices for the top level */
    lr->SetValues        = MatSetValues;
    lr->SetValuesBlocked = MatSetValuesBlocked;

    B->ops->setvalueslocal = MatSetValuesLocal_LocalRef_Scalar;
    /* Compose the submatrix index set with A's local-to-global map so B's "local"
       indices resolve straight to global indices of the top-level matrix */
    ierr = ISL2GCompose(isrow,A->rmap->mapping,&rltog);CHKERRQ(ierr);
    if (isrow == iscol && A->rmap->mapping == A->cmap->mapping) {
      /* Rows and columns share one mapping; take a reference instead of rebuilding it */
      ierr = PetscObjectReference((PetscObject)rltog);CHKERRQ(ierr);
      cltog = rltog;
    } else {
      ierr = ISL2GCompose(iscol,A->cmap->mapping,&cltog);CHKERRQ(ierr);
    }
    ierr = MatSetLocalToGlobalMapping(B,rltog,cltog);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&rltog);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&cltog);CHKERRQ(ierr);

    ierr = MatGetBlockSize(A,&abs);CHKERRQ(ierr);
    ierr = ISGetBlockSize(isrow,&rbs);CHKERRQ(ierr);
    ierr = ISGetBlockSize(iscol,&cbs);CHKERRQ(ierr);
    if (rbs == cbs) {           /* submatrix has block structure, so user can insert values with blocked interface */
      ierr = PetscLayoutSetBlockSize(B->rmap,rbs);CHKERRQ(ierr);
      ierr = PetscLayoutSetBlockSize(B->cmap,cbs);CHKERRQ(ierr);
      if (abs != rbs || abs == 1) {
        /* Top-level matrix has different block size, so we have to call its scalar insertion interface */
        B->ops->setvaluesblockedlocal = MatSetValuesBlockedLocal_LocalRef_Scalar;
      } else {
        /* Block sizes match so we can forward values to the top level using the block interface */
        B->ops->setvaluesblockedlocal = MatSetValuesBlockedLocal_LocalRef_Block;

        /* Same composition as above, but through the block-wise mappings */
        ierr = ISL2GComposeBlock(isrow,A->rmap->bmapping,&rltog);CHKERRQ(ierr);
        if (isrow == iscol && A->rmap->bmapping == A->cmap->bmapping) {
          ierr = PetscObjectReference((PetscObject)rltog);CHKERRQ(ierr);
          cltog = rltog;
        } else {
          ierr = ISL2GComposeBlock(iscol,A->cmap->bmapping,&cltog);CHKERRQ(ierr);
        }
        ierr = MatSetLocalToGlobalMappingBlock(B,rltog,cltog);CHKERRQ(ierr);
        ierr = ISLocalToGlobalMappingDestroy(&rltog);CHKERRQ(ierr);
        ierr = ISLocalToGlobalMappingDestroy(&cltog);CHKERRQ(ierr);
      }
    }
  }
  *newmat = B;
  PetscFunctionReturn(0);
}