/*@C
   SlicedCreateGlobalVector - Creates a vector of the correct size to be gathered into by the slice.

   Collective on Sliced

   Input Parameter:
.  slice - the slice object

   Output Parameter:
.  gvec - the global vector

   Level: advanced

   Notes:
   Once this has been created you cannot add additional arrays or vectors to be packed.

.seealso SlicedDestroy(), SlicedCreate(), SlicedGetGlobalIndices()
@*/
PetscErrorCode PETSCDM_DLLEXPORT SlicedCreateGlobalVector(Sliced slice,Vec *gvec)
{
  PetscErrorCode ierr;
  PetscInt       bs,refct,g;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(slice,DM_COOKIE,1);
  PetscValidPointer(gvec,2);
  *gvec = 0;
  if (!slice->globalvector) {
    bs = slice->bs;
    /* VecCreateGhostBlock() here wants ghosted blocks identified by their first
       entry rather than by block index, so temporarily scale the ghost array
       for the call and then undo the scaling. */
    for (g=0; g<slice->Nghosts; g++) slice->ghosts[g] *= bs;
    ierr = VecCreateGhostBlock(((PetscObject)slice)->comm,bs,slice->n*bs,PETSC_DETERMINE,slice->Nghosts,slice->ghosts,&slice->globalvector);CHKERRQ(ierr);
    for (g=0; g<slice->Nghosts; g++) slice->ghosts[g] /= bs;
    *gvec = slice->globalvector;
    ierr  = PetscObjectReference((PetscObject)*gvec);CHKERRQ(ierr);
  } else {
    ierr = PetscObjectGetReference((PetscObject)slice->globalvector,&refct);CHKERRQ(ierr);
    if (refct == 1) {
      /* We hold the only reference to the cached vector, so it can be zeroed
         and handed out directly. */
      *gvec = slice->globalvector;
      ierr  = PetscObjectReference((PetscObject)*gvec);CHKERRQ(ierr);
      ierr  = VecZeroEntries(*gvec);CHKERRQ(ierr);
    } else {
      /* Somebody else still references the cached vector, so duplicate it. */
      ierr = VecDuplicate(slice->globalvector,gvec);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}
dErr VecCreateDohp(MPI_Comm comm,dInt bs,dInt n,dInt nc,dInt nghosts,const dInt ghosts[],Vec *v) { Vec_MPI *vmpi; Vec vc,vg; dScalar *a; dErr err; dFunctionBegin; dValidPointer(v,7); *v = 0; err = VecCreateGhostBlock(comm,bs,nc*bs,PETSC_DECIDE,nghosts,ghosts,&vc);dCHK(err); err = VecGetArray(vc,&a);dCHK(err); err = VecCreateMPIWithArray(comm,n*bs,PETSC_DECIDE,a,&vg);dCHK(err); err = VecRestoreArray(vc,&a);dCHK(err); err = VecSetBlockSize(vg,bs);dCHK(err); vmpi = vg->data; if (vmpi->localrep) dERROR(PETSC_COMM_SELF,1,"Vector has localrep, expected no localrep"); vmpi->localrep = vc; /* subvert this field to mean closed rep */ /* Since we subvect .localrep, VecDestroy_MPI will automatically destroy the closed form */ vg->ops->duplicate = VecDuplicate_Dohp; //vg->ops->destroy = VecDestroy_Dohp; /* It might be useful to set the (block) LocalToGlobal mapping here, but in the use case I have in mind, the user is * always working with the closed form anyway (in function evaluation). The \e matrix does need a customized * LocalToGlobal mapping. */ err = PetscObjectChangeTypeName((dObject)vg,VECDOHP);dCHK(err); *v = vg; dFunctionReturn(0); }
/* Build the global vector for a "sliced" DM as a ghosted block vector and
 * attach the DM to it. */
static PetscErrorCode DMCreateGlobalVector_Sliced(DM dm,Vec *gvec)
{
  DM_Sliced      *sliced = (DM_Sliced*)dm->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(gvec,2);
  *gvec = 0;
  ierr = VecCreateGhostBlock(((PetscObject)dm)->comm,sliced->bs,sliced->n*sliced->bs,PETSC_DETERMINE,sliced->Nghosts,sliced->ghosts,gvec);CHKERRQ(ierr);
  ierr = VecSetDM(*gvec,dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
LData::LData(const std::string& name,
             const unsigned int num_local_nodes,
             const unsigned int depth,
             const std::vector<int>& nonlocal_petsc_indices)
    : d_name(name),
      d_global_node_count(0),
      d_local_node_count(0),
      d_ghost_node_count(0),
      d_depth(depth),
      d_nonlocal_petsc_indices(nonlocal_petsc_indices),
      d_global_vec(NULL),
      d_managing_petsc_vec(true),
      d_array(NULL),
      d_boost_array(NULL),
      d_boost_local_array(NULL),
      d_boost_vec_array(NULL),
      d_boost_local_vec_array(NULL),
      d_ghosted_local_vec(NULL),
      d_ghosted_local_array(NULL),
      d_boost_ghosted_local_array(NULL),
      d_boost_vec_ghosted_local_array(NULL)
{
    // Allocate the PETSc Vec that provides the storage for the Lagrangian
    // data.  Depth-1 data uses a plain ghosted vector; deeper data uses a
    // block vector with block size d_depth.
    const int num_ghosts = static_cast<int>(d_nonlocal_petsc_indices.size());
    int* const ghost_idxs = d_nonlocal_petsc_indices.empty() ? NULL : &d_nonlocal_petsc_indices[0];
    int ierr;
    if (d_depth == 1)
    {
        ierr = VecCreateGhost(
            PETSC_COMM_WORLD, num_local_nodes, PETSC_DECIDE, num_ghosts, ghost_idxs, &d_global_vec);
        IBTK_CHKERRQ(ierr);
    }
    else
    {
        ierr = VecCreateGhostBlock(PETSC_COMM_WORLD,
                                   d_depth,
                                   d_depth * num_local_nodes,
                                   PETSC_DECIDE,
                                   num_ghosts,
                                   ghost_idxs,
                                   &d_global_vec);
        IBTK_CHKERRQ(ierr);
    }

    // Record the global, local, and ghost node counts.
    int global_size;
    ierr = VecGetSize(d_global_vec, &global_size);
    IBTK_CHKERRQ(ierr);
#if !defined(NDEBUG)
    TBOX_ASSERT(global_size >= 0);
#endif
    d_global_node_count = global_size;
    d_global_node_count /= d_depth;
    d_local_node_count = num_local_nodes;
    d_ghost_node_count = num_ghosts;
    return;
} // LData
/* Fortran-callable wrapper for VecCreateGhostBlock(): converts the Fortran
 * communicator handle and dereferences the scalar arguments before forwarding
 * the call; the status is returned through __ierr. */
PETSC_EXTERN void PETSC_STDCALL veccreateghostblock_(MPI_Fint *comm,PetscInt *bs,PetscInt *n,PetscInt *N,PetscInt *nghost,PetscInt ghosts[],Vec *vv,int *__ierr)
{
  MPI_Comm ccomm = MPI_Comm_f2c(*comm);
  *__ierr = VecCreateGhostBlock(ccomm,*bs,*n,*N,*nghost,ghosts,vv);
}
LData::LData(Pointer<Database> db)
    : d_name(db->getString("d_name")),
      d_global_node_count(0),
      d_local_node_count(0),
      d_ghost_node_count(0),
      d_depth(db->getInteger("d_depth")),
      d_nonlocal_petsc_indices(),
      d_global_vec(NULL),
      d_array(NULL),
      d_boost_array(NULL),
      d_boost_local_array(NULL),
      d_boost_vec_array(NULL),
      d_boost_local_vec_array(NULL),
      d_ghosted_local_vec(NULL),
      d_ghosted_local_array(NULL),
      d_boost_ghosted_local_array(NULL),
      d_boost_vec_ghosted_local_array(NULL)
{
    // Restore the node counts and the nonlocal (ghost) index list from the
    // restart database.
    const int num_local_nodes = db->getInteger("num_local_nodes");
    const int num_ghost_nodes = db->getInteger("num_ghost_nodes");
    d_nonlocal_petsc_indices.resize(num_ghost_nodes);
    if (num_ghost_nodes > 0)
    {
        db->getIntegerArray("d_nonlocal_petsc_indices",
                            d_nonlocal_petsc_indices.empty() ? NULL : &d_nonlocal_petsc_indices[0],
                            num_ghost_nodes);
    }

    // Allocate the PETSc Vec that provides the storage for the Lagrangian
    // data.  Depth-1 data uses a plain ghosted vector; deeper data uses a
    // block vector with block size d_depth.
    int ierr;
    if (d_depth == 1)
    {
        ierr = VecCreateGhost(PETSC_COMM_WORLD,
                              num_local_nodes,
                              PETSC_DECIDE,
                              static_cast<int>(d_nonlocal_petsc_indices.size()),
                              d_nonlocal_petsc_indices.empty() ? NULL : &d_nonlocal_petsc_indices[0],
                              &d_global_vec);
        IBTK_CHKERRQ(ierr);
    }
    else
    {
        ierr = VecCreateGhostBlock(PETSC_COMM_WORLD,
                                   d_depth,
                                   d_depth * num_local_nodes,
                                   PETSC_DECIDE,
                                   static_cast<int>(d_nonlocal_petsc_indices.size()),
                                   d_nonlocal_petsc_indices.empty() ? NULL : &d_nonlocal_petsc_indices[0],
                                   &d_global_vec);
        IBTK_CHKERRQ(ierr);
    }

    // Record the global, local, and ghost node counts.
    int global_size;
    ierr = VecGetSize(d_global_vec, &global_size);
    IBTK_CHKERRQ(ierr);
#if !defined(NDEBUG)
    TBOX_ASSERT(global_size >= 0);
#endif
    d_global_node_count = global_size;
    d_global_node_count /= d_depth;
    d_local_node_count = num_local_nodes;
    d_ghost_node_count = static_cast<int>(d_nonlocal_petsc_indices.size());

    // Restore the stored values (owned + ghost entries) from the database.
    double* const vals = getGhostedLocalFormVecArray()->data();
    if (num_local_nodes + num_ghost_nodes > 0)
    {
        db->getDoubleArray("vals", vals, d_depth * (num_local_nodes + num_ghost_nodes));
    }
    restoreArrays();
    return;
} // LData