Actual source code: sliced.c

#include <petscdmsliced.h>
#include <petscmat.h>
#include <petsc/private/dmimpl.h>

/* CSR storage of the nonzero structure of a bs*bs matrix */
typedef struct {
  PetscInt bs,nz,*i,*j;
} DMSlicedBlockFills;
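
/* Worked example (illustrative, not in the original source): for bs=3 and the fill
 * pattern
 *   1 0 1
 *   0 1 0
 *   1 0 1
 * the CSR arrays are nz=5, i={0,2,3,5}, j={0,2,1,0,2}; i[r]..i[r+1] delimits the
 * column indices of row r within j. */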

typedef struct {
  PetscInt           bs,n,N,Nghosts,*ghosts;
  PetscInt           d_nz,o_nz,*d_nnz,*o_nnz;
  DMSlicedBlockFills *dfill,*ofill;
} DM_Sliced;

PetscErrorCode  DMCreateMatrix_Sliced(DM dm, Mat *J)
{
  PetscInt               *globals,*sd_nnz,*so_nnz,rstart,bs,i;
  ISLocalToGlobalMapping lmap;
  void                   (*aij)(void) = NULL;
  DM_Sliced              *slice = (DM_Sliced*)dm->data;

  bs   = slice->bs;
  MatCreate(PetscObjectComm((PetscObject)dm),J);
  MatSetSizes(*J,slice->n*bs,slice->n*bs,PETSC_DETERMINE,PETSC_DETERMINE);
  MatSetBlockSize(*J,bs);
  MatSetType(*J,dm->mattype);
  MatSeqBAIJSetPreallocation(*J,bs,slice->d_nz,slice->d_nnz);
  MatMPIBAIJSetPreallocation(*J,bs,slice->d_nz,slice->d_nnz,slice->o_nz,slice->o_nnz);
  /* In general, we have to do extra work to preallocate for scalar (AIJ) matrices, so we
   * check whether it will do any good before going on with it. */
  PetscObjectQueryFunction((PetscObject)*J,"MatMPIAIJSetPreallocation_C",&aij);
  if (!aij) {
    PetscObjectQueryFunction((PetscObject)*J,"MatSeqAIJSetPreallocation_C",&aij);
  }
  if (aij) {
    if (bs == 1) {
      MatSeqAIJSetPreallocation(*J,slice->d_nz,slice->d_nnz);
      MatMPIAIJSetPreallocation(*J,slice->d_nz,slice->d_nnz,slice->o_nz,slice->o_nnz);
    } else if (!slice->d_nnz) {
      MatSeqAIJSetPreallocation(*J,slice->d_nz*bs,NULL);
      MatMPIAIJSetPreallocation(*J,slice->d_nz*bs,NULL,slice->o_nz*bs,NULL);
    } else {
      /* The user has provided preallocation per block-row; convert it to per scalar-row,
       * respecting DMSlicedSetBlockFills() if applicable */
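      /* Illustrative note (not in the original source): scalar row i lies in block row
       * i/bs. Its diagonal block contributes a dfill-row's worth of entries (bs if dense),
       * and each of the remaining d_nnz[i/bs]-1 diagonal-portion blocks contributes an
       * ofill-row's worth; e.g. bs=2, dense fills, d_nnz[i/bs]=3 gives (3-1)*2+2 = 6. */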
      PetscMalloc2(slice->n*bs,&sd_nnz,(!!slice->o_nnz)*slice->n*bs,&so_nnz);
      for (i=0; i<slice->n*bs; i++) {
        sd_nnz[i] = (slice->d_nnz[i/bs]-1) * (slice->ofill ? slice->ofill->i[i%bs+1]-slice->ofill->i[i%bs] : bs)
                                           + (slice->dfill ? slice->dfill->i[i%bs+1]-slice->dfill->i[i%bs] : bs);
        if (so_nnz) {
          so_nnz[i] = slice->o_nnz[i/bs] * (slice->ofill ? slice->ofill->i[i%bs+1]-slice->ofill->i[i%bs] : bs);
        }
      }
      MatSeqAIJSetPreallocation(*J,slice->d_nz*bs,sd_nnz);
      MatMPIAIJSetPreallocation(*J,slice->d_nz*bs,sd_nnz,slice->o_nz*bs,so_nnz);
      PetscFree2(sd_nnz,so_nnz);
    }
  }

  /* Set up the local to global map.  For the scalar map, we have to translate to entry-wise indexing instead of block-wise. */
  PetscMalloc1(slice->n+slice->Nghosts,&globals);
  MatGetOwnershipRange(*J,&rstart,NULL);
  for (i=0; i<slice->n; i++) globals[i] = rstart/bs + i;

  for (i=0; i<slice->Nghosts; i++) {
    globals[slice->n+i] = slice->ghosts[i];
  }
  ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,bs,slice->n+slice->Nghosts,globals,PETSC_OWN_POINTER,&lmap);
  MatSetLocalToGlobalMapping(*J,lmap,lmap);
  ISLocalToGlobalMappingDestroy(&lmap);
  MatSetDM(*J,dm);
  return(0);
}

/*@C
    DMSlicedSetGhosts - Sets the global indices of other processes' elements that will
      be ghosts on this process

    Not Collective

    Input Parameters:
+    dm - the DM object
.    bs - block size
.    nlocal - number of local (owned, non-ghost) blocks
.    Nghosts - number of ghost blocks on this process
-    ghosts - global indices of each ghost block
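
    Example (a minimal sketch, not from the original source; the indices are hypothetical):
.vb
    const PetscInt ghosts[] = {7,9};       /* global indices of blocks owned by other ranks */
    DMSlicedSetGhosts(dm,2,5,2,ghosts);    /* bs=2, 5 owned blocks, 2 ghost blocks */
.ve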

    Level: advanced

.seealso: DMDestroy(), DMCreateGlobalVector()

@*/
PetscErrorCode  DMSlicedSetGhosts(DM dm,PetscInt bs,PetscInt nlocal,PetscInt Nghosts,const PetscInt ghosts[])
{
  DM_Sliced      *slice = (DM_Sliced*)dm->data;

  PetscFree(slice->ghosts);
  PetscMalloc1(Nghosts,&slice->ghosts);
  PetscArraycpy(slice->ghosts,ghosts,Nghosts);
  slice->bs      = bs;
  slice->n       = nlocal;
  slice->Nghosts = Nghosts;
  return(0);
}

/*@C
    DMSlicedSetPreallocation - sets the matrix memory preallocation for matrices computed by DMSliced

    Not Collective

    Input Parameters:
+    dm - the DM object
.    d_nz  - number of block nonzeros per block row in the diagonal portion of the local
           submatrix (same for all local rows)
.    d_nnz - array containing the number of block nonzeros in the various block rows
           of the diagonal portion of the local submatrix (possibly different for each
           block row), or NULL
.    o_nz  - number of block nonzeros per block row in the off-diagonal portion of the local
           submatrix (same for all local rows)
-    o_nnz - array containing the number of nonzeros in the various block rows of the
           off-diagonal portion of the local submatrix (possibly different for
           each block row), or NULL

    Notes:
    See MatMPIBAIJSetPreallocation() for more details on preallocation.  If a scalar matrix (AIJ) is
    obtained with DMCreateMatrix(), the correct preallocation will be set, respecting DMSlicedSetBlockFills().
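
    Example (an illustrative sketch, not from the original source):
.vb
    const PetscInt d_nnz[] = {3,2,2,3};   /* block nonzeros per owned block row, diagonal portion */
    const PetscInt o_nnz[] = {1,0,0,1};   /* block nonzeros coupling to other processes */
    DMSlicedSetPreallocation(dm,0,d_nnz,0,o_nnz);
.ve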

    Level: advanced

.seealso: DMDestroy(), DMCreateGlobalVector(), MatMPIAIJSetPreallocation(),
         MatMPIBAIJSetPreallocation(), DMCreateMatrix(), DMSlicedSetBlockFills()

@*/
PetscErrorCode  DMSlicedSetPreallocation(DM dm,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  DM_Sliced *slice = (DM_Sliced*)dm->data;
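  /* The nnz arrays are stored by reference, not copied; they must remain valid until the
   * matrix is created (added comment, not in the original source) */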

  slice->d_nz  = d_nz;
  slice->d_nnz = (PetscInt*)d_nnz;
  slice->o_nz  = o_nz;
  slice->o_nnz = (PetscInt*)o_nnz;
  return(0);
}

static PetscErrorCode DMSlicedSetBlockFills_Private(PetscInt bs,const PetscInt *fill,DMSlicedBlockFills **inf)
{
  PetscInt           i,j,nz,*fi,*fj;
  DMSlicedBlockFills *f;

  if (*inf) {PetscFree3(*inf,(*inf)->i,(*inf)->j);}
  if (!fill) return(0);
  /* Count the nonzeros in the bs*bs fill pattern, then build the CSR arrays */
  for (i=0,nz=0; i<bs*bs; i++) if (fill[i]) nz++;
  PetscMalloc3(1,&f,bs+1,&fi,nz,&fj);
  f->bs = bs;
  f->nz = nz;
  f->i  = fi;
  f->j  = fj;
  for (i=0,nz=0; i<bs; i++) {
    fi[i] = nz;
    for (j=0; j<bs; j++) if (fill[i*bs+j]) fj[nz++] = j;
  }
  fi[i] = nz;
  *inf  = f;
  return(0);
}

/*@C
    DMSlicedSetBlockFills - Sets the fill pattern in each block for a multi-component problem
    of the matrix returned by DMCreateMatrix().

    Logically Collective on dm

    Input Parameters:
+   dm - the DM object
.   dfill - the fill pattern in the diagonal block (may be NULL, meaning use a dense block)
-   ofill - the fill pattern in the off-diagonal blocks (may be NULL, meaning use dense blocks)

    Notes:
    This only makes sense for multicomponent problems using scalar matrix formats (AIJ).
    See DMDASetBlockFills() for example usage.
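
    Example (an illustrative sketch, not from the original source), for bs=3 with row-major 0/1 patterns:
.vb
    const PetscInt dfill[9] = {1,0,0,
                               1,1,0,
                               0,1,1};
    const PetscInt ofill[9] = {1,0,0,
                               0,1,0,
                               0,0,1};
    DMSlicedSetBlockFills(dm,dfill,ofill);
.ve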

    Level: advanced

.seealso: DMCreateMatrix(), DMDASetBlockFills()
@*/
PetscErrorCode  DMSlicedSetBlockFills(DM dm,const PetscInt *dfill,const PetscInt *ofill)
{
  DM_Sliced      *slice = (DM_Sliced*)dm->data;

  DMSlicedSetBlockFills_Private(slice->bs,dfill,&slice->dfill);
  DMSlicedSetBlockFills_Private(slice->bs,ofill,&slice->ofill);
  return(0);
}

static PetscErrorCode  DMDestroy_Sliced(DM dm)
{
  DM_Sliced      *slice = (DM_Sliced*)dm->data;

  PetscFree(slice->ghosts);
  if (slice->dfill) {PetscFree3(slice->dfill,slice->dfill->i,slice->dfill->j);}
  if (slice->ofill) {PetscFree3(slice->ofill,slice->ofill->i,slice->ofill->j);}
  /* This was originally freed in DMDestroy(), but that prevents reference counting of backend objects */
  PetscFree(slice);
  return(0);
}

static PetscErrorCode  DMCreateGlobalVector_Sliced(DM dm,Vec *gvec)
{
  DM_Sliced      *slice = (DM_Sliced*)dm->data;

  *gvec = NULL;
  VecCreateGhostBlock(PetscObjectComm((PetscObject)dm),slice->bs,slice->n*slice->bs,PETSC_DETERMINE,slice->Nghosts,slice->ghosts,gvec);
  VecSetDM(*gvec,dm);
  return(0);
}

static PetscErrorCode  DMGlobalToLocalBegin_Sliced(DM da,Vec g,InsertMode mode,Vec l)
{
  PetscBool      flg;

  VecGhostIsLocalForm(g,l,&flg);
  if (!flg) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONG,"Local vector is not local form of global vector");
  /* Begin starts the ghost update; the matching VecGhostUpdateEnd() is issued in DMGlobalToLocalEnd_Sliced() */
  VecGhostUpdateBegin(g,mode,SCATTER_FORWARD);
  return(0);
}

static PetscErrorCode  DMGlobalToLocalEnd_Sliced(DM da,Vec g,InsertMode mode,Vec l)
{
  PetscBool      flg;

  VecGhostIsLocalForm(g,l,&flg);
  if (!flg) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONG,"Local vector is not local form of global vector");
  VecGhostUpdateEnd(g,mode,SCATTER_FORWARD);
  return(0);
}

/*MC
   DMSLICED = "sliced" - A DM object that is used to manage data for a general graph. Uses VecCreateGhost() ghosted vectors for storing the fields.

   See DMSlicedCreate() for details.
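
  Example (an illustrative sketch, not from the original source; comm, bs, nlocal, Nghosts, and ghosts are assumed to be set up by the caller):
.vb
  DMCreate(comm,&dm);
  DMSetType(dm,DMSLICED);
  DMSlicedSetGhosts(dm,bs,nlocal,Nghosts,ghosts);
.ve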

  Level: intermediate

.seealso: DMType, DMCOMPOSITE, DMSlicedCreate(), DMCreate()
M*/

PETSC_EXTERN PetscErrorCode DMCreate_Sliced(DM p)
{
  DM_Sliced      *slice;

  PetscNewLog(p,&slice);
  p->data = slice;

  p->ops->createglobalvector = DMCreateGlobalVector_Sliced;
  p->ops->creatematrix       = DMCreateMatrix_Sliced;
  p->ops->globaltolocalbegin = DMGlobalToLocalBegin_Sliced;
  p->ops->globaltolocalend   = DMGlobalToLocalEnd_Sliced;
  p->ops->destroy            = DMDestroy_Sliced;
  return(0);
}

/*@C
    DMSlicedCreate - Creates a DM object, used to manage data for an unstructured problem

    Collective

    Input Parameters:
+   comm - the processors that will share the global vector
.   bs - the block size
.   nlocal - number of vector entries on this process
.   Nghosts - number of ghost points needed on this process
.   ghosts - global indices of all ghost points for this process
.   d_nnz - matrix preallocation information representing coupling within this process
-   o_nnz - matrix preallocation information representing coupling between this process and other processes

    Output Parameter:
.   dm - the sliced DM object

    Notes:
        This DM does not support DMCreateLocalVector(); instead one directly uses
        VecGhostGetLocalForm() and VecGhostRestoreLocalForm() to access the local representation, and VecGhostUpdateBegin() and VecGhostUpdateEnd() to update
        the ghost points.

        DMGlobalToLocalBegin() and DMGlobalToLocalEnd() may be used in place of VecGhostUpdateBegin() and VecGhostUpdateEnd(), provided the local vector is the local form of the global vector.
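
    Example (a minimal sketch, not from the original source; the sizes and indices are hypothetical):
.vb
    DM             dm;
    Vec            g,l;
    const PetscInt ghosts[] = {12,13};
    const PetscInt d_nnz[]  = {3,3,3,3},o_nnz[] = {1,0,0,1};

    DMSlicedCreate(PETSC_COMM_WORLD,1,4,2,ghosts,d_nnz,o_nnz,&dm);
    DMCreateGlobalVector(dm,&g);
    VecGhostGetLocalForm(g,&l);     /* the local form includes the ghost entries */
    /* ... set values in g, then update the ghosts ... */
    VecGhostUpdateBegin(g,INSERT_VALUES,SCATTER_FORWARD);
    VecGhostUpdateEnd(g,INSERT_VALUES,SCATTER_FORWARD);
    VecGhostRestoreLocalForm(g,&l);
.ve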

    Level: advanced

.seealso: DMDestroy(), DMCreateGlobalVector(), DMSetType(), DMSLICED, DMSlicedSetGhosts(), DMSlicedSetPreallocation(), VecGhostUpdateBegin(), VecGhostUpdateEnd(),
         VecGhostGetLocalForm(), VecGhostRestoreLocalForm()

@*/
PetscErrorCode  DMSlicedCreate(MPI_Comm comm,PetscInt bs,PetscInt nlocal,PetscInt Nghosts,const PetscInt ghosts[],const PetscInt d_nnz[],const PetscInt o_nnz[],DM *dm)
{
  DMCreate(comm,dm);
  DMSetType(*dm,DMSLICED);
  DMSlicedSetGhosts(*dm,bs,nlocal,Nghosts,ghosts);
  if (d_nnz) {
    DMSlicedSetPreallocation(*dm,0,d_nnz,0,o_nnz);
  }
  return(0);
}