Actual source code: dmredundant.c

#include <petsc/private/dmimpl.h>
#include <petscdmredundant.h>

typedef struct {
  PetscMPIInt rank;                /* owner */
  PetscInt    N;                   /* total number of dofs */
  PetscInt    n;                   /* owned number of dofs, n=N on owner, n=0 on non-owners */
} DM_Redundant;

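/* The matrix of a redundant DM is dense by construction: every redundant dof
   couples to every other one, so each locally owned row is preallocated with
   all N columns and filled with explicit zeros below. */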
static PetscErrorCode DMCreateMatrix_Redundant(DM dm,Mat *J)
{
  DM_Redundant           *red = (DM_Redundant*)dm->data;
  ISLocalToGlobalMapping ltog;
  PetscInt               i,rstart,rend,*cols;
  PetscScalar            *vals;

  MatCreate(PetscObjectComm((PetscObject)dm),J);
  MatSetSizes(*J,red->n,red->n,red->N,red->N);
  MatSetType(*J,dm->mattype);
  MatSeqAIJSetPreallocation(*J,red->n,NULL);
  MatSeqBAIJSetPreallocation(*J,1,red->n,NULL);
  MatMPIAIJSetPreallocation(*J,red->n,NULL,red->N-red->n,NULL);
  MatMPIBAIJSetPreallocation(*J,1,red->n,NULL,red->N-red->n,NULL);

  DMGetLocalToGlobalMapping(dm,&ltog);
  MatSetLocalToGlobalMapping(*J,ltog,ltog);
  MatSetDM(*J,dm);

  PetscMalloc2(red->N,&cols,red->N,&vals);
  for (i=0; i<red->N; i++) {
    cols[i] = i;
    vals[i] = 0.0;
  }
  MatGetOwnershipRange(*J,&rstart,&rend);
  for (i=rstart; i<rend; i++) {
    MatSetValues(*J,1,&i,red->N,cols,vals,INSERT_VALUES);
  }
  PetscFree2(cols,vals);
  MatAssemblyBegin(*J,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(*J,MAT_FINAL_ASSEMBLY);
  return(0);
}

static PetscErrorCode DMDestroy_Redundant(DM dm)
{
  PetscObjectComposeFunction((PetscObject)dm,"DMRedundantSetSize_C",NULL);
  PetscObjectComposeFunction((PetscObject)dm,"DMRedundantGetSize_C",NULL);
  PetscObjectComposeFunction((PetscObject)dm,"DMSetUpGLVisViewer_C",NULL);
  /* This was originally freed in DMDestroy(), but that prevents reference counting of backend objects */
  PetscFree(dm->data);
  return(0);
}

static PetscErrorCode DMCreateGlobalVector_Redundant(DM dm,Vec *gvec)
{
  DM_Redundant           *red = (DM_Redundant*)dm->data;
  ISLocalToGlobalMapping ltog;

  *gvec = NULL;
  VecCreate(PetscObjectComm((PetscObject)dm),gvec);
  VecSetSizes(*gvec,red->n,red->N);
  VecSetType(*gvec,dm->vectype);
  DMGetLocalToGlobalMapping(dm,&ltog);
  VecSetLocalToGlobalMapping(*gvec,ltog);
  VecSetDM(*gvec,dm);
  return(0);
}

static PetscErrorCode DMCreateLocalVector_Redundant(DM dm,Vec *lvec)
{
  DM_Redundant *red = (DM_Redundant*)dm->data;

  *lvec = NULL;
  VecCreate(PETSC_COMM_SELF,lvec);
  VecSetSizes(*lvec,red->N,red->N);
  VecSetType(*lvec,dm->vectype);
  VecSetDM(*lvec,dm);
  return(0);
}

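/* Communication semantics: a local vector holds all N values on every rank,
   while the global vector stores them only on the owning rank.  LocalToGlobal
   therefore reduces (sum or max) or copies the ranks' local copies onto the
   owner, and GlobalToLocal broadcasts the owner's values back out. */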
static PetscErrorCode DMLocalToGlobalBegin_Redundant(DM dm,Vec l,InsertMode imode,Vec g)
{
  DM_Redundant      *red = (DM_Redundant*)dm->data;
  const PetscScalar *lv;
  PetscScalar       *gv;
  PetscMPIInt       rank;

  MPI_Comm_rank(PetscObjectComm((PetscObject)dm),&rank);
  VecGetArrayRead(l,&lv);
  VecGetArray(g,&gv);
  switch (imode) {
  case ADD_VALUES:
  case MAX_VALUES:
  {
    void        *source;
    PetscScalar *buffer;
    PetscInt    i;
    if (rank == red->rank) {
#if defined(PETSC_HAVE_MPI_IN_PLACE)
      buffer = gv;
      source = MPI_IN_PLACE;
#else
      PetscMalloc1(red->N,&buffer);
      source = buffer;
#endif
      if (imode == ADD_VALUES) for (i=0; i<red->N; i++) buffer[i] = gv[i] + lv[i];
#if !defined(PETSC_USE_COMPLEX)
      if (imode == MAX_VALUES) for (i=0; i<red->N; i++) buffer[i] = PetscMax(gv[i],lv[i]);
#endif
    } else source = (void*)lv;
    MPI_Reduce(source,gv,red->N,MPIU_SCALAR,(imode == ADD_VALUES) ? MPIU_SUM : MPIU_MAX,red->rank,PetscObjectComm((PetscObject)dm));
#if !defined(PETSC_HAVE_MPI_IN_PLACE)
    if (rank == red->rank) {PetscFree(buffer);}
#endif
  } break;
  case INSERT_VALUES:
    PetscArraycpy(gv,lv,red->n);
    break;
  default: SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"InsertMode not supported");
  }
  VecRestoreArrayRead(l,&lv);
  VecRestoreArray(g,&gv);
  return(0);
}

static PetscErrorCode DMLocalToGlobalEnd_Redundant(DM dm,Vec l,InsertMode imode,Vec g)
{
  return(0);
}

static PetscErrorCode DMGlobalToLocalBegin_Redundant(DM dm,Vec g,InsertMode imode,Vec l)
{
  DM_Redundant      *red = (DM_Redundant*)dm->data;
  const PetscScalar *gv;
  PetscScalar       *lv;

  VecGetArrayRead(g,&gv);
  VecGetArray(l,&lv);
  switch (imode) {
  case INSERT_VALUES:
    if (red->n) {PetscArraycpy(lv,gv,red->n);}
    MPI_Bcast(lv,red->N,MPIU_SCALAR,red->rank,PetscObjectComm((PetscObject)dm));
    break;
  default: SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"InsertMode not supported");
  }
  VecRestoreArrayRead(g,&gv);
  VecRestoreArray(l,&lv);
  return(0);
}

static PetscErrorCode DMGlobalToLocalEnd_Redundant(DM dm,Vec g,InsertMode imode,Vec l)
{
  return(0);
}

static PetscErrorCode DMSetUp_Redundant(DM dm)
{
  return(0);
}

static PetscErrorCode DMView_Redundant(DM dm,PetscViewer viewer)
{
  DM_Redundant *red = (DM_Redundant*)dm->data;
  PetscBool    iascii;

  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
  if (iascii) {
    PetscViewerASCIIPrintf(viewer,"redundant: rank=%d N=%D\n",red->rank,red->N);
  }
  return(0);
}

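/* Because the coupling is dense, no two dofs may share a color, so every dof
   receives its own color below. */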
static PetscErrorCode DMCreateColoring_Redundant(DM dm,ISColoringType ctype,ISColoring *coloring)
{
  DM_Redundant    *red = (DM_Redundant*)dm->data;
  PetscInt        i,nloc;
  ISColoringValue *colors;

  switch (ctype) {
  case IS_COLORING_GLOBAL:
    nloc = red->n;
    break;
  case IS_COLORING_LOCAL:
    nloc = red->N;
    break;
  default: SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONG,"Unknown ISColoringType %d",(int)ctype);
  }
  PetscMalloc1(nloc,&colors);
  for (i=0; i<nloc; i++) colors[i] = i;
  ISColoringCreate(PetscObjectComm((PetscObject)dm),red->N,nloc,colors,PETSC_OWN_POINTER,coloring);
  ISColoringSetType(*coloring,ctype);
  return(0);
}

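/* Refining or coarsening a redundant DM simply creates another redundant DM
   of the same size on the same owning rank. */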
static PetscErrorCode DMRefine_Redundant(DM dmc,MPI_Comm comm,DM *dmf)
{
  PetscMPIInt  flag;
  DM_Redundant *redc = (DM_Redundant*)dmc->data;

  if (comm == MPI_COMM_NULL) {
    PetscObjectGetComm((PetscObject)dmc,&comm);
  }
  MPI_Comm_compare(PetscObjectComm((PetscObject)dmc),comm,&flag);
  if (flag != MPI_CONGRUENT && flag != MPI_IDENT) SETERRQ(PetscObjectComm((PetscObject)dmc),PETSC_ERR_SUP,"cannot change communicators");
  DMRedundantCreate(comm,redc->rank,redc->N,dmf);
  return(0);
}

static PetscErrorCode DMCoarsen_Redundant(DM dmf,MPI_Comm comm,DM *dmc)
{
  PetscMPIInt  flag;
  DM_Redundant *redf = (DM_Redundant*)dmf->data;

  if (comm == MPI_COMM_NULL) {
    PetscObjectGetComm((PetscObject)dmf,&comm);
  }
  MPI_Comm_compare(PetscObjectComm((PetscObject)dmf),comm,&flag);
  if (flag != MPI_CONGRUENT && flag != MPI_IDENT) SETERRQ(PetscObjectComm((PetscObject)dmf),PETSC_ERR_SUP,"cannot change communicators");
  DMRedundantCreate(comm,redf->rank,redf->N,dmc);
  return(0);
}

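/* Coarse and fine redundant DMs must have the same size, so the interpolation
   between them is the identity matrix. */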
static PetscErrorCode DMCreateInterpolation_Redundant(DM dmc,DM dmf,Mat *P,Vec *scale)
{
  DM_Redundant *redc = (DM_Redundant*)dmc->data;
  DM_Redundant *redf = (DM_Redundant*)dmf->data;
  PetscMPIInt  flag;
  PetscInt     i,rstart,rend;

  MPI_Comm_compare(PetscObjectComm((PetscObject)dmc),PetscObjectComm((PetscObject)dmf),&flag);
  if (flag != MPI_CONGRUENT && flag != MPI_IDENT) SETERRQ(PetscObjectComm((PetscObject)dmf),PETSC_ERR_SUP,"cannot change communicators");
  if (redc->rank != redf->rank) SETERRQ(PetscObjectComm((PetscObject)dmf),PETSC_ERR_ARG_INCOMP,"Owning rank does not match");
  if (redc->N != redf->N) SETERRQ(PetscObjectComm((PetscObject)dmf),PETSC_ERR_ARG_INCOMP,"Global size does not match");
  MatCreate(PetscObjectComm((PetscObject)dmc),P);
  MatSetSizes(*P,redc->n,redc->n,redc->N,redc->N);
  MatSetType(*P,MATAIJ);
  MatSeqAIJSetPreallocation(*P,1,NULL);
  MatMPIAIJSetPreallocation(*P,1,NULL,0,NULL);
  MatGetOwnershipRange(*P,&rstart,&rend);
  for (i=rstart; i<rend; i++) {MatSetValue(*P,i,i,1.0,INSERT_VALUES);}
  MatAssemblyBegin(*P,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(*P,MAT_FINAL_ASSEMBLY);
  if (scale) {DMCreateInterpolationScale(dmc,dmf,*P,scale);}
  return(0);
}

/*@
    DMRedundantSetSize - Sets the size of a densely coupled redundant object

    Collective on dm

    Input Parameters:
+   dm - redundant DM
.   rank - rank of the process that owns the redundant degrees of freedom
-   N - total number of redundant degrees of freedom

    Level: advanced
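
    Example usage, a minimal sketch with error checking omitted:
.vb
    DM dm;
    DMCreate(PETSC_COMM_WORLD,&dm);
    DMSetType(dm,DMREDUNDANT);
    DMRedundantSetSize(dm,0,5);   /* rank 0 owns all 5 redundant dofs */
    DMSetUp(dm);
.ve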

.seealso: DMDestroy(), DMCreateGlobalVector(), DMRedundantCreate(), DMRedundantGetSize()
@*/
PetscErrorCode DMRedundantSetSize(DM dm,PetscMPIInt rank,PetscInt N)
{
  PetscTryMethod(dm,"DMRedundantSetSize_C",(DM,PetscMPIInt,PetscInt),(dm,rank,N));
  return(0);
}

/*@
    DMRedundantGetSize - Gets the size of a densely coupled redundant object

    Not Collective

    Input Parameter:
.   dm - redundant DM

    Output Parameters:
+   rank - rank of the process that owns the redundant degrees of freedom (or NULL)
-   N - total number of redundant degrees of freedom (or NULL)

    Level: advanced
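
    Example usage, a minimal sketch assuming dm is a DMREDUNDANT:
.vb
    PetscMPIInt owner;
    PetscInt    N;
    DMRedundantGetSize(dm,&owner,&N);  /* either output may be NULL */
.ve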

.seealso: DMDestroy(), DMCreateGlobalVector(), DMRedundantCreate(), DMRedundantSetSize()
@*/
PetscErrorCode DMRedundantGetSize(DM dm,PetscMPIInt *rank,PetscInt *N)
{
  PetscUseMethod(dm,"DMRedundantGetSize_C",(DM,PetscMPIInt*,PetscInt*),(dm,rank,N));
  return(0);
}

static PetscErrorCode DMRedundantSetSize_Redundant(DM dm,PetscMPIInt rank,PetscInt N)
{
  DM_Redundant *red = (DM_Redundant*)dm->data;
  PetscMPIInt  myrank;
  PetscInt     i,*globals;

  MPI_Comm_rank(PetscObjectComm((PetscObject)dm),&myrank);
  red->rank = rank;
  red->N    = N;
  red->n    = (myrank == rank) ? N : 0;

  /* The local-to-global mapping is the identity, since every rank stores all N dofs locally */
  PetscMalloc1(red->N,&globals);
  for (i=0; i<red->N; i++) globals[i] = i;
  ISLocalToGlobalMappingDestroy(&dm->ltogmap);
  ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)dm),1,red->N,globals,PETSC_OWN_POINTER,&dm->ltogmap);
  return(0);
}

static PetscErrorCode DMRedundantGetSize_Redundant(DM dm,PetscMPIInt *rank,PetscInt *N)
{
  DM_Redundant *red = (DM_Redundant*)dm->data;

  if (rank) *rank = red->rank;
  if (N)    *N = red->N;
  return(0);
}

static PetscErrorCode DMSetUpGLVisViewer_Redundant(PetscObject odm, PetscViewer viewer)
{
  return(0);
}

/*MC
   DMREDUNDANT = "redundant" - A DM object that is used to manage data for a small set of dense globally coupled variables.
         In the global representation of the vector the variables are all stored on a single MPI process (all the other
         MPI processes have no variables); in the local representation the variables are stored on ALL the MPI processes
         (because they are all needed for each process's local computations).

         This DM is generally used inside a DMCOMPOSITE object. For example, it may be used to store continuation parameters for a bifurcation problem.
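
         A typical use, sketched here with error checking omitted, packs a few globally coupled parameters next to a
         field DM inside a DMCOMPOSITE (da and comm stand for an already created mesh DM and its communicator):
.vb
         DM red,pack;
         DMRedundantCreate(comm,0,2,&red);  /* two parameters, owned by rank 0 */
         DMCompositeCreate(comm,&pack);
         DMCompositeAddDM(pack,da);         /* the field unknowns */
         DMCompositeAddDM(pack,red);        /* the redundant parameters */
.ve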

  Level: intermediate

.seealso: DMType, DMCOMPOSITE, DMCreate(), DMRedundantSetSize(), DMRedundantGetSize()
M*/

PETSC_EXTERN PetscErrorCode DMCreate_Redundant(DM dm)
{
  DM_Redundant *red;

  PetscNewLog(dm,&red);
  dm->data = red;

  dm->ops->setup               = DMSetUp_Redundant;
  dm->ops->view                = DMView_Redundant;
  dm->ops->createglobalvector  = DMCreateGlobalVector_Redundant;
  dm->ops->createlocalvector   = DMCreateLocalVector_Redundant;
  dm->ops->creatematrix        = DMCreateMatrix_Redundant;
  dm->ops->destroy             = DMDestroy_Redundant;
  dm->ops->globaltolocalbegin  = DMGlobalToLocalBegin_Redundant;
  dm->ops->globaltolocalend    = DMGlobalToLocalEnd_Redundant;
  dm->ops->localtoglobalbegin  = DMLocalToGlobalBegin_Redundant;
  dm->ops->localtoglobalend    = DMLocalToGlobalEnd_Redundant;
  dm->ops->refine              = DMRefine_Redundant;
  dm->ops->coarsen             = DMCoarsen_Redundant;
  dm->ops->createinterpolation = DMCreateInterpolation_Redundant;
  dm->ops->getcoloring         = DMCreateColoring_Redundant;

  PetscObjectComposeFunction((PetscObject)dm,"DMRedundantSetSize_C",DMRedundantSetSize_Redundant);
  PetscObjectComposeFunction((PetscObject)dm,"DMRedundantGetSize_C",DMRedundantGetSize_Redundant);
  PetscObjectComposeFunction((PetscObject)dm,"DMSetUpGLVisViewer_C",DMSetUpGLVisViewer_Redundant);
  return(0);
}

/*@C
    DMRedundantCreate - Creates a DM object, used to manage data for dense globally coupled variables

    Collective

    Input Parameters:
+   comm - the processors that will share the global vector
.   rank - rank of the process that will own the redundant values
-   N - total number of degrees of freedom

    Output Parameter:
.   dm - the redundant DM

    Level: advanced
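
    Example usage, a minimal sketch with error checking omitted:
.vb
    DM  dm;
    Vec g,l;
    DMRedundantCreate(PETSC_COMM_WORLD,0,3,&dm);  /* rank 0 owns the 3 dofs */
    DMCreateGlobalVector(dm,&g);   /* local size 3 on rank 0, 0 elsewhere */
    DMCreateLocalVector(dm,&l);    /* sequential, size 3 on every rank */
.ve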

.seealso: DMDestroy(), DMCreateGlobalVector(), DMCreateMatrix(), DMCompositeAddDM(), DMREDUNDANT, DMSetType(), DMRedundantSetSize(), DMRedundantGetSize()
@*/
PetscErrorCode DMRedundantCreate(MPI_Comm comm,PetscMPIInt rank,PetscInt N,DM *dm)
{
  DMCreate(comm,dm);
  DMSetType(*dm,DMREDUNDANT);
  DMRedundantSetSize(*dm,rank,N);
  DMSetUp(*dm);
  return(0);
}