Actual source code: dmredundant.c

petsc-3.11.4 2019-09-28
#include <petsc/private/dmimpl.h>
#include <petscdmredundant.h>

typedef struct {
  PetscMPIInt rank;                /* owner */
  PetscInt    N;                   /* total number of dofs */
  PetscInt    n;                   /* owned number of dofs, n=N on owner, n=0 on non-owners */
} DM_Redundant;

static PetscErrorCode DMCreateMatrix_Redundant(DM dm,Mat *J)
{
  DM_Redundant           *red = (DM_Redundant*)dm->data;
  PetscErrorCode         ierr;
  ISLocalToGlobalMapping ltog;
  PetscInt               i,rstart,rend,*cols;
  PetscScalar            *vals;

  PetscFunctionBegin;
  ierr = MatCreate(PetscObjectComm((PetscObject)dm),J);CHKERRQ(ierr);
  ierr = MatSetSizes(*J,red->n,red->n,red->N,red->N);CHKERRQ(ierr);
  ierr = MatSetType(*J,dm->mattype);CHKERRQ(ierr);
  ierr = MatSeqAIJSetPreallocation(*J,red->n,NULL);CHKERRQ(ierr);
  ierr = MatSeqBAIJSetPreallocation(*J,1,red->n,NULL);CHKERRQ(ierr);
  ierr = MatMPIAIJSetPreallocation(*J,red->n,NULL,red->N-red->n,NULL);CHKERRQ(ierr);
  ierr = MatMPIBAIJSetPreallocation(*J,1,red->n,NULL,red->N-red->n,NULL);CHKERRQ(ierr);

  ierr = DMGetLocalToGlobalMapping(dm,&ltog);CHKERRQ(ierr);
  ierr = MatSetLocalToGlobalMapping(*J,ltog,ltog);CHKERRQ(ierr);

  ierr = PetscMalloc2(red->N,&cols,red->N,&vals);CHKERRQ(ierr);
  for (i=0; i<red->N; i++) {
    cols[i] = i;
    vals[i] = 0.0;
  }
  ierr = MatGetOwnershipRange(*J,&rstart,&rend);CHKERRQ(ierr);
  for (i=rstart; i<rend; i++) {
    ierr = MatSetValues(*J,1,&i,red->N,cols,vals,INSERT_VALUES);CHKERRQ(ierr);
  }
  ierr = PetscFree2(cols,vals);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(*J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode DMDestroy_Redundant(DM dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectComposeFunction((PetscObject)dm,"DMRedundantSetSize_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)dm,"DMRedundantGetSize_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)dm,"DMSetUpGLVisViewer_C",NULL);CHKERRQ(ierr);
  /* This was originally freed in DMDestroy(), but that prevents reference counting of backend objects */
  ierr = PetscFree(dm->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode DMCreateGlobalVector_Redundant(DM dm,Vec *gvec)
{
  PetscErrorCode         ierr;
  DM_Redundant           *red = (DM_Redundant*)dm->data;
  ISLocalToGlobalMapping ltog;

  PetscFunctionBegin;
  *gvec = 0;
  ierr = VecCreate(PetscObjectComm((PetscObject)dm),gvec);CHKERRQ(ierr);
  ierr = VecSetSizes(*gvec,red->n,red->N);CHKERRQ(ierr);
  ierr = VecSetType(*gvec,dm->vectype);CHKERRQ(ierr);
  ierr = DMGetLocalToGlobalMapping(dm,&ltog);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMapping(*gvec,ltog);CHKERRQ(ierr);
  ierr = VecSetDM(*gvec,dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode DMCreateLocalVector_Redundant(DM dm,Vec *lvec)
{
  PetscErrorCode ierr;
  DM_Redundant   *red = (DM_Redundant*)dm->data;

  PetscFunctionBegin;
  *lvec = 0;
  ierr = VecCreate(PETSC_COMM_SELF,lvec);CHKERRQ(ierr);
  ierr = VecSetSizes(*lvec,red->N,red->N);CHKERRQ(ierr);
  ierr = VecSetType(*lvec,dm->vectype);CHKERRQ(ierr);
  ierr = VecSetDM(*lvec,dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode DMLocalToGlobalBegin_Redundant(DM dm,Vec l,InsertMode imode,Vec g)
{
  PetscErrorCode    ierr;
  DM_Redundant      *red = (DM_Redundant*)dm->data;
  const PetscScalar *lv;
  PetscScalar       *gv;
  PetscMPIInt       rank;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)dm),&rank);CHKERRQ(ierr);
  ierr = VecGetArrayRead(l,&lv);CHKERRQ(ierr);
  ierr = VecGetArray(g,&gv);CHKERRQ(ierr);
  switch (imode) {
  case ADD_VALUES:
  case MAX_VALUES:
  {
    void        *source;
    PetscScalar *buffer;
    PetscInt    i;
    if (rank == red->rank) {
#if defined(PETSC_HAVE_MPI_IN_PLACE)
      buffer = gv;
      source = MPI_IN_PLACE;
#else
      ierr   = PetscMalloc1(red->N,&buffer);CHKERRQ(ierr);
      source = buffer;
#endif
      if (imode == ADD_VALUES) for (i=0; i<red->N; i++) buffer[i] = gv[i] + lv[i];
#if !defined(PETSC_USE_COMPLEX)
      if (imode == MAX_VALUES) for (i=0; i<red->N; i++) buffer[i] = PetscMax(gv[i],lv[i]);
#endif
    } else source = (void*)lv;
    ierr = MPI_Reduce(source,gv,red->N,MPIU_SCALAR,(imode == ADD_VALUES) ? MPIU_SUM : MPIU_MAX,red->rank,PetscObjectComm((PetscObject)dm));CHKERRQ(ierr);
#if !defined(PETSC_HAVE_MPI_IN_PLACE)
    if (rank == red->rank) {ierr = PetscFree(buffer);CHKERRQ(ierr);}
#endif
  } break;
  case INSERT_VALUES:
    ierr = PetscMemcpy(gv,lv,red->n*sizeof(PetscScalar));CHKERRQ(ierr);
    break;
  default: SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"InsertMode not supported");
  }
  ierr = VecRestoreArrayRead(l,&lv);CHKERRQ(ierr);
  ierr = VecRestoreArray(g,&gv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode DMLocalToGlobalEnd_Redundant(DM dm,Vec l,InsertMode imode,Vec g)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

static PetscErrorCode DMGlobalToLocalBegin_Redundant(DM dm,Vec g,InsertMode imode,Vec l)
{
  PetscErrorCode    ierr;
  DM_Redundant      *red = (DM_Redundant*)dm->data;
  const PetscScalar *gv;
  PetscScalar       *lv;

  PetscFunctionBegin;
  ierr = VecGetArrayRead(g,&gv);CHKERRQ(ierr);
  ierr = VecGetArray(l,&lv);CHKERRQ(ierr);
  switch (imode) {
  case INSERT_VALUES:
    if (red->n) {ierr = PetscMemcpy(lv,gv,red->n*sizeof(PetscScalar));CHKERRQ(ierr);}
    ierr = MPI_Bcast(lv,red->N,MPIU_SCALAR,red->rank,PetscObjectComm((PetscObject)dm));CHKERRQ(ierr);
    break;
  default: SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"InsertMode not supported");
  }
  ierr = VecRestoreArrayRead(g,&gv);CHKERRQ(ierr);
  ierr = VecRestoreArray(l,&lv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode DMGlobalToLocalEnd_Redundant(DM dm,Vec g,InsertMode imode,Vec l)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

static PetscErrorCode DMSetUp_Redundant(DM dm)
{
  PetscErrorCode ierr;
  DM_Redundant   *red = (DM_Redundant*)dm->data;
  PetscInt       i,*globals;

  PetscFunctionBegin;
  ierr = PetscMalloc1(red->N,&globals);CHKERRQ(ierr);
  for (i=0; i<red->N; i++) globals[i] = i;
  ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,1,red->N,globals,PETSC_OWN_POINTER,&dm->ltogmap);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode DMView_Redundant(DM dm,PetscViewer viewer)
{
  PetscErrorCode ierr;
  DM_Redundant   *red = (DM_Redundant*)dm->data;
  PetscBool      iascii;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = PetscViewerASCIIPrintf(viewer,"redundant: rank=%D N=%D\n",red->rank,red->N);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode DMCreateColoring_Redundant(DM dm,ISColoringType ctype,ISColoring *coloring)
{
  DM_Redundant    *red = (DM_Redundant*)dm->data;
  PetscErrorCode  ierr;
  PetscInt        i,nloc;
  ISColoringValue *colors;

  PetscFunctionBegin;
  switch (ctype) {
  case IS_COLORING_GLOBAL:
    nloc = red->n;
    break;
  case IS_COLORING_LOCAL:
    nloc = red->N;
    break;
  default: SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONG,"Unknown ISColoringType %d",(int)ctype);
  }
  ierr = PetscMalloc1(nloc,&colors);CHKERRQ(ierr);
  for (i=0; i<nloc; i++) colors[i] = i;
  ierr = ISColoringCreate(PetscObjectComm((PetscObject)dm),red->N,nloc,colors,PETSC_OWN_POINTER,coloring);CHKERRQ(ierr);
  ierr = ISColoringSetType(*coloring,ctype);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode DMRefine_Redundant(DM dmc,MPI_Comm comm,DM *dmf)
{
  PetscErrorCode ierr;
  PetscMPIInt    flag;
  DM_Redundant   *redc = (DM_Redundant*)dmc->data;

  PetscFunctionBegin;
  if (comm == MPI_COMM_NULL) {
    ierr = PetscObjectGetComm((PetscObject)dmc,&comm);CHKERRQ(ierr);
  }
  ierr = MPI_Comm_compare(PetscObjectComm((PetscObject)dmc),comm,&flag);CHKERRQ(ierr);
  if (flag != MPI_CONGRUENT && flag != MPI_IDENT) SETERRQ(PetscObjectComm((PetscObject)dmc),PETSC_ERR_SUP,"cannot change communicators");
  ierr = DMRedundantCreate(comm,redc->rank,redc->N,dmf);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode DMCoarsen_Redundant(DM dmf,MPI_Comm comm,DM *dmc)
{
  PetscErrorCode ierr;
  PetscMPIInt    flag;
  DM_Redundant   *redf = (DM_Redundant*)dmf->data;

  PetscFunctionBegin;
  if (comm == MPI_COMM_NULL) {
    ierr = PetscObjectGetComm((PetscObject)dmf,&comm);CHKERRQ(ierr);
  }
  ierr = MPI_Comm_compare(PetscObjectComm((PetscObject)dmf),comm,&flag);CHKERRQ(ierr);
  if (flag != MPI_CONGRUENT && flag != MPI_IDENT) SETERRQ(PetscObjectComm((PetscObject)dmf),PETSC_ERR_SUP,"cannot change communicators");
  ierr = DMRedundantCreate(comm,redf->rank,redf->N,dmc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode DMCreateInterpolation_Redundant(DM dmc,DM dmf,Mat *P,Vec *scale)
{
  PetscErrorCode ierr;
  DM_Redundant   *redc = (DM_Redundant*)dmc->data;
  DM_Redundant   *redf = (DM_Redundant*)dmf->data;
  PetscMPIInt    flag;
  PetscInt       i,rstart,rend;

  PetscFunctionBegin;
  ierr = MPI_Comm_compare(PetscObjectComm((PetscObject)dmc),PetscObjectComm((PetscObject)dmf),&flag);CHKERRQ(ierr);
  if (flag != MPI_CONGRUENT && flag != MPI_IDENT) SETERRQ(PetscObjectComm((PetscObject)dmf),PETSC_ERR_SUP,"cannot change communicators");
  if (redc->rank != redf->rank) SETERRQ(PetscObjectComm((PetscObject)dmf),PETSC_ERR_ARG_INCOMP,"Owning rank does not match");
  if (redc->N != redf->N) SETERRQ(PetscObjectComm((PetscObject)dmf),PETSC_ERR_ARG_INCOMP,"Global size does not match");
  ierr = MatCreate(PetscObjectComm((PetscObject)dmc),P);CHKERRQ(ierr);
  ierr = MatSetSizes(*P,redc->n,redc->n,redc->N,redc->N);CHKERRQ(ierr);
  ierr = MatSetType(*P,MATAIJ);CHKERRQ(ierr);
  ierr = MatSeqAIJSetPreallocation(*P,1,0);CHKERRQ(ierr);
  ierr = MatMPIAIJSetPreallocation(*P,1,0,0,0);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(*P,&rstart,&rend);CHKERRQ(ierr);
  for (i=rstart; i<rend; i++) {ierr = MatSetValue(*P,i,i,1.0,INSERT_VALUES);CHKERRQ(ierr);}
  ierr = MatAssemblyBegin(*P,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*P,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (scale) {ierr = DMCreateInterpolationScale(dmc,dmf,*P,scale);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}

/*@
    DMRedundantSetSize - Sets the size of a densely coupled redundant object

    Collective on DM

    Input Parameters:
+   dm - redundant DM
.   rank - rank of process to own redundant degrees of freedom
-   N - total number of redundant degrees of freedom

    Level: advanced

.seealso DMDestroy(), DMCreateGlobalVector(), DMRedundantCreate(), DMRedundantGetSize()
@*/
PetscErrorCode DMRedundantSetSize(DM dm,PetscMPIInt rank,PetscInt N)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscTryMethod(dm,"DMRedundantSetSize_C",(DM,PetscMPIInt,PetscInt),(dm,rank,N));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
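
/* Illustrative sketch, not part of the library source: configuring a redundant DM by hand
   with DMRedundantSetSize(), mirroring what DMRedundantCreate() does below. The function
   name ExampleSetRedundantSize and the values (owner rank 0, 5 dofs) are placeholders. */
static PetscErrorCode ExampleSetRedundantSize(MPI_Comm comm,DM *newdm)
{
  DM             dm;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMCreate(comm,&dm);CHKERRQ(ierr);
  ierr = DMSetType(dm,DMREDUNDANT);CHKERRQ(ierr);
  ierr = DMRedundantSetSize(dm,0,5);CHKERRQ(ierr);   /* rank 0 owns all 5 dofs */
  ierr = DMSetUp(dm);CHKERRQ(ierr);
  *newdm = dm;
  PetscFunctionReturn(0);
}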

/*@
    DMRedundantGetSize - Gets the size of a densely coupled redundant object

    Not Collective

    Input Parameter:
.   dm - redundant DM

    Output Parameters:
+   rank - rank of process to own redundant degrees of freedom (or NULL)
-   N - total number of redundant degrees of freedom (or NULL)

    Level: advanced

.seealso DMDestroy(), DMCreateGlobalVector(), DMRedundantCreate(), DMRedundantSetSize()
@*/
PetscErrorCode DMRedundantGetSize(DM dm,PetscMPIInt *rank,PetscInt *N)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscUseMethod(dm,"DMRedundantGetSize_C",(DM,PetscMPIInt*,PetscInt*),(dm,rank,N));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
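
/* Illustrative sketch, not part of the library source: querying the owner rank and global
   size of a redundant DM; either output argument may be passed as NULL. The function name
   ExampleQueryRedundantSize is a placeholder. */
static PetscErrorCode ExampleQueryRedundantSize(DM dm)
{
  PetscMPIInt    owner;
  PetscInt       N;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMRedundantGetSize(dm,&owner,&N);CHKERRQ(ierr);
  ierr = PetscPrintf(PetscObjectComm((PetscObject)dm),"owner rank %d, %D redundant dofs\n",(int)owner,N);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}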

static PetscErrorCode DMRedundantSetSize_Redundant(DM dm,PetscMPIInt rank,PetscInt N)
{
  DM_Redundant   *red = (DM_Redundant*)dm->data;
  PetscErrorCode ierr;
  PetscMPIInt    myrank;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)dm),&myrank);CHKERRQ(ierr);
  red->rank = rank;
  red->N    = N;
  red->n    = (myrank == rank) ? N : 0;
  PetscFunctionReturn(0);
}

static PetscErrorCode DMRedundantGetSize_Redundant(DM dm,PetscMPIInt *rank,PetscInt *N)
{
  DM_Redundant *red = (DM_Redundant*)dm->data;

  PetscFunctionBegin;
  if (rank) *rank = red->rank;
  if (N)    *N = red->N;
  PetscFunctionReturn(0);
}

static PetscErrorCode DMSetUpGLVisViewer_Redundant(PetscObject odm, PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

/*MC
   DMREDUNDANT = "redundant" - A DM object that is used to manage data for a small set of dense globally coupled variables.
         In the global representation of the vector the variables are all stored on a single MPI process (all the other MPI processes have
         no variables); in the local representation all the variables are stored on ALL the MPI processes (because they are all needed for
         each process's local computations).

         This DM is generally used inside a DMCOMPOSITE object. For example, it may be used to store continuation parameters for a bifurcation problem.

  Level: intermediate

.seealso: DMType, DMCOMPOSITE, DMCreate(), DMRedundantSetSize(), DMRedundantGetSize()
M*/
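
/* Illustrative sketch, not part of the library source, of the typical use described above:
   packing a DMREDUNDANT (here a single continuation parameter owned by rank 0) next to a
   caller-supplied field DM inside a DMCOMPOSITE. The function name and sizes are
   placeholders; petscdmcomposite.h is needed for the DMComposite calls. */
#include <petscdmcomposite.h>
static PetscErrorCode ExamplePackWithComposite(DM fielddm,DM *packed)
{
  MPI_Comm       comm;
  DM             red,pack;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)fielddm,&comm);CHKERRQ(ierr);
  ierr = DMRedundantCreate(comm,0,1,&red);CHKERRQ(ierr);  /* one parameter, owned by rank 0 */
  ierr = DMCompositeCreate(comm,&pack);CHKERRQ(ierr);
  ierr = DMCompositeAddDM(pack,fielddm);CHKERRQ(ierr);
  ierr = DMCompositeAddDM(pack,red);CHKERRQ(ierr);
  ierr = DMDestroy(&red);CHKERRQ(ierr);                   /* the composite keeps a reference */
  *packed = pack;
  PetscFunctionReturn(0);
}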

PETSC_EXTERN PetscErrorCode DMCreate_Redundant(DM dm)
{
  PetscErrorCode ierr;
  DM_Redundant   *red;

  PetscFunctionBegin;
  ierr     = PetscNewLog(dm,&red);CHKERRQ(ierr);
  dm->data = red;

  ierr = PetscObjectChangeTypeName((PetscObject)dm,DMREDUNDANT);CHKERRQ(ierr);

  dm->ops->setup               = DMSetUp_Redundant;
  dm->ops->view                = DMView_Redundant;
  dm->ops->createglobalvector  = DMCreateGlobalVector_Redundant;
  dm->ops->createlocalvector   = DMCreateLocalVector_Redundant;
  dm->ops->creatematrix        = DMCreateMatrix_Redundant;
  dm->ops->destroy             = DMDestroy_Redundant;
  dm->ops->globaltolocalbegin  = DMGlobalToLocalBegin_Redundant;
  dm->ops->globaltolocalend    = DMGlobalToLocalEnd_Redundant;
  dm->ops->localtoglobalbegin  = DMLocalToGlobalBegin_Redundant;
  dm->ops->localtoglobalend    = DMLocalToGlobalEnd_Redundant;
  dm->ops->refine              = DMRefine_Redundant;
  dm->ops->coarsen             = DMCoarsen_Redundant;
  dm->ops->createinterpolation = DMCreateInterpolation_Redundant;
  dm->ops->getcoloring         = DMCreateColoring_Redundant;

  ierr = PetscObjectComposeFunction((PetscObject)dm,"DMRedundantSetSize_C",DMRedundantSetSize_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)dm,"DMRedundantGetSize_C",DMRedundantGetSize_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)dm,"DMSetUpGLVisViewer_C",DMSetUpGLVisViewer_Redundant);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
    DMRedundantCreate - Creates a DM object, used to manage data for dense globally coupled variables

    Collective on MPI_Comm

    Input Parameters:
+   comm - the processors that will share the global vector
.   rank - rank to own the redundant values
-   N - total number of degrees of freedom

    Output Parameter:
.   dm - the redundant DM

    Level: advanced

.seealso DMDestroy(), DMCreateGlobalVector(), DMCreateMatrix(), DMCompositeAddDM(), DMREDUNDANT, DMSetType(), DMRedundantSetSize(), DMRedundantGetSize()

@*/
PetscErrorCode DMRedundantCreate(MPI_Comm comm,PetscMPIInt rank,PetscInt N,DM *dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMCreate(comm,dm);CHKERRQ(ierr);
  ierr = DMSetType(*dm,DMREDUNDANT);CHKERRQ(ierr);
  ierr = DMRedundantSetSize(*dm,rank,N);CHKERRQ(ierr);
  ierr = DMSetUp(*dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
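
/* Illustrative end-to-end sketch, not part of the library source: a redundant DM with N=3
   dofs owned by rank 0. The global vector has 3 entries on the owner and none elsewhere;
   the local vector has 3 entries on every rank. DMGlobalToLocal() broadcasts the owner's
   values to all ranks, and DMLocalToGlobal() with ADD_VALUES reduces every rank's
   contribution back onto the owner. Function name and sizes are placeholders. */
static PetscErrorCode ExampleRedundantRoundTrip(MPI_Comm comm)
{
  DM             dm;
  Vec            g,l;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMRedundantCreate(comm,0,3,&dm);CHKERRQ(ierr);
  ierr = DMCreateGlobalVector(dm,&g);CHKERRQ(ierr);
  ierr = DMCreateLocalVector(dm,&l);CHKERRQ(ierr);
  ierr = VecSet(g,1.0);CHKERRQ(ierr);                              /* set values on the owner */
  ierr = DMGlobalToLocalBegin(dm,g,INSERT_VALUES,l);CHKERRQ(ierr); /* broadcast to all ranks */
  ierr = DMGlobalToLocalEnd(dm,g,INSERT_VALUES,l);CHKERRQ(ierr);
  ierr = VecSet(l,2.0);CHKERRQ(ierr);                              /* each rank contributes */
  ierr = DMLocalToGlobalBegin(dm,l,ADD_VALUES,g);CHKERRQ(ierr);    /* sum-reduce onto owner */
  ierr = DMLocalToGlobalEnd(dm,l,ADD_VALUES,g);CHKERRQ(ierr);
  ierr = VecDestroy(&l);CHKERRQ(ierr);
  ierr = VecDestroy(&g);CHKERRQ(ierr);
  ierr = DMDestroy(&dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}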