Actual source code: dmredundant.c
petsc-3.4.5 2014-06-29
#include <petsc-private/dmimpl.h>
#include <petscdmredundant.h>   /*I "petscdmredundant.h" I*/

typedef struct {
  PetscInt rank; /* owner */
  PetscInt N;    /* total number of dofs */
  PetscInt n;    /* owned number of dofs, n=N on owner, n=0 on non-owners */
} DM_Redundant;

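/* Example layout (illustrative): with rank=1 and N=3 on a 4-process
   communicator, process 1 has n=3 while processes 0, 2, and 3 have n=0;
   the global vector therefore lives entirely on process 1, while every
   process's local vector holds all 3 dofs. */
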
static PetscErrorCode DMCreateMatrix_Redundant(DM dm,MatType mtype,Mat *J)
{
  DM_Redundant           *red = (DM_Redundant*)dm->data;
  PetscErrorCode         ierr;
  ISLocalToGlobalMapping ltog,ltogb;
  PetscInt               i,rstart,rend,*cols;
  PetscScalar            *vals;

  PetscFunctionBegin;
  ierr = MatCreate(PetscObjectComm((PetscObject)dm),J);CHKERRQ(ierr);
  ierr = MatSetSizes(*J,red->n,red->n,red->N,red->N);CHKERRQ(ierr);
  ierr = MatSetType(*J,mtype);CHKERRQ(ierr);
  ierr = MatSeqAIJSetPreallocation(*J,red->n,NULL);CHKERRQ(ierr);
  ierr = MatSeqBAIJSetPreallocation(*J,1,red->n,NULL);CHKERRQ(ierr);
  ierr = MatMPIAIJSetPreallocation(*J,red->n,NULL,red->N-red->n,NULL);CHKERRQ(ierr);
  ierr = MatMPIBAIJSetPreallocation(*J,1,red->n,NULL,red->N-red->n,NULL);CHKERRQ(ierr);

  ierr = DMGetLocalToGlobalMapping(dm,&ltog);CHKERRQ(ierr);
  ierr = DMGetLocalToGlobalMappingBlock(dm,&ltogb);CHKERRQ(ierr);
  ierr = MatSetLocalToGlobalMapping(*J,ltog,ltog);CHKERRQ(ierr);
  ierr = MatSetLocalToGlobalMappingBlock(*J,ltogb,ltogb);CHKERRQ(ierr);

  ierr = PetscMalloc2(red->N,PetscInt,&cols,red->N,PetscScalar,&vals);CHKERRQ(ierr);
  for (i=0; i<red->N; i++) {
    cols[i] = i;
    vals[i] = 0.0;
  }
  ierr = MatGetOwnershipRange(*J,&rstart,&rend);CHKERRQ(ierr);
  for (i=rstart; i<rend; i++) {
    ierr = MatSetValues(*J,1,&i,red->N,cols,vals,INSERT_VALUES);CHKERRQ(ierr);
  }
  ierr = PetscFree2(cols,vals);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(*J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

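/* Hypothetical usage sketch (not part of the original source): let
   DMCreateMatrix() (PETSc 3.4 signature, which still takes a MatType)
   build the dense redundant matrix implemented above, then fill its
   diagonal from the owning rank.  ExampleRedundantMatrix and the sizes
   are illustrative only. */
static PetscErrorCode ExampleRedundantMatrix(MPI_Comm comm)
{
  DM             dm;
  Mat            J;
  PetscInt       i,rstart,rend;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMRedundantCreate(comm,0,5,&dm);CHKERRQ(ierr);       /* 5 dofs, all owned by rank 0 */
  ierr = DMCreateMatrix(dm,MATAIJ,&J);CHKERRQ(ierr);          /* preallocated dense 5x5 */
  ierr = MatGetOwnershipRange(J,&rstart,&rend);CHKERRQ(ierr); /* empty range on non-owners */
  for (i=rstart; i<rend; i++) {
    ierr = MatSetValue(J,i,i,2.0,INSERT_VALUES);CHKERRQ(ierr);
  }
  ierr = MatAssemblyBegin(J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatDestroy(&J);CHKERRQ(ierr);
  ierr = DMDestroy(&dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
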
static PetscErrorCode DMDestroy_Redundant(DM dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectComposeFunction((PetscObject)dm,"DMRedundantSetSize_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)dm,"DMRedundantGetSize_C",NULL);CHKERRQ(ierr);
  /* This was originally freed in DMDestroy(), but that prevents reference counting of backend objects */
  ierr = PetscFree(dm->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode DMCreateGlobalVector_Redundant(DM dm,Vec *gvec)
{
  PetscErrorCode         ierr;
  DM_Redundant           *red = (DM_Redundant*)dm->data;
  ISLocalToGlobalMapping ltog,ltogb;

  PetscFunctionBegin;
  *gvec = 0;
  ierr  = VecCreate(PetscObjectComm((PetscObject)dm),gvec);CHKERRQ(ierr);
  ierr  = VecSetSizes(*gvec,red->n,red->N);CHKERRQ(ierr);
  ierr  = VecSetType(*gvec,dm->vectype);CHKERRQ(ierr);
  ierr  = DMGetLocalToGlobalMapping(dm,&ltog);CHKERRQ(ierr);
  ierr  = DMGetLocalToGlobalMappingBlock(dm,&ltogb);CHKERRQ(ierr);
  ierr  = VecSetLocalToGlobalMapping(*gvec,ltog);CHKERRQ(ierr);
  ierr  = VecSetLocalToGlobalMappingBlock(*gvec,ltogb);CHKERRQ(ierr);
  ierr  = VecSetDM(*gvec,dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode DMCreateLocalVector_Redundant(DM dm,Vec *lvec)
{
  PetscErrorCode ierr;
  DM_Redundant   *red = (DM_Redundant*)dm->data;

  PetscFunctionBegin;
  *lvec = 0;
  ierr  = VecCreate(PETSC_COMM_SELF,lvec);CHKERRQ(ierr);
  ierr  = VecSetSizes(*lvec,red->N,red->N);CHKERRQ(ierr);
  ierr  = VecSetType(*lvec,dm->vectype);CHKERRQ(ierr);
  ierr  = VecSetDM(*lvec,dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

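/* Hypothetical usage sketch: global vs. local vectors of a DMREDUNDANT.
   With N=4 and owner rank 0, the global vector has local size 4 on rank 0
   and 0 elsewhere, while every rank's (sequential) local vector holds all
   4 entries.  ExampleRedundantVectors is an illustrative name. */
static PetscErrorCode ExampleRedundantVectors(MPI_Comm comm)
{
  DM             dm;
  Vec            g,l;
  PetscInt       ng,nl;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMRedundantCreate(comm,0,4,&dm);CHKERRQ(ierr);
  ierr = DMCreateGlobalVector(dm,&g);CHKERRQ(ierr);
  ierr = DMCreateLocalVector(dm,&l);CHKERRQ(ierr);
  ierr = VecGetLocalSize(g,&ng);CHKERRQ(ierr); /* 4 on rank 0, 0 otherwise */
  ierr = VecGetLocalSize(l,&nl);CHKERRQ(ierr); /* 4 on every rank */
  ierr = VecDestroy(&g);CHKERRQ(ierr);
  ierr = VecDestroy(&l);CHKERRQ(ierr);
  ierr = DMDestroy(&dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
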
static PetscErrorCode DMLocalToGlobalBegin_Redundant(DM dm,Vec l,InsertMode imode,Vec g)
{
  PetscErrorCode    ierr;
  DM_Redundant      *red = (DM_Redundant*)dm->data;
  const PetscScalar *lv;
  PetscScalar       *gv;
  PetscMPIInt       rank;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)dm),&rank);CHKERRQ(ierr);
  ierr = VecGetArrayRead(l,&lv);CHKERRQ(ierr);
  ierr = VecGetArray(g,&gv);CHKERRQ(ierr);
  switch (imode) {
  case ADD_VALUES:
  case MAX_VALUES:
  {
    void        *source;
    PetscScalar *buffer;
    PetscInt    i;
    if (rank == red->rank) {
#if defined(PETSC_HAVE_MPI_IN_PLACE)
      buffer = gv;
      source = MPI_IN_PLACE;
#else
      ierr   = PetscMalloc(red->N*sizeof(PetscScalar),&buffer);CHKERRQ(ierr);
      source = buffer;
#endif
      if (imode == ADD_VALUES) for (i=0; i<red->N; i++) buffer[i] = gv[i] + lv[i];
#if !defined(PETSC_USE_COMPLEX)
      if (imode == MAX_VALUES) for (i=0; i<red->N; i++) buffer[i] = PetscMax(gv[i],lv[i]);
#endif
    } else source = (void*)lv;
    ierr = MPI_Reduce(source,gv,red->N,MPIU_SCALAR,(imode == ADD_VALUES) ? MPI_SUM : MPI_MAX,red->rank,PetscObjectComm((PetscObject)dm));CHKERRQ(ierr);
#if !defined(PETSC_HAVE_MPI_IN_PLACE)
    if (rank == red->rank) {ierr = PetscFree(buffer);CHKERRQ(ierr);}
#endif
  } break;
  case INSERT_VALUES:
    ierr = PetscMemcpy(gv,lv,red->n*sizeof(PetscScalar));CHKERRQ(ierr);
    break;
  default: SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"InsertMode not supported");
  }
  ierr = VecRestoreArrayRead(l,&lv);CHKERRQ(ierr);
  ierr = VecRestoreArray(g,&gv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode DMLocalToGlobalEnd_Redundant(DM dm,Vec l,InsertMode imode,Vec g)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

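/* Hypothetical usage sketch: gathering redundant contributions onto the
   owner.  Every rank fills its full-length local vector; ADD_VALUES then
   reduces the copies onto the owning rank's global vector via the
   MPI_Reduce above. */
static PetscErrorCode ExampleRedundantLocalToGlobal(DM dm)
{
  Vec            l,g;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMGetLocalVector(dm,&l);CHKERRQ(ierr);
  ierr = DMGetGlobalVector(dm,&g);CHKERRQ(ierr);
  ierr = VecSet(l,1.0);CHKERRQ(ierr);       /* every rank contributes 1.0 per dof */
  ierr = VecZeroEntries(g);CHKERRQ(ierr);
  ierr = DMLocalToGlobalBegin(dm,l,ADD_VALUES,g);CHKERRQ(ierr);
  ierr = DMLocalToGlobalEnd(dm,l,ADD_VALUES,g);CHKERRQ(ierr);
  /* on the owner, each entry of g now equals the communicator size */
  ierr = DMRestoreGlobalVector(dm,&g);CHKERRQ(ierr);
  ierr = DMRestoreLocalVector(dm,&l);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
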
static PetscErrorCode DMGlobalToLocalBegin_Redundant(DM dm,Vec g,InsertMode imode,Vec l)
{
  PetscErrorCode    ierr;
  DM_Redundant      *red = (DM_Redundant*)dm->data;
  const PetscScalar *gv;
  PetscScalar       *lv;

  PetscFunctionBegin;
  ierr = VecGetArrayRead(g,&gv);CHKERRQ(ierr);
  ierr = VecGetArray(l,&lv);CHKERRQ(ierr);
  switch (imode) {
  case INSERT_VALUES:
    if (red->n) {ierr = PetscMemcpy(lv,gv,red->n*sizeof(PetscScalar));CHKERRQ(ierr);}
    ierr = MPI_Bcast(lv,red->N,MPIU_SCALAR,red->rank,PetscObjectComm((PetscObject)dm));CHKERRQ(ierr);
    break;
  default: SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"InsertMode not supported");
  }
  ierr = VecRestoreArrayRead(g,&gv);CHKERRQ(ierr);
  ierr = VecRestoreArray(l,&lv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode DMGlobalToLocalEnd_Redundant(DM dm,Vec g,InsertMode imode,Vec l)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

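/* Hypothetical usage sketch: broadcasting the owner's values so every
   rank sees the full redundant vector in its local representation. */
static PetscErrorCode ExampleRedundantGlobalToLocal(DM dm)
{
  Vec            g,l;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMGetGlobalVector(dm,&g);CHKERRQ(ierr);
  ierr = DMGetLocalVector(dm,&l);CHKERRQ(ierr);
  ierr = VecSet(g,3.14);CHKERRQ(ierr); /* only the owner actually stores entries */
  ierr = DMGlobalToLocalBegin(dm,g,INSERT_VALUES,l);CHKERRQ(ierr);
  ierr = DMGlobalToLocalEnd(dm,g,INSERT_VALUES,l);CHKERRQ(ierr);
  /* every rank's l now holds all N entries, each equal to 3.14 */
  ierr = DMRestoreLocalVector(dm,&l);CHKERRQ(ierr);
  ierr = DMRestoreGlobalVector(dm,&g);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
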
static PetscErrorCode DMSetUp_Redundant(DM dm)
{
  PetscErrorCode ierr;
  DM_Redundant   *red = (DM_Redundant*)dm->data;
  PetscInt       i,*globals;

  PetscFunctionBegin;
  ierr = PetscMalloc(red->N*sizeof(PetscInt),&globals);CHKERRQ(ierr);
  for (i=0; i<red->N; i++) globals[i] = i;
  ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,red->N,globals,PETSC_OWN_POINTER,&dm->ltogmap);CHKERRQ(ierr);
  ierr = PetscObjectReference((PetscObject)dm->ltogmap);CHKERRQ(ierr);
  dm->ltogmapb = dm->ltogmap;
  PetscFunctionReturn(0);
}

static PetscErrorCode DMView_Redundant(DM dm,PetscViewer viewer)
{
  PetscErrorCode ierr;
  DM_Redundant   *red = (DM_Redundant*)dm->data;
  PetscBool      iascii;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = PetscViewerASCIIPrintf(viewer,"redundant: rank=%D N=%D\n",red->rank,red->N);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode DMCreateColoring_Redundant(DM dm,ISColoringType ctype,MatType mtype,ISColoring *coloring)
{
  DM_Redundant    *red = (DM_Redundant*)dm->data;
  PetscErrorCode  ierr;
  PetscInt        i,nloc;
  ISColoringValue *colors;

  PetscFunctionBegin;
  switch (ctype) {
  case IS_COLORING_GLOBAL:
    nloc = red->n;
    break;
  case IS_COLORING_GHOSTED:
    nloc = red->N;
    break;
  default: SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONG,"Unknown ISColoringType %d",(int)ctype);
  }
  ierr = PetscMalloc(nloc*sizeof(ISColoringValue),&colors);CHKERRQ(ierr);
  for (i=0; i<nloc; i++) colors[i] = i; /* the variables are densely coupled, so each dof needs its own color */
  ierr = ISColoringCreate(PetscObjectComm((PetscObject)dm),red->N,nloc,colors,coloring);CHKERRQ(ierr);
  ierr = ISColoringSetType(*coloring,ctype);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode DMRefine_Redundant(DM dmc,MPI_Comm comm,DM *dmf)
{
  PetscErrorCode ierr;
  PetscMPIInt    flag;
  DM_Redundant   *redc = (DM_Redundant*)dmc->data;

  PetscFunctionBegin;
  if (comm == MPI_COMM_NULL) {
    ierr = PetscObjectGetComm((PetscObject)dmc,&comm);CHKERRQ(ierr);
  }
  ierr = MPI_Comm_compare(PetscObjectComm((PetscObject)dmc),comm,&flag);CHKERRQ(ierr);
  if (flag != MPI_CONGRUENT && flag != MPI_IDENT) SETERRQ(PetscObjectComm((PetscObject)dmc),PETSC_ERR_SUP,"cannot change communicators");
  ierr = DMRedundantCreate(comm,redc->rank,redc->N,dmf);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode DMCoarsen_Redundant(DM dmf,MPI_Comm comm,DM *dmc)
{
  PetscErrorCode ierr;
  PetscMPIInt    flag;
  DM_Redundant   *redf = (DM_Redundant*)dmf->data;

  PetscFunctionBegin;
  if (comm == MPI_COMM_NULL) {
    ierr = PetscObjectGetComm((PetscObject)dmf,&comm);CHKERRQ(ierr);
  }
  ierr = MPI_Comm_compare(PetscObjectComm((PetscObject)dmf),comm,&flag);CHKERRQ(ierr);
  if (flag != MPI_CONGRUENT && flag != MPI_IDENT) SETERRQ(PetscObjectComm((PetscObject)dmf),PETSC_ERR_SUP,"cannot change communicators");
  ierr = DMRedundantCreate(comm,redf->rank,redf->N,dmc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode DMCreateInterpolation_Redundant(DM dmc,DM dmf,Mat *P,Vec *scale)
{
  PetscErrorCode ierr;
  DM_Redundant   *redc = (DM_Redundant*)dmc->data;
  DM_Redundant   *redf = (DM_Redundant*)dmf->data;
  PetscMPIInt    flag;
  PetscInt       i,rstart,rend;

  PetscFunctionBegin;
  ierr = MPI_Comm_compare(PetscObjectComm((PetscObject)dmc),PetscObjectComm((PetscObject)dmf),&flag);CHKERRQ(ierr);
  if (flag != MPI_CONGRUENT && flag != MPI_IDENT) SETERRQ(PetscObjectComm((PetscObject)dmf),PETSC_ERR_SUP,"cannot change communicators");
  if (redc->rank != redf->rank) SETERRQ(PetscObjectComm((PetscObject)dmf),PETSC_ERR_ARG_INCOMP,"Owning rank does not match");
  if (redc->N != redf->N) SETERRQ(PetscObjectComm((PetscObject)dmf),PETSC_ERR_ARG_INCOMP,"Global size does not match");
  /* coarse and fine spaces are identical, so interpolation is the identity */
  ierr = MatCreate(PetscObjectComm((PetscObject)dmc),P);CHKERRQ(ierr);
  ierr = MatSetSizes(*P,redc->n,redc->n,redc->N,redc->N);CHKERRQ(ierr);
  ierr = MatSetType(*P,MATAIJ);CHKERRQ(ierr);
  ierr = MatSeqAIJSetPreallocation(*P,1,0);CHKERRQ(ierr);
  ierr = MatMPIAIJSetPreallocation(*P,1,0,0,0);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(*P,&rstart,&rend);CHKERRQ(ierr);
  for (i=rstart; i<rend; i++) {ierr = MatSetValue(*P,i,i,1.0,INSERT_VALUES);CHKERRQ(ierr);}
  ierr = MatAssemblyBegin(*P,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*P,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (scale) {ierr = DMCreateInterpolationScale(dmc,dmf,*P,scale);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}

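/* Hypothetical usage sketch: refining a DMREDUNDANT yields another DM of
   the same owner and size, so the interpolation built above is simply the
   identity matrix. */
static PetscErrorCode ExampleRedundantInterpolation(DM dmc)
{
  DM             dmf;
  Mat            P;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMRefine(dmc,MPI_COMM_NULL,&dmf);CHKERRQ(ierr);       /* same rank, same N */
  ierr = DMCreateInterpolation(dmc,dmf,&P,NULL);CHKERRQ(ierr); /* identity matrix */
  ierr = MatDestroy(&P);CHKERRQ(ierr);
  ierr = DMDestroy(&dmf);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
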
/*@
    DMRedundantSetSize - Sets the size of a densely coupled redundant object

    Collective on DM

    Input Parameters:
+   dm - redundant DM
.   rank - rank of process to own redundant degrees of freedom
-   N - total number of redundant degrees of freedom

    Level: advanced

.seealso DMDestroy(), DMCreateGlobalVector(), DMRedundantCreate(), DMRedundantGetSize()
@*/
PetscErrorCode DMRedundantSetSize(DM dm,PetscInt rank,PetscInt N)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscTryMethod(dm,"DMRedundantSetSize_C",(DM,PetscInt,PetscInt),(dm,rank,N));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
    DMRedundantGetSize - Gets the size of a densely coupled redundant object

    Not Collective

    Input Parameter:
.   dm - redundant DM

    Output Parameters:
+   rank - rank of process to own redundant degrees of freedom (or NULL)
-   N - total number of redundant degrees of freedom (or NULL)

    Level: advanced

.seealso DMDestroy(), DMCreateGlobalVector(), DMRedundantCreate(), DMRedundantSetSize()
@*/
PetscErrorCode DMRedundantGetSize(DM dm,PetscInt *rank,PetscInt *N)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscUseMethod(dm,"DMRedundantGetSize_C",(DM,PetscInt*,PetscInt*),(dm,rank,N));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode DMRedundantSetSize_Redundant(DM dm,PetscInt rank,PetscInt N)
{
  DM_Redundant   *red = (DM_Redundant*)dm->data;
  PetscErrorCode ierr;
  PetscMPIInt    myrank;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)dm),&myrank);CHKERRQ(ierr);
  red->rank = rank;
  red->N    = N;
  red->n    = (myrank == rank) ? N : 0; /* only the owner holds the dofs in the global representation */
  PetscFunctionReturn(0);
}

static PetscErrorCode DMRedundantGetSize_Redundant(DM dm,PetscInt *rank,PetscInt *N)
{
  DM_Redundant *red = (DM_Redundant*)dm->data;

  PetscFunctionBegin;
  if (rank) *rank = red->rank;
  if (N)    *N    = red->N;
  PetscFunctionReturn(0);
}

/*MC
   DMREDUNDANT = "redundant" - A DM object that is used to manage data for a small set of dense globally coupled variables.
   In the global representation of the vector the variables are all stored on a single MPI process (all the other MPI
   processes have no variables); in the local representation the variables are stored on ALL of the MPI processes
   (because they are all needed for each process's local computations).

   This DM is generally used inside a DMCOMPOSITE object. For example, it may be used to store continuation parameters for a bifurcation problem.

  Level: intermediate

.seealso: DMType, DMCOMPOSITE, DMRedundantCreate(), DMCreate(), DMRedundantSetSize(), DMRedundantGetSize()
M*/

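/* Hypothetical sketch of the DMCOMPOSITE pattern mentioned above: pack a
   PDE field (here a 1-D DMDA) together with a single redundant
   continuation parameter owned by rank 0.  This assumes petscdmda.h and
   petscdmcomposite.h are also included; names and sizes are illustrative
   only (PETSc 3.4 API, e.g. DMDA_BOUNDARY_NONE). */
static PetscErrorCode ExampleRedundantInComposite(MPI_Comm comm)
{
  DM             packer,da,red;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMCompositeCreate(comm,&packer);CHKERRQ(ierr);
  ierr = DMDACreate1d(comm,DMDA_BOUNDARY_NONE,100,1,1,NULL,&da);CHKERRQ(ierr);
  ierr = DMRedundantCreate(comm,0,1,&red);CHKERRQ(ierr); /* one shared parameter */
  ierr = DMCompositeAddDM(packer,da);CHKERRQ(ierr);
  ierr = DMCompositeAddDM(packer,red);CHKERRQ(ierr);
  ierr = DMDestroy(&da);CHKERRQ(ierr);
  ierr = DMDestroy(&red);CHKERRQ(ierr);
  ierr = DMDestroy(&packer);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
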
PETSC_EXTERN PetscErrorCode DMCreate_Redundant(DM dm)
{
  PetscErrorCode ierr;
  DM_Redundant   *red;

  PetscFunctionBegin;
  ierr     = PetscNewLog(dm,DM_Redundant,&red);CHKERRQ(ierr);
  dm->data = red;

  ierr = PetscObjectChangeTypeName((PetscObject)dm,DMREDUNDANT);CHKERRQ(ierr);

  dm->ops->setup               = DMSetUp_Redundant;
  dm->ops->view                = DMView_Redundant;
  dm->ops->createglobalvector  = DMCreateGlobalVector_Redundant;
  dm->ops->createlocalvector   = DMCreateLocalVector_Redundant;
  dm->ops->creatematrix        = DMCreateMatrix_Redundant;
  dm->ops->destroy             = DMDestroy_Redundant;
  dm->ops->globaltolocalbegin  = DMGlobalToLocalBegin_Redundant;
  dm->ops->globaltolocalend    = DMGlobalToLocalEnd_Redundant;
  dm->ops->localtoglobalbegin  = DMLocalToGlobalBegin_Redundant;
  dm->ops->localtoglobalend    = DMLocalToGlobalEnd_Redundant;
  dm->ops->refine              = DMRefine_Redundant;
  dm->ops->coarsen             = DMCoarsen_Redundant;
  dm->ops->createinterpolation = DMCreateInterpolation_Redundant;
  dm->ops->getcoloring         = DMCreateColoring_Redundant;

  ierr = PetscStrallocpy(VECSTANDARD,(char**)&dm->vectype);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)dm,"DMRedundantSetSize_C",DMRedundantSetSize_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)dm,"DMRedundantGetSize_C",DMRedundantGetSize_Redundant);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
    DMRedundantCreate - Creates a DM object, used to manage data for dense globally coupled variables

    Collective on MPI_Comm

    Input Parameters:
+   comm - the processors that will share the global vector
.   rank - rank to own the redundant values
-   N - total number of degrees of freedom

    Output Parameter:
.   dm - the redundant DM

    Level: advanced

.seealso DMDestroy(), DMCreateGlobalVector(), DMCreateMatrix(), DMCompositeAddDM(), DMREDUNDANT, DMSetType(), DMRedundantSetSize(), DMRedundantGetSize()

@*/
PetscErrorCode DMRedundantCreate(MPI_Comm comm,PetscInt rank,PetscInt N,DM *dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMCreate(comm,dm);CHKERRQ(ierr);
  ierr = DMSetType(*dm,DMREDUNDANT);CHKERRQ(ierr);
  ierr = DMRedundantSetSize(*dm,rank,N);CHKERRQ(ierr);
  ierr = DMSetUp(*dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
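
/* Hypothetical end-to-end sketch (a standalone program, not part of this
   file): create a redundant DM, broadcast the owner's value to all ranks,
   then clean up.  Sizes and the chosen owner rank are illustrative only;
   compile against PETSc 3.4 as usual. */
#include <petscdmredundant.h>

int main(int argc,char **argv)
{
  DM             dm;
  Vec            g,l;
  PetscErrorCode ierr;

  ierr = PetscInitialize(&argc,&argv,NULL,NULL);if (ierr) return (int)ierr;
  ierr = DMRedundantCreate(PETSC_COMM_WORLD,0,2,&dm);CHKERRQ(ierr); /* 2 dofs on rank 0 */
  ierr = DMCreateGlobalVector(dm,&g);CHKERRQ(ierr);
  ierr = DMCreateLocalVector(dm,&l);CHKERRQ(ierr);
  ierr = VecSet(g,1.0);CHKERRQ(ierr);                /* stored on rank 0 only */
  ierr = DMGlobalToLocalBegin(dm,g,INSERT_VALUES,l);CHKERRQ(ierr);
  ierr = DMGlobalToLocalEnd(dm,g,INSERT_VALUES,l);CHKERRQ(ierr);
  /* every rank's l now contains both entries */
  ierr = VecDestroy(&l);CHKERRQ(ierr);
  ierr = VecDestroy(&g);CHKERRQ(ierr);
  ierr = DMDestroy(&dm);CHKERRQ(ierr);
  ierr = PetscFinalize();
  return 0;
}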