Actual source code: dmredundant.c
petsc-3.8.4 2018-03-24
1: #include <petsc/private/dmimpl.h>
2: #include <petscdmredundant.h>
4: typedef struct {
5: PetscMPIInt rank; /* owner */
6: PetscInt N; /* total number of dofs */
7: PetscInt n; /* owned number of dofs, n=N on owner, n=0 on non-owners */
8: } DM_Redundant;
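/* Data layout: the owning rank stores all N entries of the global vector (n = N there,
   n = 0 everywhere else), while every rank's local vector holds its own copy of all N
   entries.  For example, with N = 4 owned by rank 1 on a 3-rank communicator, the global
   vector has local sizes {0,4,0} and each rank's local vector has length 4.  Matrices
   created for this DM are dense N-by-N blocks stored entirely on the owner. */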
10: static PetscErrorCode DMCreateMatrix_Redundant(DM dm,Mat *J)
11: {
12: DM_Redundant *red = (DM_Redundant*)dm->data;
13: PetscErrorCode ierr;
14: ISLocalToGlobalMapping ltog;
15: PetscInt i,rstart,rend,*cols;
16: PetscScalar *vals;
19: MatCreate(PetscObjectComm((PetscObject)dm),J);
20: MatSetSizes(*J,red->n,red->n,red->N,red->N);
21: MatSetType(*J,dm->mattype);
22: MatSeqAIJSetPreallocation(*J,red->n,NULL);
23: MatSeqBAIJSetPreallocation(*J,1,red->n,NULL);
24: MatMPIAIJSetPreallocation(*J,red->n,NULL,red->N-red->n,NULL);
25: MatMPIBAIJSetPreallocation(*J,1,red->n,NULL,red->N-red->n,NULL);
27: DMGetLocalToGlobalMapping(dm,&ltog);
28: MatSetLocalToGlobalMapping(*J,ltog,ltog);
30: PetscMalloc2(red->N,&cols,red->N,&vals);
31: for (i=0; i<red->N; i++) {
32: cols[i] = i;
33: vals[i] = 0.0;
34: }
35: MatGetOwnershipRange(*J,&rstart,&rend);
36: for (i=rstart; i<rend; i++) {
37: MatSetValues(*J,1,&i,red->N,cols,vals,INSERT_VALUES);
38: }
39: PetscFree2(cols,vals);
40: MatAssemblyBegin(*J,MAT_FINAL_ASSEMBLY);
41: MatAssemblyEnd(*J,MAT_FINAL_ASSEMBLY);
42: return(0);
43: }
45: static PetscErrorCode DMDestroy_Redundant(DM dm)
46: {
50: PetscObjectComposeFunction((PetscObject)dm,"DMRedundantSetSize_C",NULL);
51: PetscObjectComposeFunction((PetscObject)dm,"DMRedundantGetSize_C",NULL);
52: /* This was originally freed in DMDestroy(), but that prevents reference counting of backend objects */
53: PetscFree(dm->data);
54: return(0);
55: }
57: static PetscErrorCode DMCreateGlobalVector_Redundant(DM dm,Vec *gvec)
58: {
59: PetscErrorCode ierr;
60: DM_Redundant *red = (DM_Redundant*)dm->data;
61: ISLocalToGlobalMapping ltog;
66: *gvec = 0;
67: VecCreate(PetscObjectComm((PetscObject)dm),gvec);
68: VecSetSizes(*gvec,red->n,red->N);
69: VecSetType(*gvec,dm->vectype);
70: DMGetLocalToGlobalMapping(dm,&ltog);
71: VecSetLocalToGlobalMapping(*gvec,ltog);
72: VecSetDM(*gvec,dm);
73: return(0);
74: }
76: static PetscErrorCode DMCreateLocalVector_Redundant(DM dm,Vec *lvec)
77: {
79: DM_Redundant *red = (DM_Redundant*)dm->data;
84: *lvec = 0;
85: VecCreate(PETSC_COMM_SELF,lvec);
86: VecSetSizes(*lvec,red->N,red->N);
87: VecSetType(*lvec,dm->vectype);
88: VecSetDM(*lvec,dm);
89: return(0);
90: }
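/* Local-to-global: with INSERT_VALUES the owner simply copies its local values into the
   global vector (non-owners contribute nothing since n = 0 there); with ADD_VALUES or
   MAX_VALUES every rank's local copy is combined with the owner's current global values
   by an MPI_Reduce whose result lands in the owner's global storage. */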
92: static PetscErrorCode DMLocalToGlobalBegin_Redundant(DM dm,Vec l,InsertMode imode,Vec g)
93: {
94: PetscErrorCode ierr;
95: DM_Redundant *red = (DM_Redundant*)dm->data;
96: const PetscScalar *lv;
97: PetscScalar *gv;
98: PetscMPIInt rank;
101: MPI_Comm_rank(PetscObjectComm((PetscObject)dm),&rank);
102: VecGetArrayRead(l,&lv);
103: VecGetArray(g,&gv);
104: switch (imode) {
105: case ADD_VALUES:
106: case MAX_VALUES:
107: {
108: void *source;
109: PetscScalar *buffer;
110: PetscInt i;
111: if (rank == red->rank) {
112: #if defined(PETSC_HAVE_MPI_IN_PLACE)
113: buffer = gv;
114: source = MPI_IN_PLACE;
115: #else
116: PetscMalloc1(red->N,&buffer);
117: source = buffer;
118: #endif
119: if (imode == ADD_VALUES) for (i=0; i<red->N; i++) buffer[i] = gv[i] + lv[i];
120: #if !defined(PETSC_USE_COMPLEX)
121: if (imode == MAX_VALUES) for (i=0; i<red->N; i++) buffer[i] = PetscMax(gv[i],lv[i]);
122: #endif
123: } else source = (void*)lv;
124: MPI_Reduce(source,gv,red->N,MPIU_SCALAR,(imode == ADD_VALUES) ? MPIU_SUM : MPIU_MAX,red->rank,PetscObjectComm((PetscObject)dm));
125: #if !defined(PETSC_HAVE_MPI_IN_PLACE)
126: if (rank == red->rank) {PetscFree(buffer);}
127: #endif
128: } break;
129: case INSERT_VALUES:
130: PetscMemcpy(gv,lv,red->n*sizeof(PetscScalar));
131: break;
132: default: SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"InsertMode not supported");
133: }
134: VecRestoreArrayRead(l,&lv);
135: VecRestoreArray(g,&gv);
136: return(0);
137: }
139: static PetscErrorCode DMLocalToGlobalEnd_Redundant(DM dm,Vec l,InsertMode imode,Vec g)
140: {
142: return(0);
143: }
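/* Global-to-local: the owner copies its N global values into its local vector and then
   broadcasts them from red->rank, so after INSERT_VALUES every rank's local vector holds
   the complete set of redundant values. */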
145: static PetscErrorCode DMGlobalToLocalBegin_Redundant(DM dm,Vec g,InsertMode imode,Vec l)
146: {
147: PetscErrorCode ierr;
148: DM_Redundant *red = (DM_Redundant*)dm->data;
149: const PetscScalar *gv;
150: PetscScalar *lv;
153: VecGetArrayRead(g,&gv);
154: VecGetArray(l,&lv);
155: switch (imode) {
156: case INSERT_VALUES:
157: if (red->n) {PetscMemcpy(lv,gv,red->n*sizeof(PetscScalar));}
158: MPI_Bcast(lv,red->N,MPIU_SCALAR,red->rank,PetscObjectComm((PetscObject)dm));
159: break;
160: default: SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"InsertMode not supported");
161: }
162: VecRestoreArrayRead(g,&gv);
163: VecRestoreArray(l,&lv);
164: return(0);
165: }
167: static PetscErrorCode DMGlobalToLocalEnd_Redundant(DM dm,Vec g,InsertMode imode,Vec l)
168: {
170: return(0);
171: }
173: static PetscErrorCode DMSetUp_Redundant(DM dm)
174: {
176: DM_Redundant *red = (DM_Redundant*)dm->data;
177: PetscInt i,*globals;
180: PetscMalloc1(red->N,&globals);
181: for (i=0; i<red->N; i++) globals[i] = i;
182: ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,1,red->N,globals,PETSC_OWN_POINTER,&dm->ltogmap);
183: return(0);
184: }
186: static PetscErrorCode DMView_Redundant(DM dm,PetscViewer viewer)
187: {
189: DM_Redundant *red = (DM_Redundant*)dm->data;
190: PetscBool iascii;
193: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
194: if (iascii) {
195: PetscViewerASCIIPrintf(viewer,"redundant: rank=%d N=%D\n",red->rank,red->N);
196: }
197: return(0);
198: }
200: static PetscErrorCode DMCreateColoring_Redundant(DM dm,ISColoringType ctype,ISColoring *coloring)
201: {
202: DM_Redundant *red = (DM_Redundant*)dm->data;
203: PetscErrorCode ierr;
204: PetscInt i,nloc;
205: ISColoringValue *colors;
208: switch (ctype) {
209: case IS_COLORING_GLOBAL:
210: nloc = red->n;
211: break;
212: case IS_COLORING_LOCAL:
213: nloc = red->N;
214: break;
215: default: SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONG,"Unknown ISColoringType %d",(int)ctype);
216: }
217: PetscMalloc1(nloc,&colors);
218: for (i=0; i<nloc; i++) colors[i] = i;
219: ISColoringCreate(PetscObjectComm((PetscObject)dm),red->N,nloc,colors,PETSC_OWN_POINTER,coloring);
220: ISColoringSetType(*coloring,ctype);
221: return(0);
222: }
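/* Refinement and coarsening have no grid hierarchy to act on: both simply create another
   redundant DM with the same owner and the same number of dofs, and the interpolation
   between two such DMs (DMCreateInterpolation_Redundant below) is the identity matrix. */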
224: static PetscErrorCode DMRefine_Redundant(DM dmc,MPI_Comm comm,DM *dmf)
225: {
227: PetscMPIInt flag;
228: DM_Redundant *redc = (DM_Redundant*)dmc->data;
231: if (comm == MPI_COMM_NULL) {
232: PetscObjectGetComm((PetscObject)dmc,&comm);
233: }
234: MPI_Comm_compare(PetscObjectComm((PetscObject)dmc),comm,&flag);
235: if (flag != MPI_CONGRUENT && flag != MPI_IDENT) SETERRQ(PetscObjectComm((PetscObject)dmc),PETSC_ERR_SUP,"cannot change communicators");
236: DMRedundantCreate(comm,redc->rank,redc->N,dmf);
237: return(0);
238: }
240: static PetscErrorCode DMCoarsen_Redundant(DM dmf,MPI_Comm comm,DM *dmc)
241: {
243: PetscMPIInt flag;
244: DM_Redundant *redf = (DM_Redundant*)dmf->data;
247: if (comm == MPI_COMM_NULL) {
248: PetscObjectGetComm((PetscObject)dmf,&comm);
249: }
250: MPI_Comm_compare(PetscObjectComm((PetscObject)dmf),comm,&flag);
251: if (flag != MPI_CONGRUENT && flag != MPI_IDENT) SETERRQ(PetscObjectComm((PetscObject)dmf),PETSC_ERR_SUP,"cannot change communicators");
252: DMRedundantCreate(comm,redf->rank,redf->N,dmc);
253: return(0);
254: }
256: static PetscErrorCode DMCreateInterpolation_Redundant(DM dmc,DM dmf,Mat *P,Vec *scale)
257: {
259: DM_Redundant *redc = (DM_Redundant*)dmc->data;
260: DM_Redundant *redf = (DM_Redundant*)dmf->data;
261: PetscMPIInt flag;
262: PetscInt i,rstart,rend;
265: MPI_Comm_compare(PetscObjectComm((PetscObject)dmc),PetscObjectComm((PetscObject)dmf),&flag);
266: if (flag != MPI_CONGRUENT && flag != MPI_IDENT) SETERRQ(PetscObjectComm((PetscObject)dmf),PETSC_ERR_SUP,"cannot change communicators");
267: if (redc->rank != redf->rank) SETERRQ(PetscObjectComm((PetscObject)dmf),PETSC_ERR_ARG_INCOMP,"Owning rank does not match");
268: if (redc->N != redf->N) SETERRQ(PetscObjectComm((PetscObject)dmf),PETSC_ERR_ARG_INCOMP,"Global size does not match");
269: MatCreate(PetscObjectComm((PetscObject)dmc),P);
270: MatSetSizes(*P,redc->n,redc->n,redc->N,redc->N);
271: MatSetType(*P,MATAIJ);
272: MatSeqAIJSetPreallocation(*P,1,0);
273: MatMPIAIJSetPreallocation(*P,1,0,0,0);
274: MatGetOwnershipRange(*P,&rstart,&rend);
275: for (i=rstart; i<rend; i++) {MatSetValue(*P,i,i,1.0,INSERT_VALUES);}
276: MatAssemblyBegin(*P,MAT_FINAL_ASSEMBLY);
277: MatAssemblyEnd(*P,MAT_FINAL_ASSEMBLY);
278: if (scale) {DMCreateInterpolationScale(dmc,dmf,*P,scale);}
279: return(0);
280: }
282: /*@
283: DMRedundantSetSize - Sets the size of a densely coupled redundant object
285: Collective on DM
287: Input Parameters:
288: + dm - redundant DM
289: . rank - rank of the process that will own the redundant degrees of freedom
290: - N - total number of redundant degrees of freedom
292: Level: advanced
294: .seealso: DMDestroy(), DMCreateGlobalVector(), DMRedundantCreate(), DMRedundantGetSize()
295: @*/
296: PetscErrorCode DMRedundantSetSize(DM dm,PetscMPIInt rank,PetscInt N)
297: {
305: PetscTryMethod(dm,"DMRedundantSetSize_C",(DM,PetscMPIInt,PetscInt),(dm,rank,N));
306: return(0);
307: }
309: /*@
310: DMRedundantGetSize - Gets the size of a densely coupled redundant object
312: Not Collective
314: Input Parameter:
315: . dm - redundant DM
317: Output Parameters:
318: + rank - rank of process to own redundant degrees of freedom (or NULL)
319: - N - total number of redundant degrees of freedom (or NULL)
321: Level: advanced
323: .seealso: DMDestroy(), DMCreateGlobalVector(), DMRedundantCreate(), DMRedundantSetSize()
324: @*/
325: PetscErrorCode DMRedundantGetSize(DM dm,PetscMPIInt *rank,PetscInt *N)
326: {
332: PetscUseMethod(dm,"DMRedundantGetSize_C",(DM,PetscMPIInt*,PetscInt*),(dm,rank,N));
333: return(0);
334: }
336: static PetscErrorCode DMRedundantSetSize_Redundant(DM dm,PetscMPIInt rank,PetscInt N)
337: {
338: DM_Redundant *red = (DM_Redundant*)dm->data;
340: PetscMPIInt myrank;
343: MPI_Comm_rank(PetscObjectComm((PetscObject)dm),&myrank);
344: red->rank = rank;
345: red->N = N;
346: red->n = (myrank == rank) ? N : 0;
347: return(0);
348: }
350: static PetscErrorCode DMRedundantGetSize_Redundant(DM dm,PetscMPIInt *rank,PetscInt *N)
351: {
352: DM_Redundant *red = (DM_Redundant*)dm->data;
355: if (rank) *rank = red->rank;
356: if (N) *N = red->N;
357: return(0);
358: }
360: /*MC
361: DMREDUNDANT = "redundant" - A DM object that is used to manage data for a small set of dense globally coupled variables.
362: In the global representation of the vector, the variables are all stored on a single MPI process (all the other MPI processes have
363: no variables); in the local representation, all the variables are stored on ALL the MPI processes (because they are all needed for each
364: process's local computations).
366: This DM is generally used inside a DMCOMPOSITE object. For example, it may be used to store continuation parameters for a bifurcation problem.
369: Level: intermediate
371: .seealso: DMType, DMCOMPOSITE, DMCreate(), DMRedundantSetSize(), DMRedundantGetSize()
372: M*/
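/*
   A minimal sketch of the DMCOMPOSITE pattern mentioned above (illustrative only; error
   checking with CHKERRQ is omitted and the DMDA field is just an example companion DM):

     #include <petscdmredundant.h>
     #include <petscdmcomposite.h>
     #include <petscdmda.h>

     DM  red,da,pack;
     Vec U;

     DMRedundantCreate(PETSC_COMM_WORLD,0,1,&red);      // one redundant dof, owned by rank 0
     DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,128,1,1,NULL,&da);
     DMSetUp(da);
     DMCompositeCreate(PETSC_COMM_WORLD,&pack);
     DMCompositeAddDM(pack,red);
     DMCompositeAddDM(pack,da);
     DMCreateGlobalVector(pack,&U);                     // continuation parameter + field in one vector

     VecDestroy(&U);
     DMDestroy(&pack); DMDestroy(&da); DMDestroy(&red);
*/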
374: PETSC_EXTERN PetscErrorCode DMCreate_Redundant(DM dm)
375: {
377: DM_Redundant *red;
380: PetscNewLog(dm,&red);
381: dm->data = red;
383: PetscObjectChangeTypeName((PetscObject)dm,DMREDUNDANT);
385: dm->ops->setup = DMSetUp_Redundant;
386: dm->ops->view = DMView_Redundant;
387: dm->ops->createglobalvector = DMCreateGlobalVector_Redundant;
388: dm->ops->createlocalvector = DMCreateLocalVector_Redundant;
389: dm->ops->creatematrix = DMCreateMatrix_Redundant;
390: dm->ops->destroy = DMDestroy_Redundant;
391: dm->ops->globaltolocalbegin = DMGlobalToLocalBegin_Redundant;
392: dm->ops->globaltolocalend = DMGlobalToLocalEnd_Redundant;
393: dm->ops->localtoglobalbegin = DMLocalToGlobalBegin_Redundant;
394: dm->ops->localtoglobalend = DMLocalToGlobalEnd_Redundant;
395: dm->ops->refine = DMRefine_Redundant;
396: dm->ops->coarsen = DMCoarsen_Redundant;
397: dm->ops->createinterpolation= DMCreateInterpolation_Redundant;
398: dm->ops->getcoloring = DMCreateColoring_Redundant;
400: PetscObjectComposeFunction((PetscObject)dm,"DMRedundantSetSize_C",DMRedundantSetSize_Redundant);
401: PetscObjectComposeFunction((PetscObject)dm,"DMRedundantGetSize_C",DMRedundantGetSize_Redundant);
402: return(0);
403: }
405: /*@C
406: DMRedundantCreate - Creates a DM object, used to manage data for dense globally coupled variables
408: Collective on MPI_Comm
410: Input Parameters:
411: + comm - the processors that will share the global vector
412: . rank - rank of the process that will own the redundant values
413: - N - total number of degrees of freedom
415: Output Parameter:
416: . dm - the redundant DM
418: Level: advanced
420: .seealso: DMDestroy(), DMCreateGlobalVector(), DMCreateMatrix(), DMCompositeAddDM(), DMREDUNDANT, DMSetType(), DMRedundantSetSize(), DMRedundantGetSize()
422: @*/
423: PetscErrorCode DMRedundantCreate(MPI_Comm comm,PetscMPIInt rank,PetscInt N,DM *dm)
424: {
429: DMCreate(comm,dm);
430: DMSetType(*dm,DMREDUNDANT);
431: DMRedundantSetSize(*dm,rank,N);
432: DMSetUp(*dm);
433: return(0);
434: }
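/*
   A minimal sketch of the vector semantics implemented above (illustrative only; error
   checking omitted).  Rank 0 owns three redundant values; after a global-to-local
   scatter every rank holds its own copy of all three:

     #include <petscdmredundant.h>

     DM          dm;
     Vec         g,l;
     PetscMPIInt rank;
     PetscInt    ix[3] = {0,1,2};
     PetscScalar v[3]  = {1.0,2.0,3.0};

     MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
     DMRedundantCreate(PETSC_COMM_WORLD,0,3,&dm);
     DMCreateGlobalVector(dm,&g);                        // local size 3 on rank 0, 0 elsewhere
     DMCreateLocalVector(dm,&l);                         // local size 3 on every rank
     if (!rank) {VecSetValues(g,3,ix,v,INSERT_VALUES);}  // only the owner holds global entries
     VecAssemblyBegin(g); VecAssemblyEnd(g);
     DMGlobalToLocalBegin(dm,g,INSERT_VALUES,l);
     DMGlobalToLocalEnd(dm,g,INSERT_VALUES,l);           // broadcast: all ranks now hold {1,2,3}

     VecDestroy(&l); VecDestroy(&g); DMDestroy(&dm);
*/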