Actual source code: dmredundant.c
petsc-3.7.7 2017-09-25
#include <petsc/private/dmimpl.h>
#include <petscdmredundant.h>     /*I "petscdmredundant.h" I*/

typedef struct {
  PetscMPIInt rank;               /* owner */
  PetscInt    N;                  /* total number of dofs */
  PetscInt    n;                  /* owned number of dofs, n=N on owner, n=0 on non-owners */
} DM_Redundant;

static PetscErrorCode DMCreateMatrix_Redundant(DM dm,Mat *J)
{
  DM_Redundant           *red = (DM_Redundant*)dm->data;
  PetscErrorCode         ierr;
  ISLocalToGlobalMapping ltog;
  PetscInt               i,rstart,rend,*cols;
  PetscScalar            *vals;

  PetscFunctionBegin;
  ierr = MatCreate(PetscObjectComm((PetscObject)dm),J);CHKERRQ(ierr);
  ierr = MatSetSizes(*J,red->n,red->n,red->N,red->N);CHKERRQ(ierr);
  ierr = MatSetType(*J,dm->mattype);CHKERRQ(ierr);
  ierr = MatSeqAIJSetPreallocation(*J,red->n,NULL);CHKERRQ(ierr);
  ierr = MatSeqBAIJSetPreallocation(*J,1,red->n,NULL);CHKERRQ(ierr);
  ierr = MatMPIAIJSetPreallocation(*J,red->n,NULL,red->N-red->n,NULL);CHKERRQ(ierr);
  ierr = MatMPIBAIJSetPreallocation(*J,1,red->n,NULL,red->N-red->n,NULL);CHKERRQ(ierr);

  ierr = DMGetLocalToGlobalMapping(dm,&ltog);CHKERRQ(ierr);
  ierr = MatSetLocalToGlobalMapping(*J,ltog,ltog);CHKERRQ(ierr);

  ierr = PetscMalloc2(red->N,&cols,red->N,&vals);CHKERRQ(ierr);
  for (i=0; i<red->N; i++) {
    cols[i] = i;
    vals[i] = 0.0;
  }
  ierr = MatGetOwnershipRange(*J,&rstart,&rend);CHKERRQ(ierr);
  for (i=rstart; i<rend; i++) {
    ierr = MatSetValues(*J,1,&i,red->N,cols,vals,INSERT_VALUES);CHKERRQ(ierr);
  }
  ierr = PetscFree2(cols,vals);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(*J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode DMDestroy_Redundant(DM dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectComposeFunction((PetscObject)dm,"DMRedundantSetSize_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)dm,"DMRedundantGetSize_C",NULL);CHKERRQ(ierr);
  /* This was originally freed in DMDestroy(), but that prevents reference counting of backend objects */
  ierr = PetscFree(dm->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode DMCreateGlobalVector_Redundant(DM dm,Vec *gvec)
{
  PetscErrorCode         ierr;
  DM_Redundant           *red = (DM_Redundant*)dm->data;
  ISLocalToGlobalMapping ltog;

  PetscFunctionBegin;
  *gvec = 0;
  ierr  = VecCreate(PetscObjectComm((PetscObject)dm),gvec);CHKERRQ(ierr);
  ierr  = VecSetSizes(*gvec,red->n,red->N);CHKERRQ(ierr);
  ierr  = VecSetType(*gvec,dm->vectype);CHKERRQ(ierr);
  ierr  = DMGetLocalToGlobalMapping(dm,&ltog);CHKERRQ(ierr);
  ierr  = VecSetLocalToGlobalMapping(*gvec,ltog);CHKERRQ(ierr);
  ierr  = VecSetDM(*gvec,dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode DMCreateLocalVector_Redundant(DM dm,Vec *lvec)
{
  PetscErrorCode ierr;
  DM_Redundant   *red = (DM_Redundant*)dm->data;

  PetscFunctionBegin;
  *lvec = 0;
  ierr  = VecCreate(PETSC_COMM_SELF,lvec);CHKERRQ(ierr);
  ierr  = VecSetSizes(*lvec,red->N,red->N);CHKERRQ(ierr);
  ierr  = VecSetType(*lvec,dm->vectype);CHKERRQ(ierr);
  ierr  = VecSetDM(*lvec,dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode DMLocalToGlobalBegin_Redundant(DM dm,Vec l,InsertMode imode,Vec g)
{
  PetscErrorCode    ierr;
  DM_Redundant      *red = (DM_Redundant*)dm->data;
  const PetscScalar *lv;
  PetscScalar       *gv;
  PetscMPIInt       rank;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)dm),&rank);CHKERRQ(ierr);
  ierr = VecGetArrayRead(l,&lv);CHKERRQ(ierr);
  ierr = VecGetArray(g,&gv);CHKERRQ(ierr);
  switch (imode) {
  case ADD_VALUES:
  case MAX_VALUES:
  {
    void        *source;
    PetscScalar *buffer;
    PetscInt    i;
    if (rank == red->rank) {
#if defined(PETSC_HAVE_MPI_IN_PLACE)
      buffer = gv;
      source = MPI_IN_PLACE;
#else
      ierr   = PetscMalloc1(red->N,&buffer);CHKERRQ(ierr);
      source = buffer;
#endif
      if (imode == ADD_VALUES) for (i=0; i<red->N; i++) buffer[i] = gv[i] + lv[i];
#if !defined(PETSC_USE_COMPLEX)
      if (imode == MAX_VALUES) for (i=0; i<red->N; i++) buffer[i] = PetscMax(gv[i],lv[i]);
#endif
    } else source = (void*)lv;
    ierr = MPI_Reduce(source,gv,red->N,MPIU_SCALAR,(imode == ADD_VALUES) ? MPIU_SUM : MPIU_MAX,red->rank,PetscObjectComm((PetscObject)dm));CHKERRQ(ierr);
#if !defined(PETSC_HAVE_MPI_IN_PLACE)
    if (rank == red->rank) {ierr = PetscFree(buffer);CHKERRQ(ierr);}
#endif
  } break;
  case INSERT_VALUES:
    ierr = PetscMemcpy(gv,lv,red->n*sizeof(PetscScalar));CHKERRQ(ierr);
    break;
  default: SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"InsertMode not supported");
  }
  ierr = VecRestoreArrayRead(l,&lv);CHKERRQ(ierr);
  ierr = VecRestoreArray(g,&gv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode DMLocalToGlobalEnd_Redundant(DM dm,Vec l,InsertMode imode,Vec g)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

static PetscErrorCode DMGlobalToLocalBegin_Redundant(DM dm,Vec g,InsertMode imode,Vec l)
{
  PetscErrorCode    ierr;
  DM_Redundant      *red = (DM_Redundant*)dm->data;
  const PetscScalar *gv;
  PetscScalar       *lv;

  PetscFunctionBegin;
  ierr = VecGetArrayRead(g,&gv);CHKERRQ(ierr);
  ierr = VecGetArray(l,&lv);CHKERRQ(ierr);
  switch (imode) {
  case INSERT_VALUES:
    if (red->n) {ierr = PetscMemcpy(lv,gv,red->n*sizeof(PetscScalar));CHKERRQ(ierr);}
    ierr = MPI_Bcast(lv,red->N,MPIU_SCALAR,red->rank,PetscObjectComm((PetscObject)dm));CHKERRQ(ierr);
    break;
  default: SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"InsertMode not supported");
  }
  ierr = VecRestoreArrayRead(g,&gv);CHKERRQ(ierr);
  ierr = VecRestoreArray(l,&lv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode DMGlobalToLocalEnd_Redundant(DM dm,Vec g,InsertMode imode,Vec l)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

static PetscErrorCode DMSetUp_Redundant(DM dm)
{
  PetscErrorCode ierr;
  DM_Redundant   *red = (DM_Redundant*)dm->data;
  PetscInt       i,*globals;

  PetscFunctionBegin;
  ierr = PetscMalloc1(red->N,&globals);CHKERRQ(ierr);
  for (i=0; i<red->N; i++) globals[i] = i;
  ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,1,red->N,globals,PETSC_OWN_POINTER,&dm->ltogmap);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode DMView_Redundant(DM dm,PetscViewer viewer)
{
  PetscErrorCode ierr;
  DM_Redundant   *red = (DM_Redundant*)dm->data;
  PetscBool      iascii;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = PetscViewerASCIIPrintf(viewer,"redundant: rank=%d N=%D\n",red->rank,red->N);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode DMCreateColoring_Redundant(DM dm,ISColoringType ctype,ISColoring *coloring)
{
  DM_Redundant    *red = (DM_Redundant*)dm->data;
  PetscErrorCode  ierr;
  PetscInt        i,nloc;
  ISColoringValue *colors;

  PetscFunctionBegin;
  switch (ctype) {
  case IS_COLORING_GLOBAL:
    nloc = red->n;
    break;
  case IS_COLORING_GHOSTED:
    nloc = red->N;
    break;
  default: SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONG,"Unknown ISColoringType %d",(int)ctype);
  }
  ierr = PetscMalloc1(nloc,&colors);CHKERRQ(ierr);
  for (i=0; i<nloc; i++) colors[i] = i;
  ierr = ISColoringCreate(PetscObjectComm((PetscObject)dm),red->N,nloc,colors,PETSC_OWN_POINTER,coloring);CHKERRQ(ierr);
  ierr = ISColoringSetType(*coloring,ctype);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode DMRefine_Redundant(DM dmc,MPI_Comm comm,DM *dmf)
{
  PetscErrorCode ierr;
  PetscMPIInt    flag;
  DM_Redundant   *redc = (DM_Redundant*)dmc->data;

  PetscFunctionBegin;
  if (comm == MPI_COMM_NULL) {
    ierr = PetscObjectGetComm((PetscObject)dmc,&comm);CHKERRQ(ierr);
  }
  ierr = MPI_Comm_compare(PetscObjectComm((PetscObject)dmc),comm,&flag);CHKERRQ(ierr);
  if (flag != MPI_CONGRUENT && flag != MPI_IDENT) SETERRQ(PetscObjectComm((PetscObject)dmc),PETSC_ERR_SUP,"cannot change communicators");
  ierr = DMRedundantCreate(comm,redc->rank,redc->N,dmf);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode DMCoarsen_Redundant(DM dmf,MPI_Comm comm,DM *dmc)
{
  PetscErrorCode ierr;
  PetscMPIInt    flag;
  DM_Redundant   *redf = (DM_Redundant*)dmf->data;

  PetscFunctionBegin;
  if (comm == MPI_COMM_NULL) {
    ierr = PetscObjectGetComm((PetscObject)dmf,&comm);CHKERRQ(ierr);
  }
  ierr = MPI_Comm_compare(PetscObjectComm((PetscObject)dmf),comm,&flag);CHKERRQ(ierr);
  if (flag != MPI_CONGRUENT && flag != MPI_IDENT) SETERRQ(PetscObjectComm((PetscObject)dmf),PETSC_ERR_SUP,"cannot change communicators");
  ierr = DMRedundantCreate(comm,redf->rank,redf->N,dmc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode DMCreateInterpolation_Redundant(DM dmc,DM dmf,Mat *P,Vec *scale)
{
  PetscErrorCode ierr;
  DM_Redundant   *redc = (DM_Redundant*)dmc->data;
  DM_Redundant   *redf = (DM_Redundant*)dmf->data;
  PetscMPIInt    flag;
  PetscInt       i,rstart,rend;

  PetscFunctionBegin;
  ierr = MPI_Comm_compare(PetscObjectComm((PetscObject)dmc),PetscObjectComm((PetscObject)dmf),&flag);CHKERRQ(ierr);
  if (flag != MPI_CONGRUENT && flag != MPI_IDENT) SETERRQ(PetscObjectComm((PetscObject)dmf),PETSC_ERR_SUP,"cannot change communicators");
  if (redc->rank != redf->rank) SETERRQ(PetscObjectComm((PetscObject)dmf),PETSC_ERR_ARG_INCOMP,"Owning rank does not match");
  if (redc->N != redf->N) SETERRQ(PetscObjectComm((PetscObject)dmf),PETSC_ERR_ARG_INCOMP,"Global size does not match");
  ierr = MatCreate(PetscObjectComm((PetscObject)dmc),P);CHKERRQ(ierr);
  ierr = MatSetSizes(*P,redc->n,redc->n,redc->N,redc->N);CHKERRQ(ierr);
  ierr = MatSetType(*P,MATAIJ);CHKERRQ(ierr);
  ierr = MatSeqAIJSetPreallocation(*P,1,0);CHKERRQ(ierr);
  ierr = MatMPIAIJSetPreallocation(*P,1,0,0,0);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(*P,&rstart,&rend);CHKERRQ(ierr);
  for (i=rstart; i<rend; i++) {ierr = MatSetValue(*P,i,i,1.0,INSERT_VALUES);CHKERRQ(ierr);}
  ierr = MatAssemblyBegin(*P,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*P,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (scale) {ierr = DMCreateInterpolationScale(dmc,dmf,*P,scale);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}

/*@
   DMRedundantSetSize - Sets the size of a densely coupled redundant object

   Collective on DM

   Input Parameters:
+  dm - redundant DM
.  rank - rank of the process to own the redundant degrees of freedom
-  N - total number of redundant degrees of freedom

   Level: advanced

.seealso: DMDestroy(), DMCreateGlobalVector(), DMRedundantCreate(), DMRedundantGetSize()
@*/
PetscErrorCode DMRedundantSetSize(DM dm,PetscMPIInt rank,PetscInt N)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscTryMethod(dm,"DMRedundantSetSize_C",(DM,PetscMPIInt,PetscInt),(dm,rank,N));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

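/*
   A minimal usage sketch (not part of the library source; the helper name below is hypothetical):
   DMRedundantSetSize() is the hand-rolled path that DMRedundantCreate() wraps, so the size must be
   set after DMSetType() has composed the method and before DMSetUp() builds the local-to-global map.
*/
PetscErrorCode CreateRedundantByHandSketch(MPI_Comm comm,PetscMPIInt owner,PetscInt N,DM *dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMCreate(comm,dm);CHKERRQ(ierr);
  ierr = DMSetType(*dm,DMREDUNDANT);CHKERRQ(ierr);       /* composes DMRedundantSetSize_C */
  ierr = DMRedundantSetSize(*dm,owner,N);CHKERRQ(ierr);  /* rank 'owner' holds all N global dofs */
  ierr = DMSetUp(*dm);CHKERRQ(ierr);                     /* builds the local-to-global mapping */
  PetscFunctionReturn(0);
}
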
/*@
   DMRedundantGetSize - Gets the size of a densely coupled redundant object

   Not Collective

   Input Parameter:
.  dm - redundant DM

   Output Parameters:
+  rank - rank of the process owning the redundant degrees of freedom (or NULL)
-  N - total number of redundant degrees of freedom (or NULL)

   Level: advanced

.seealso: DMDestroy(), DMCreateGlobalVector(), DMRedundantCreate(), DMRedundantSetSize()
@*/
PetscErrorCode DMRedundantGetSize(DM dm,PetscMPIInt *rank,PetscInt *N)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscUseMethod(dm,"DMRedundantGetSize_C",(DM,PetscMPIInt*,PetscInt*),(dm,rank,N));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

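/*
   A query sketch (not part of the library source; the function name is hypothetical): either output
   argument of DMRedundantGetSize() may be NULL when only one of the two values is needed.
*/
PetscErrorCode ReportRedundantLayoutSketch(DM dm)
{
  PetscErrorCode ierr;
  PetscMPIInt    owner;
  PetscInt       N;

  PetscFunctionBegin;
  ierr = DMRedundantGetSize(dm,&owner,&N);CHKERRQ(ierr);
  ierr = PetscPrintf(PetscObjectComm((PetscObject)dm),"redundant dofs: N=%D owned by rank %d\n",N,(int)owner);CHKERRQ(ierr);
  ierr = DMRedundantGetSize(dm,NULL,&N);CHKERRQ(ierr);  /* rank may be NULL when only N is wanted */
  PetscFunctionReturn(0);
}
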
static PetscErrorCode DMRedundantSetSize_Redundant(DM dm,PetscMPIInt rank,PetscInt N)
{
  DM_Redundant   *red = (DM_Redundant*)dm->data;
  PetscErrorCode ierr;
  PetscMPIInt    myrank;

  PetscFunctionBegin;
  ierr      = MPI_Comm_rank(PetscObjectComm((PetscObject)dm),&myrank);CHKERRQ(ierr);
  red->rank = rank;
  red->N    = N;
  red->n    = (myrank == rank) ? N : 0;
  PetscFunctionReturn(0);
}

static PetscErrorCode DMRedundantGetSize_Redundant(DM dm,PetscMPIInt *rank,PetscInt *N)
{
  DM_Redundant *red = (DM_Redundant*)dm->data;

  PetscFunctionBegin;
  if (rank) *rank = red->rank;
  if (N)    *N    = red->N;
  PetscFunctionReturn(0);
}

/*MC
   DMREDUNDANT = "redundant" - A DM object that is used to manage data for a small set of dense, globally coupled variables.
   In the global representation of the vector the variables are all stored on a single MPI process (all the other MPI
   processes have no entries); in the local representation the variables are stored on ALL of the MPI processes (because
   they are all needed for each process's local computations).

   This DM is generally used inside a DMCOMPOSITE object; for example, it may be used to store continuation parameters for
   a bifurcation problem. A short usage sketch of that pattern follows DMCreate_Redundant() below.

   Level: intermediate

.seealso: DMType, DMCOMPOSITE, DMCreate(), DMRedundantSetSize(), DMRedundantGetSize()
M*/

PETSC_EXTERN PetscErrorCode DMCreate_Redundant(DM dm)
{
  PetscErrorCode ierr;
  DM_Redundant   *red;

  PetscFunctionBegin;
  ierr     = PetscNewLog(dm,&red);CHKERRQ(ierr);
  dm->data = red;

  ierr = PetscObjectChangeTypeName((PetscObject)dm,DMREDUNDANT);CHKERRQ(ierr);

  dm->ops->setup               = DMSetUp_Redundant;
  dm->ops->view                = DMView_Redundant;
  dm->ops->createglobalvector  = DMCreateGlobalVector_Redundant;
  dm->ops->createlocalvector   = DMCreateLocalVector_Redundant;
  dm->ops->creatematrix        = DMCreateMatrix_Redundant;
  dm->ops->destroy             = DMDestroy_Redundant;
  dm->ops->globaltolocalbegin  = DMGlobalToLocalBegin_Redundant;
  dm->ops->globaltolocalend    = DMGlobalToLocalEnd_Redundant;
  dm->ops->localtoglobalbegin  = DMLocalToGlobalBegin_Redundant;
  dm->ops->localtoglobalend    = DMLocalToGlobalEnd_Redundant;
  dm->ops->refine              = DMRefine_Redundant;
  dm->ops->coarsen             = DMCoarsen_Redundant;
  dm->ops->createinterpolation = DMCreateInterpolation_Redundant;
  dm->ops->getcoloring         = DMCreateColoring_Redundant;

  ierr = PetscObjectComposeFunction((PetscObject)dm,"DMRedundantSetSize_C",DMRedundantSetSize_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)dm,"DMRedundantGetSize_C",DMRedundantGetSize_Redundant);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

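/*
   A sketch of the DMCOMPOSITE pattern mentioned in the DMREDUNDANT manpage above (not part of the
   library source; the function name is hypothetical and the caller would also need petscdmcomposite.h):
   a single redundant continuation parameter, owned by rank 0, is packed next to a field DM.
*/
PetscErrorCode PackWithParameterSketch(DM dmfield,DM *packer)
{
  PetscErrorCode ierr;
  DM             red;

  PetscFunctionBegin;
  ierr = DMRedundantCreate(PetscObjectComm((PetscObject)dmfield),0,1,&red);CHKERRQ(ierr); /* 1 dof owned by rank 0 */
  ierr = DMCompositeCreate(PetscObjectComm((PetscObject)dmfield),packer);CHKERRQ(ierr);
  ierr = DMCompositeAddDM(*packer,dmfield);CHKERRQ(ierr);
  ierr = DMCompositeAddDM(*packer,red);CHKERRQ(ierr);
  ierr = DMDestroy(&red);CHKERRQ(ierr);  /* the composite keeps its own reference */
  PetscFunctionReturn(0);
}
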
/*@C
   DMRedundantCreate - Creates a DM object, used to manage data for dense, globally coupled variables

   Collective on MPI_Comm

   Input Parameters:
+  comm - the processors that will share the global vector
.  rank - the rank that owns the redundant values
-  N - total number of degrees of freedom

   Output Parameter:
.  dm - the redundant DM

   Level: advanced

.seealso: DMDestroy(), DMCreateGlobalVector(), DMCreateMatrix(), DMCompositeAddDM(), DMREDUNDANT, DMSetType(), DMRedundantSetSize(), DMRedundantGetSize()
@*/
PetscErrorCode DMRedundantCreate(MPI_Comm comm,PetscMPIInt rank,PetscInt N,DM *dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMCreate(comm,dm);CHKERRQ(ierr);
  ierr = DMSetType(*dm,DMREDUNDANT);CHKERRQ(ierr);
  ierr = DMRedundantSetSize(*dm,rank,N);CHKERRQ(ierr);
  ierr = DMSetUp(*dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
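
/*
   End-to-end sketch of DMRedundantCreate() usage (a standalone example program, not part of
   dmredundant.c): the global vector stores all N values on the owning rank only, and
   DMGlobalToLocal() broadcasts them so every rank's length-N local vector holds the full set.
*/
#include <petscdmredundant.h>

int main(int argc,char **argv)
{
  PetscErrorCode ierr;
  DM             dm;
  Vec            g,l;
  PetscInt       i,rstart,rend;

  ierr = PetscInitialize(&argc,&argv,NULL,NULL);if (ierr) return ierr;
  ierr = DMRedundantCreate(PETSC_COMM_WORLD,0,3,&dm);CHKERRQ(ierr);  /* 3 dofs, all owned by rank 0 */
  ierr = DMCreateGlobalVector(dm,&g);CHKERRQ(ierr);                  /* local length 3 on rank 0, 0 elsewhere */
  ierr = DMCreateLocalVector(dm,&l);CHKERRQ(ierr);                   /* length 3 on every rank */
  ierr = VecGetOwnershipRange(g,&rstart,&rend);CHKERRQ(ierr);        /* only the owner has a nonempty range */
  for (i=rstart; i<rend; i++) {ierr = VecSetValue(g,i,(PetscScalar)(i+1),INSERT_VALUES);CHKERRQ(ierr);}
  ierr = VecAssemblyBegin(g);CHKERRQ(ierr);
  ierr = VecAssemblyEnd(g);CHKERRQ(ierr);
  ierr = DMGlobalToLocalBegin(dm,g,INSERT_VALUES,l);CHKERRQ(ierr);   /* broadcast from the owning rank */
  ierr = DMGlobalToLocalEnd(dm,g,INSERT_VALUES,l);CHKERRQ(ierr);
  ierr = VecDestroy(&l);CHKERRQ(ierr);
  ierr = VecDestroy(&g);CHKERRQ(ierr);
  ierr = DMDestroy(&dm);CHKERRQ(ierr);
  ierr = PetscFinalize();
  return ierr;
}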