Actual source code: dmredundant.c
#include <petsc/private/dmimpl.h>
#include <petscdmredundant.h>

typedef struct {
  PetscMPIInt rank; /* owner */
  PetscInt    N;    /* total number of dofs */
  PetscInt    n;    /* owned number of dofs, n=N on owner, n=0 on non-owners */
} DM_Redundant;
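
/* Layout sketch (illustrative, not part of the implementation): with 3 MPI ranks,
   rank = 1 and N = 4, the global vector has local sizes 0/4/0 on ranks 0/1/2,
   while every rank's local vector holds all 4 entries. */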

static PetscErrorCode DMCreateMatrix_Redundant(DM dm,Mat *J)
{
  DM_Redundant           *red = (DM_Redundant*)dm->data;
  ISLocalToGlobalMapping ltog;
  PetscInt               i,rstart,rend,*cols;
  PetscScalar            *vals;

  MatCreate(PetscObjectComm((PetscObject)dm),J);
  MatSetSizes(*J,red->n,red->n,red->N,red->N);
  MatSetType(*J,dm->mattype);
  MatSeqAIJSetPreallocation(*J,red->n,NULL);
  MatSeqBAIJSetPreallocation(*J,1,red->n,NULL);
  MatMPIAIJSetPreallocation(*J,red->n,NULL,red->N-red->n,NULL);
  MatMPIBAIJSetPreallocation(*J,1,red->n,NULL,red->N-red->n,NULL);

  DMGetLocalToGlobalMapping(dm,&ltog);
  MatSetLocalToGlobalMapping(*J,ltog,ltog);
  MatSetDM(*J,dm);

  /* The redundant block is dense: every owned row couples to all N columns */
  PetscMalloc2(red->N,&cols,red->N,&vals);
  for (i=0; i<red->N; i++) {
    cols[i] = i;
    vals[i] = 0.0;
  }
  MatGetOwnershipRange(*J,&rstart,&rend);
  for (i=rstart; i<rend; i++) {
    MatSetValues(*J,1,&i,red->N,cols,vals,INSERT_VALUES);
  }
  PetscFree2(cols,vals);
  MatAssemblyBegin(*J,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(*J,MAT_FINAL_ASSEMBLY);
  return 0;
}

static PetscErrorCode DMDestroy_Redundant(DM dm)
{
  PetscObjectComposeFunction((PetscObject)dm,"DMRedundantSetSize_C",NULL);
  PetscObjectComposeFunction((PetscObject)dm,"DMRedundantGetSize_C",NULL);
  PetscObjectComposeFunction((PetscObject)dm,"DMSetUpGLVisViewer_C",NULL);
  /* This was originally freed in DMDestroy(), but that prevents reference counting of backend objects */
  PetscFree(dm->data);
  return 0;
}

static PetscErrorCode DMCreateGlobalVector_Redundant(DM dm,Vec *gvec)
{
  DM_Redundant           *red = (DM_Redundant*)dm->data;
  ISLocalToGlobalMapping ltog;

  *gvec = NULL;
  VecCreate(PetscObjectComm((PetscObject)dm),gvec);
  VecSetSizes(*gvec,red->n,red->N);
  VecSetType(*gvec,dm->vectype);
  DMGetLocalToGlobalMapping(dm,&ltog);
  VecSetLocalToGlobalMapping(*gvec,ltog);
  VecSetDM(*gvec,dm);
  return 0;
}

static PetscErrorCode DMCreateLocalVector_Redundant(DM dm,Vec *lvec)
{
  DM_Redundant *red = (DM_Redundant*)dm->data;

  *lvec = NULL;
  VecCreate(PETSC_COMM_SELF,lvec);
  VecSetSizes(*lvec,red->N,red->N);
  VecSetType(*lvec,dm->vectype);
  VecSetDM(*lvec,dm);
  return 0;
}

static PetscErrorCode DMLocalToGlobalBegin_Redundant(DM dm,Vec l,InsertMode imode,Vec g)
{
  DM_Redundant      *red = (DM_Redundant*)dm->data;
  const PetscScalar *lv;
  PetscScalar       *gv;
  PetscMPIInt       rank;

  MPI_Comm_rank(PetscObjectComm((PetscObject)dm),&rank);
  VecGetArrayRead(l,&lv);
  VecGetArray(g,&gv);
  switch (imode) {
  case ADD_VALUES:
  case MAX_VALUES:
  {
    void        *source;
    PetscScalar *buffer;
    PetscInt    i;
    if (rank == red->rank) {
      /* The owner combines its own local values in place, then reduces contributions from all other ranks */
      buffer = gv;
      source = MPI_IN_PLACE;
      if (imode == ADD_VALUES) for (i=0; i<red->N; i++) buffer[i] = gv[i] + lv[i];
#if !defined(PETSC_USE_COMPLEX)
      if (imode == MAX_VALUES) for (i=0; i<red->N; i++) buffer[i] = PetscMax(gv[i],lv[i]);
#endif
    } else source = (void*)lv;
    MPI_Reduce(source,gv,red->N,MPIU_SCALAR,(imode == ADD_VALUES) ? MPIU_SUM : MPIU_MAX,red->rank,PetscObjectComm((PetscObject)dm));
  } break;
  case INSERT_VALUES:
    PetscArraycpy(gv,lv,red->n);
    break;
  default: SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"InsertMode not supported");
  }
  VecRestoreArrayRead(l,&lv);
  VecRestoreArray(g,&gv);
  return 0;
}

static PetscErrorCode DMLocalToGlobalEnd_Redundant(DM dm,Vec l,InsertMode imode,Vec g)
{
  return 0;
}

static PetscErrorCode DMGlobalToLocalBegin_Redundant(DM dm,Vec g,InsertMode imode,Vec l)
{
  DM_Redundant      *red = (DM_Redundant*)dm->data;
  const PetscScalar *gv;
  PetscScalar       *lv;

  VecGetArrayRead(g,&gv);
  VecGetArray(l,&lv);
  switch (imode) {
  case INSERT_VALUES:
    /* The owner's values are broadcast so that every rank's local vector holds all N entries */
    if (red->n) PetscArraycpy(lv,gv,red->n);
    MPI_Bcast(lv,red->N,MPIU_SCALAR,red->rank,PetscObjectComm((PetscObject)dm));
    break;
  default: SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"InsertMode not supported");
  }
  VecRestoreArrayRead(g,&gv);
  VecRestoreArray(l,&lv);
  return 0;
}

static PetscErrorCode DMGlobalToLocalEnd_Redundant(DM dm,Vec g,InsertMode imode,Vec l)
{
  return 0;
}

static PetscErrorCode DMSetUp_Redundant(DM dm)
{
  return 0;
}

static PetscErrorCode DMView_Redundant(DM dm,PetscViewer viewer)
{
  DM_Redundant *red = (DM_Redundant*)dm->data;
  PetscBool    iascii;

  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
  if (iascii) {
    PetscViewerASCIIPrintf(viewer,"redundant: rank=%d N=%D\n",red->rank,red->N);
  }
  return 0;
}

static PetscErrorCode DMCreateColoring_Redundant(DM dm,ISColoringType ctype,ISColoring *coloring)
{
  DM_Redundant    *red = (DM_Redundant*)dm->data;
  PetscInt        i,nloc;
  ISColoringValue *colors;

  switch (ctype) {
  case IS_COLORING_GLOBAL:
    nloc = red->n;
    break;
  case IS_COLORING_LOCAL:
    nloc = red->N;
    break;
  default: SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONG,"Unknown ISColoringType %d",(int)ctype);
  }
  PetscMalloc1(nloc,&colors);
  for (i=0; i<nloc; i++) colors[i] = i;
  ISColoringCreate(PetscObjectComm((PetscObject)dm),red->N,nloc,colors,PETSC_OWN_POINTER,coloring);
  ISColoringSetType(*coloring,ctype);
  return 0;
}

static PetscErrorCode DMRefine_Redundant(DM dmc,MPI_Comm comm,DM *dmf)
{
  PetscMPIInt  flag;
  DM_Redundant *redc = (DM_Redundant*)dmc->data;

  if (comm == MPI_COMM_NULL) {
    PetscObjectGetComm((PetscObject)dmc,&comm);
  }
  MPI_Comm_compare(PetscObjectComm((PetscObject)dmc),comm,&flag);
  if (flag != MPI_CONGRUENT && flag != MPI_IDENT) SETERRQ(PetscObjectComm((PetscObject)dmc),PETSC_ERR_SUP,"cannot change communicators");
  DMRedundantCreate(comm,redc->rank,redc->N,dmf);
  return 0;
}

static PetscErrorCode DMCoarsen_Redundant(DM dmf,MPI_Comm comm,DM *dmc)
{
  PetscMPIInt  flag;
  DM_Redundant *redf = (DM_Redundant*)dmf->data;

  if (comm == MPI_COMM_NULL) {
    PetscObjectGetComm((PetscObject)dmf,&comm);
  }
  MPI_Comm_compare(PetscObjectComm((PetscObject)dmf),comm,&flag);
  if (flag != MPI_CONGRUENT && flag != MPI_IDENT) SETERRQ(PetscObjectComm((PetscObject)dmf),PETSC_ERR_SUP,"cannot change communicators");
  DMRedundantCreate(comm,redf->rank,redf->N,dmc);
  return 0;
}

static PetscErrorCode DMCreateInterpolation_Redundant(DM dmc,DM dmf,Mat *P,Vec *scale)
{
  DM_Redundant *redc = (DM_Redundant*)dmc->data;
  DM_Redundant *redf = (DM_Redundant*)dmf->data;
  PetscMPIInt  flag;
  PetscInt     i,rstart,rend;

  MPI_Comm_compare(PetscObjectComm((PetscObject)dmc),PetscObjectComm((PetscObject)dmf),&flag);
  if (flag != MPI_CONGRUENT && flag != MPI_IDENT) SETERRQ(PetscObjectComm((PetscObject)dmf),PETSC_ERR_SUP,"cannot change communicators");
  if (redc->rank != redf->rank) SETERRQ(PetscObjectComm((PetscObject)dmf),PETSC_ERR_ARG_INCOMP,"Owning rank does not match");
  if (redc->N != redf->N) SETERRQ(PetscObjectComm((PetscObject)dmf),PETSC_ERR_ARG_INCOMP,"Global size does not match");

  /* Interpolation between two redundant DMs of the same size is the identity */
  MatCreate(PetscObjectComm((PetscObject)dmc),P);
  MatSetSizes(*P,redc->n,redc->n,redc->N,redc->N);
  MatSetType(*P,MATAIJ);
  MatSeqAIJSetPreallocation(*P,1,NULL);
  MatMPIAIJSetPreallocation(*P,1,NULL,0,NULL);
  MatGetOwnershipRange(*P,&rstart,&rend);
  for (i=rstart; i<rend; i++) MatSetValue(*P,i,i,1.0,INSERT_VALUES);
  MatAssemblyBegin(*P,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(*P,MAT_FINAL_ASSEMBLY);
  if (scale) DMCreateInterpolationScale(dmc,dmf,*P,scale);
  return 0;
}

/*@
   DMRedundantSetSize - Sets the size of a densely coupled redundant object

   Collective on dm

   Input Parameters:
+  dm - redundant DM
.  rank - rank of process to own redundant degrees of freedom
-  N - total number of redundant degrees of freedom

   Level: advanced

.seealso: DMDestroy(), DMCreateGlobalVector(), DMRedundantCreate(), DMRedundantGetSize()
@*/
PetscErrorCode DMRedundantSetSize(DM dm,PetscMPIInt rank,PetscInt N)
{
  PetscTryMethod(dm,"DMRedundantSetSize_C",(DM,PetscMPIInt,PetscInt),(dm,rank,N));
  return 0;
}
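
/* A minimal usage sketch (illustrative, not part of this source file): creating a
   DMREDUNDANT by type and setting its size explicitly. The guard macro
   DMREDUNDANT_USAGE_EXAMPLES is hypothetical and only keeps the example out of the build. */
#if defined(DMREDUNDANT_USAGE_EXAMPLES)
static PetscErrorCode ExampleSetSize(MPI_Comm comm,DM *newdm)
{
  DMCreate(comm,newdm);
  DMSetType(*newdm,DMREDUNDANT);
  DMRedundantSetSize(*newdm,0,4); /* rank 0 owns all 4 redundant dofs */
  DMSetUp(*newdm);
  return 0;
}
#endif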

/*@
   DMRedundantGetSize - Gets the size of a densely coupled redundant object

   Not Collective

   Input Parameter:
.  dm - redundant DM

   Output Parameters:
+  rank - rank of process to own redundant degrees of freedom (or NULL)
-  N - total number of redundant degrees of freedom (or NULL)

   Level: advanced

.seealso: DMDestroy(), DMCreateGlobalVector(), DMRedundantCreate(), DMRedundantSetSize()
@*/
PetscErrorCode DMRedundantGetSize(DM dm,PetscMPIInt *rank,PetscInt *N)
{
  PetscUseMethod(dm,"DMRedundantGetSize_C",(DM,PetscMPIInt*,PetscInt*),(dm,rank,N));
  return 0;
}
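
/* Query sketch (illustrative, not part of this source file): either output may be NULL
   if it is not needed. The guard macro DMREDUNDANT_USAGE_EXAMPLES is hypothetical. */
#if defined(DMREDUNDANT_USAGE_EXAMPLES)
static PetscErrorCode ExampleGetSize(DM dm)
{
  PetscMPIInt owner;
  PetscInt    N;

  DMRedundantGetSize(dm,&owner,&N);
  PetscPrintf(PetscObjectComm((PetscObject)dm),"owner=%d N=%D\n",owner,N);
  return 0;
}
#endif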

static PetscErrorCode DMRedundantSetSize_Redundant(DM dm,PetscMPIInt rank,PetscInt N)
{
  DM_Redundant *red = (DM_Redundant*)dm->data;
  PetscMPIInt  myrank;
  PetscInt     i,*globals;

  MPI_Comm_rank(PetscObjectComm((PetscObject)dm),&myrank);
  red->rank = rank;
  red->N    = N;
  red->n    = (myrank == rank) ? N : 0;

  /* The local-to-global mapping is set up here: every rank maps local indices 0..N-1 to the same global indices */
  PetscMalloc1(red->N,&globals);
  for (i=0; i<red->N; i++) globals[i] = i;
  ISLocalToGlobalMappingDestroy(&dm->ltogmap);
  ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)dm),1,red->N,globals,PETSC_OWN_POINTER,&dm->ltogmap);
  return 0;
}

static PetscErrorCode DMRedundantGetSize_Redundant(DM dm,PetscMPIInt *rank,PetscInt *N)
{
  DM_Redundant *red = (DM_Redundant*)dm->data;

  if (rank) *rank = red->rank;
  if (N)    *N    = red->N;
  return 0;
}

static PetscErrorCode DMSetUpGLVisViewer_Redundant(PetscObject odm, PetscViewer viewer)
{
  return 0;
}

/*MC
   DMREDUNDANT = "redundant" - A DM object that is used to manage data for a small set of dense globally coupled variables.
   In the global representation of the vector the variables are all stored on a single MPI process (all the other MPI
   processes have no variables); in the local representation all the variables are stored on ALL the MPI processes
   (because they are all needed for each process's local computations).

   This DM is generally used inside a DMCOMPOSITE object. For example, it may be used to store continuation parameters for a bifurcation problem.

   Level: intermediate

.seealso: DMType, DMCOMPOSITE, DMCreate(), DMRedundantSetSize(), DMRedundantGetSize()
M*/
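
/* Usage sketch for the DMCOMPOSITE pairing mentioned above (illustrative only; it would
   additionally require #include <petscdmcomposite.h>). The guard macro
   DMREDUNDANT_USAGE_EXAMPLES is hypothetical. */
#if defined(DMREDUNDANT_USAGE_EXAMPLES)
static PetscErrorCode ExampleCompositeWithRedundant(MPI_Comm comm,DM dmfield,DM *pack)
{
  DM dmparams;

  /* One redundant dof owned by rank 0, e.g. a continuation parameter */
  DMRedundantCreate(comm,0,1,&dmparams);
  DMCompositeCreate(comm,pack);
  DMCompositeAddDM(*pack,dmfield);   /* the PDE field DM */
  DMCompositeAddDM(*pack,dmparams);  /* the redundant parameter DM */
  DMDestroy(&dmparams);              /* the composite keeps its own reference */
  return 0;
}
#endif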

PETSC_EXTERN PetscErrorCode DMCreate_Redundant(DM dm)
{
  DM_Redundant *red;

  PetscNewLog(dm,&red);
  dm->data = red;

  dm->ops->setup               = DMSetUp_Redundant;
  dm->ops->view                = DMView_Redundant;
  dm->ops->createglobalvector  = DMCreateGlobalVector_Redundant;
  dm->ops->createlocalvector   = DMCreateLocalVector_Redundant;
  dm->ops->creatematrix        = DMCreateMatrix_Redundant;
  dm->ops->destroy             = DMDestroy_Redundant;
  dm->ops->globaltolocalbegin  = DMGlobalToLocalBegin_Redundant;
  dm->ops->globaltolocalend    = DMGlobalToLocalEnd_Redundant;
  dm->ops->localtoglobalbegin  = DMLocalToGlobalBegin_Redundant;
  dm->ops->localtoglobalend    = DMLocalToGlobalEnd_Redundant;
  dm->ops->refine              = DMRefine_Redundant;
  dm->ops->coarsen             = DMCoarsen_Redundant;
  dm->ops->createinterpolation = DMCreateInterpolation_Redundant;
  dm->ops->getcoloring         = DMCreateColoring_Redundant;

  PetscObjectComposeFunction((PetscObject)dm,"DMRedundantSetSize_C",DMRedundantSetSize_Redundant);
  PetscObjectComposeFunction((PetscObject)dm,"DMRedundantGetSize_C",DMRedundantGetSize_Redundant);
  PetscObjectComposeFunction((PetscObject)dm,"DMSetUpGLVisViewer_C",DMSetUpGLVisViewer_Redundant);
  return 0;
}

/*@C
   DMRedundantCreate - Creates a DM object, used to manage data for dense globally coupled variables

   Collective

   Input Parameters:
+  comm - the processors that will share the global vector
.  rank - rank to own the redundant values
-  N - total number of degrees of freedom

   Output Parameter:
.  dm - the redundant DM

   Level: advanced

.seealso: DMDestroy(), DMCreateGlobalVector(), DMCreateMatrix(), DMCompositeAddDM(), DMREDUNDANT, DMSetType(), DMRedundantSetSize(), DMRedundantGetSize()
@*/
PetscErrorCode DMRedundantCreate(MPI_Comm comm,PetscMPIInt rank,PetscInt N,DM *dm)
{
  DMCreate(comm,dm);
  DMSetType(*dm,DMREDUNDANT);
  DMRedundantSetSize(*dm,rank,N);
  DMSetUp(*dm);
  return 0;
}
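
/* End-to-end usage sketch (illustrative, not part of this source file): create a redundant DM,
   broadcast the owner's values to every rank's local vector, then sum contributions back onto
   the owner. The guard macro DMREDUNDANT_USAGE_EXAMPLES is hypothetical. */
#if defined(DMREDUNDANT_USAGE_EXAMPLES)
static PetscErrorCode ExampleRedundantVectors(MPI_Comm comm)
{
  DM  dm;
  Vec g,l;

  DMRedundantCreate(comm,0,3,&dm);            /* rank 0 owns 3 redundant dofs */
  DMCreateGlobalVector(dm,&g);                /* local size 3 on rank 0, 0 elsewhere */
  DMCreateLocalVector(dm,&l);                 /* sequential vector of size 3 on every rank */
  VecSet(g,1.0);
  DMGlobalToLocalBegin(dm,g,INSERT_VALUES,l); /* broadcast from the owner */
  DMGlobalToLocalEnd(dm,g,INSERT_VALUES,l);
  VecSet(l,2.0);
  DMLocalToGlobalBegin(dm,l,ADD_VALUES,g);    /* reduce (sum) onto the owner */
  DMLocalToGlobalEnd(dm,l,ADD_VALUES,g);
  VecDestroy(&l);
  VecDestroy(&g);
  DMDestroy(&dm);
  return 0;
}
#endif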