Actual source code: ex73.c
petsc-3.13.6 2020-09-29
/*T
   Concepts: KSP^solving a system of linear equations
   Concepts: KSP^Laplacian, 2d
   Processors: n
T*/

/*
   This example was derived from src/ksp/ksp/tutorials ex29.c

   Inhomogeneous Laplacian in 2D. Modeled by the partial differential equation

      -div \rho grad u = f,  0 < x,y < 1,

   with forcing function

      f = e^{-x^2/\nu} e^{-y^2/\nu}

   with Dirichlet boundary conditions

      u = f(x,y) for x = 0, x = 1, y = 0, y = 1

   or pure Neumann boundary conditions.
*/
static char help[] = "Solves 2D inhomogeneous Laplacian. Demonstrates using PCTelescopeSetCoarseDM functionality of PCTelescope via a DMShell\n\n";
#include <petscdm.h>
#include <petscdmda.h>
#include <petscdmshell.h>
#include <petscksp.h>

PetscErrorCode ComputeMatrix_ShellDA(KSP,Mat,Mat,void*);
PetscErrorCode ComputeMatrix_DMDA(DM,Mat,Mat,void*);
PetscErrorCode ComputeRHS_DMDA(DM,Vec,void*);
PetscErrorCode DMShellCreate_ShellDA(DM,DM*);
PetscErrorCode DMFieldScatter_ShellDA(DM,Vec,ScatterMode,DM,Vec);
PetscErrorCode DMStateScatter_ShellDA(DM,ScatterMode,DM);
typedef enum {DIRICHLET, NEUMANN} BCType;

typedef struct {
  PetscReal rho;
  PetscReal nu;
  BCType    bcType;
  MPI_Comm  comm;
} UserContext;
PetscErrorCode UserContextCreate(MPI_Comm comm,UserContext **ctx)
{
  UserContext *user;
  const char  *bcTypes[2] = {"dirichlet","neumann"};
  PetscInt    bc;

  PetscCalloc1(1,&user);
  user->comm = comm;
  PetscOptionsBegin(comm, "", "Options for the inhomogeneous Poisson equation", "DMqq");
  user->rho = 1.0;
  PetscOptionsReal("-rho", "The conductivity", "ex29.c", user->rho, &user->rho, NULL);
  user->nu = 0.1;
  PetscOptionsReal("-nu", "The width of the Gaussian source", "ex29.c", user->nu, &user->nu, NULL);
  bc = (PetscInt)DIRICHLET;
  PetscOptionsEList("-bc_type","Type of boundary condition","ex29.c",bcTypes,2,bcTypes[0],&bc,NULL);
  user->bcType = (BCType)bc;
  PetscOptionsEnd();
  *ctx = user;
  return(0);
}
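/* Split comm into `number` interlaced sub-communicators and hand the PetscSubcomm back
   to the caller. */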
PetscErrorCode CommCoarsen(MPI_Comm comm,PetscInt number,PetscSubcomm *p)
{
  PetscSubcomm psubcomm;

  PetscSubcommCreate(comm,&psubcomm);
  PetscSubcommSetNumber(psubcomm,number);
  PetscSubcommSetType(psubcomm,PETSC_SUBCOMM_INTERLACED);
  *p = psubcomm;
  return(0);
}
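/* Build a nested hierarchy of communicators: pscommlist[n-1] coarsens comm, and each
   pscommlist[k] further coarsens the child of pscommlist[k+1]. Only ranks with color 0
   participate in the next (smaller) level. -view_hierarchy prints each level's size. */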
PetscErrorCode CommHierarchyCreate(MPI_Comm comm,PetscInt n,PetscInt number[],PetscSubcomm pscommlist[])
{
  PetscInt  k;
  PetscBool view_hierarchy = PETSC_FALSE;

  for (k=0; k<n; k++) {
    pscommlist[k] = NULL;
  }
  if (n < 1) return(0);

  CommCoarsen(comm,number[n-1],&pscommlist[n-1]);
  for (k=n-2; k>=0; k--) {
    MPI_Comm comm_k = PetscSubcommChild(pscommlist[k+1]);
    if (pscommlist[k+1]->color == 0) {
      CommCoarsen(comm_k,number[k],&pscommlist[k]);
    }
  }

  PetscOptionsGetBool(NULL,NULL,"-view_hierarchy",&view_hierarchy,NULL);
  if (view_hierarchy) {
    PetscMPIInt size;

    MPI_Comm_size(comm,&size);
    PetscPrintf(comm,"level[%D] size %d\n",n,(int)size);
    for (k=n-1; k>=0; k--) {
      if (pscommlist[k]) {
        MPI_Comm comm_k = PetscSubcommChild(pscommlist[k]);

        if (pscommlist[k]->color == 0) {
          MPI_Comm_size(comm_k,&size);
          PetscPrintf(comm_k,"level[%D] size %d\n",k,(int)size);
        }
      }
    }
  }
  return(0);
}
/* taken from src/ksp/pc/impls/telescope/telescope_dmda.c */
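/* Given a global index (i,j) of the fine grid, determine the rank (and its (pi,pj)
   coordinates) in the re-partitioned Mp x Np process grid that owns it. */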
static PetscErrorCode _DMDADetermineRankFromGlobalIJ_2d(PetscInt i,PetscInt j,PetscInt Mp,PetscInt Np,
                                                        PetscInt start_i[],PetscInt start_j[],
                                                        PetscInt span_i[],PetscInt span_j[],
                                                        PetscMPIInt *_pi,PetscMPIInt *_pj,PetscMPIInt *rank_re)
{
  PetscInt pi,pj,n;

  *rank_re = -1;
  pi = pj = -1;
  if (_pi) {
    for (n=0; n<Mp; n++) {
      if ((i >= start_i[n]) && (i < start_i[n]+span_i[n])) {
        pi = n;
        break;
      }
    }
    if (pi == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"[dmda-ij] pi cannot be determined : range %D, val %D",Mp,i);
    *_pi = (PetscMPIInt)pi;
  }

  if (_pj) {
    for (n=0; n<Np; n++) {
      if ((j >= start_j[n]) && (j < start_j[n]+span_j[n])) {
        pj = n;
        break;
      }
    }
    if (pj == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"[dmda-ij] pj cannot be determined : range %D, val %D",Np,j);
    *_pj = (PetscMPIInt)pj;
  }

  *rank_re = (PetscMPIInt)(pi + pj * Mp);
  return(0);
}
/* taken from src/ksp/pc/impls/telescope/telescope_dmda.c */
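/* Compute the global starting offset s0 of rank rank_re by summing the subdomain sizes
   of all lower-ranked subdomains in the re-partitioned Mp_re x Np_re process grid. */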
static PetscErrorCode _DMDADetermineGlobalS0_2d(PetscMPIInt rank_re,PetscInt Mp_re,PetscInt Np_re,
                                                PetscInt range_i_re[],PetscInt range_j_re[],PetscInt *s0)
{
  PetscInt    i,j,start_IJ = 0;
  PetscMPIInt rank_ij;

  *s0 = -1;
  for (j=0; j<Np_re; j++) {
    for (i=0; i<Mp_re; i++) {
      rank_ij = (PetscMPIInt)(i + j*Mp_re);
      if (rank_ij < rank_re) {
        start_IJ += range_i_re[i]*range_j_re[j];
      }
    }
  }
  *s0 = start_IJ;
  return(0);
}
/* adapted from src/ksp/pc/impls/telescope/telescope_dmda.c */
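/* Assemble the scalar permutation matrix that maps the PETSc ordering of the fine DMDA
   (dmf) onto the ordering induced by the re-partitioned DMDA (dmrepart). Ranks without
   a re-partitioned DM pass dmrepart = NULL; its sizes are broadcast from rank 0. */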
static PetscErrorCode DMDACreatePermutation_2d(DM dmrepart,DM dmf,Mat *mat)
{
  PetscInt       k,sum,Mp_re = 0,Np_re = 0;
  PetscInt       nx,ny,sr,er,Mr,ndof;
  PetscInt       i,j,location,startI[2],endI[2],lenI[2];
  const PetscInt *_range_i_re = NULL,*_range_j_re = NULL;
  PetscInt       *range_i_re,*range_j_re;
  PetscInt       *start_i_re,*start_j_re;
  MPI_Comm       comm;
  Vec            V;
  Mat            Pscalar;

  PetscInfo(dmf,"setting up the permutation matrix (DMDA-2D)\n");
  PetscObjectGetComm((PetscObject)dmf,&comm);

  _range_i_re = _range_j_re = NULL;
  /* Create DMDA on the child communicator */
  if (dmrepart) {
    DMDAGetInfo(dmrepart,NULL,NULL,NULL,NULL,&Mp_re,&Np_re,NULL,NULL,NULL,NULL,NULL,NULL,NULL);
    DMDAGetOwnershipRanges(dmrepart,&_range_i_re,&_range_j_re,NULL);
  }

  /* note - assume rank 0 always participates */
  MPI_Bcast(&Mp_re,1,MPIU_INT,0,comm);
  MPI_Bcast(&Np_re,1,MPIU_INT,0,comm);

  PetscCalloc1(Mp_re,&range_i_re);
  PetscCalloc1(Np_re,&range_j_re);

  if (_range_i_re) {PetscArraycpy(range_i_re,_range_i_re,Mp_re);}
  if (_range_j_re) {PetscArraycpy(range_j_re,_range_j_re,Np_re);}

  MPI_Bcast(range_i_re,Mp_re,MPIU_INT,0,comm);
  MPI_Bcast(range_j_re,Np_re,MPIU_INT,0,comm);

  PetscMalloc1(Mp_re,&start_i_re);
  PetscMalloc1(Np_re,&start_j_re);

  sum = 0;
  for (k=0; k<Mp_re; k++) {
    start_i_re[k] = sum;
    sum += range_i_re[k];
  }

  sum = 0;
  for (k=0; k<Np_re; k++) {
    start_j_re[k] = sum;
    sum += range_j_re[k];
  }

  /* Create permutation */
  DMDAGetInfo(dmf,NULL,&nx,&ny,NULL,NULL,NULL,NULL,&ndof,NULL,NULL,NULL,NULL,NULL);
  DMGetGlobalVector(dmf,&V);
  VecGetSize(V,&Mr);
  VecGetOwnershipRange(V,&sr,&er);
  DMRestoreGlobalVector(dmf,&V);
  sr = sr / ndof;
  er = er / ndof;
  Mr = Mr / ndof;

  MatCreate(comm,&Pscalar);
  MatSetSizes(Pscalar,(er-sr),(er-sr),Mr,Mr);
  MatSetType(Pscalar,MATAIJ);
  MatSeqAIJSetPreallocation(Pscalar,1,NULL);
  MatMPIAIJSetPreallocation(Pscalar,1,NULL,1,NULL);

  DMDAGetCorners(dmf,NULL,NULL,NULL,&lenI[0],&lenI[1],NULL);
  DMDAGetCorners(dmf,&startI[0],&startI[1],NULL,&endI[0],&endI[1],NULL);
  endI[0] += startI[0];
  endI[1] += startI[1];

  for (j=startI[1]; j<endI[1]; j++) {
    for (i=startI[0]; i<endI[0]; i++) {
      PetscMPIInt rank_ijk_re,rank_reI[] = {0,0};
      PetscInt    s0_re;
      PetscInt    ii,jj,local_ijk_re,mapped_ijk;
      PetscInt    lenI_re[] = {0,0};

      location = (i - startI[0]) + (j - startI[1])*lenI[0];
      _DMDADetermineRankFromGlobalIJ_2d(i,j,Mp_re,Np_re,
                                        start_i_re,start_j_re,
                                        range_i_re,range_j_re,
                                        &rank_reI[0],&rank_reI[1],&rank_ijk_re);

      _DMDADetermineGlobalS0_2d(rank_ijk_re,Mp_re,Np_re,range_i_re,range_j_re,&s0_re);

      ii = i - start_i_re[ rank_reI[0] ];
      if (ii < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"[dmdarepart-perm2d] index error ii");
      jj = j - start_j_re[ rank_reI[1] ];
      if (jj < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"[dmdarepart-perm2d] index error jj");

      lenI_re[0] = range_i_re[ rank_reI[0] ];
      lenI_re[1] = range_j_re[ rank_reI[1] ];
      local_ijk_re = ii + jj * lenI_re[0];
      mapped_ijk = s0_re + local_ijk_re;
      MatSetValue(Pscalar,sr+location,mapped_ijk,1.0,INSERT_VALUES);
    }
  }
  MatAssemblyBegin(Pscalar,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(Pscalar,MAT_FINAL_ASSEMBLY);

  *mat = Pscalar;

  PetscFree(range_i_re);
  PetscFree(range_j_re);
  PetscFree(start_i_re);
  PetscFree(start_j_re);
  return(0);
}
/* adapted from src/ksp/pc/impls/telescope/telescope_dmda.c */
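/* Create the VecScatter that moves the permuted fine-level entries onto the ranks that
   are active on the coarse communicator, and compose it (plus the work vectors and the
   IS) with the fine DMDA so the scatter routines below can query them by name. */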
static PetscErrorCode PCTelescopeSetUp_dmda_scatters(DM dmf,DM dmc)
{
  Vec        xred,yred,xtmp,x,xp;
  VecScatter scatter;
  IS         isin;
  PetscInt   m,bs,st,ed;
  MPI_Comm   comm;
  VecType    vectype;

  PetscObjectGetComm((PetscObject)dmf,&comm);
  DMCreateGlobalVector(dmf,&x);
  VecGetBlockSize(x,&bs);
  VecGetType(x,&vectype);

  /* cannot use VecDuplicate as xp is already composed with dmf */
  /*VecDuplicate(x,&xp);*/
  {
    PetscInt m,M;

    VecGetSize(x,&M);
    VecGetLocalSize(x,&m);
    VecCreate(comm,&xp);
    VecSetSizes(xp,m,M);
    VecSetBlockSize(xp,bs);
    VecSetType(xp,vectype);
  }

  m    = 0;
  xred = NULL;
  yred = NULL;
  if (dmc) {
    DMCreateGlobalVector(dmc,&xred);
    VecDuplicate(xred,&yred);
    VecGetOwnershipRange(xred,&st,&ed);
    ISCreateStride(comm,ed-st,st,1,&isin);
    VecGetLocalSize(xred,&m);
  } else {
    VecGetOwnershipRange(x,&st,&ed);
    ISCreateStride(comm,0,st,1,&isin);
  }
  ISSetBlockSize(isin,bs);
  VecCreate(comm,&xtmp);
  VecSetSizes(xtmp,m,PETSC_DECIDE);
  VecSetBlockSize(xtmp,bs);
  VecSetType(xtmp,vectype);
  VecScatterCreate(x,isin,xtmp,NULL,&scatter);

  PetscObjectCompose((PetscObject)dmf,"isin",(PetscObject)isin);
  PetscObjectCompose((PetscObject)dmf,"scatter",(PetscObject)scatter);
  PetscObjectCompose((PetscObject)dmf,"xtmp",(PetscObject)xtmp);
  PetscObjectCompose((PetscObject)dmf,"xp",(PetscObject)xp);

  VecDestroy(&xred);
  VecDestroy(&yred);
  VecDestroy(&x);
  return(0);
}
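/* DMShell hook: delegate matrix creation to the wrapped DMDA; for Neumann boundary
   conditions attach the constant null space to the operator if it is not already set. */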
PetscErrorCode DMCreateMatrix_ShellDA(DM dm,Mat *A)
{
  DM          da;
  MPI_Comm    comm;
  PetscMPIInt size;
  UserContext *ctx = NULL;
  PetscInt    M,N;

  DMShellGetContext(dm,(void**)&da);
  PetscObjectGetComm((PetscObject)da,&comm);
  MPI_Comm_size(comm,&size);
  DMCreateMatrix(da,A);
  MatGetSize(*A,&M,&N);
  PetscPrintf(comm,"[size %D] DMCreateMatrix_ShellDA (%D x %D)\n",(PetscInt)size,M,N);

  DMGetApplicationContext(dm,(void*)&ctx);
  if (ctx->bcType == NEUMANN) {
    MatNullSpace nullspace = NULL;
    PetscPrintf(comm,"[size %D] DMCreateMatrix_ShellDA: using neumann bcs\n",(PetscInt)size);

    MatGetNullSpace(*A,&nullspace);
    if (!nullspace) {
      PetscPrintf(comm,"[size %D] DMCreateMatrix_ShellDA: operator does not have nullspace - attaching\n",(PetscInt)size);
      MatNullSpaceCreate(comm,PETSC_TRUE,0,0,&nullspace);
      MatSetNullSpace(*A,nullspace);
      MatNullSpaceDestroy(&nullspace);
    } else {
      PetscPrintf(comm,"[size %D] DMCreateMatrix_ShellDA: operator already has a nullspace\n",(PetscInt)size);
    }
  }
  return(0);
}
PetscErrorCode DMCreateGlobalVector_ShellDA(DM dm,Vec *x)
{
  DM da;

  DMShellGetContext(dm,(void**)&da);
  DMCreateGlobalVector(da,x);
  VecSetDM(*x,dm);
  return(0);
}

PetscErrorCode DMCreateLocalVector_ShellDA(DM dm,Vec *x)
{
  DM da;

  DMShellGetContext(dm,(void**)&da);
  DMCreateLocalVector(da,x);
  VecSetDM(*x,dm);
  return(0);
}
PetscErrorCode DMCoarsen_ShellDA(DM dm,MPI_Comm comm,DM *dmc)
{
  *dmc = NULL;
  DMGetCoarseDM(dm,dmc);
  if (!*dmc) {
    SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"The coarse DM should never be NULL. The DM hierarchy should have already been defined");
  } else {
    PetscObjectReference((PetscObject)(*dmc));
  }
  return(0);
}
PetscErrorCode DMCreateInterpolation_ShellDA(DM dm1,DM dm2,Mat *mat,Vec *vec)
{
  DM da1,da2;

  DMShellGetContext(dm1,(void**)&da1);
  DMShellGetContext(dm2,(void**)&da2);
  DMCreateInterpolation(da1,da2,mat,vec);
  return(0);
}
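/* Couple a fine DMShell with the DMShell living on the smaller (coarse) communicator:
   build the permutation matrix and scatters, then register the scatter callbacks that
   PCTelescope queries by name ("PCTelescopeFieldScatter"/"PCTelescopeStateScatter"). */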
PetscErrorCode DMShellDASetUp_TelescopeDMScatter(DM dmf_shell,DM dmc_shell)
{
  Mat P = NULL;
  DM  dmf = NULL,dmc = NULL;

  DMShellGetContext(dmf_shell,(void**)&dmf);
  if (dmc_shell) {
    DMShellGetContext(dmc_shell,(void**)&dmc);
  }
  DMDACreatePermutation_2d(dmc,dmf,&P);
  PetscObjectCompose((PetscObject)dmf,"P",(PetscObject)P);
  PCTelescopeSetUp_dmda_scatters(dmf,dmc);
  PetscObjectComposeFunction((PetscObject)dmf_shell,"PCTelescopeFieldScatter",DMFieldScatter_ShellDA);
  PetscObjectComposeFunction((PetscObject)dmf_shell,"PCTelescopeStateScatter",DMStateScatter_ShellDA);
  return(0);
}
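/* Fine-to-coarse field transfer: permute the fine vector into the re-partitioned
   ordering (xp = P^T x), scatter it to the active ranks, then copy the entries into
   the coarse vector xc. */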
PetscErrorCode DMShellDAFieldScatter_Forward(DM dmf,Vec x,DM dmc,Vec xc)
{
  Mat               P = NULL;
  Vec               xp = NULL,xtmp = NULL;
  VecScatter        scatter = NULL;
  const PetscScalar *x_array;
  PetscInt          i,st,ed;

  PetscObjectQuery((PetscObject)dmf,"P",(PetscObject*)&P);
  PetscObjectQuery((PetscObject)dmf,"xp",(PetscObject*)&xp);
  PetscObjectQuery((PetscObject)dmf,"scatter",(PetscObject*)&scatter);
  PetscObjectQuery((PetscObject)dmf,"xtmp",(PetscObject*)&xtmp);
  if (!P) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Require a permutation matrix (\"P\") to be composed with the parent (fine) DM");
  if (!xp) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Require \"xp\" to be composed with the parent (fine) DM");
  if (!scatter) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Require \"scatter\" to be composed with the parent (fine) DM");
  if (!xtmp) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Require \"xtmp\" to be composed with the parent (fine) DM");

  MatMultTranspose(P,x,xp);

  /* pull in vector x->xtmp */
  VecScatterBegin(scatter,xp,xtmp,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterEnd(scatter,xp,xtmp,INSERT_VALUES,SCATTER_FORWARD);

  /* copy vector entries into xred */
  VecGetArrayRead(xtmp,&x_array);
  if (xc) {
    PetscScalar *LA_xred;
    VecGetOwnershipRange(xc,&st,&ed);

    VecGetArray(xc,&LA_xred);
    for (i=0; i<ed-st; i++) {
      LA_xred[i] = x_array[i];
    }
    VecRestoreArray(xc,&LA_xred);
  }
  VecRestoreArrayRead(xtmp,&x_array);
  return(0);
}
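/* Coarse-to-fine field transfer: copy yc into the scatter work vector, reverse-scatter
   back to the permuted fine distribution, then apply P to return to the fine ordering. */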
PetscErrorCode DMShellDAFieldScatter_Reverse(DM dmf,Vec y,DM dmc,Vec yc)
{
  Mat         P = NULL;
  Vec         xp = NULL,xtmp = NULL;
  VecScatter  scatter = NULL;
  PetscScalar *array;
  PetscInt    i,st,ed;

  PetscObjectQuery((PetscObject)dmf,"P",(PetscObject*)&P);
  PetscObjectQuery((PetscObject)dmf,"xp",(PetscObject*)&xp);
  PetscObjectQuery((PetscObject)dmf,"scatter",(PetscObject*)&scatter);
  PetscObjectQuery((PetscObject)dmf,"xtmp",(PetscObject*)&xtmp);

  if (!P) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Require a permutation matrix (\"P\") to be composed with the parent (fine) DM");
  if (!xp) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Require \"xp\" to be composed with the parent (fine) DM");
  if (!scatter) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Require \"scatter\" to be composed with the parent (fine) DM");
  if (!xtmp) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Require \"xtmp\" to be composed with the parent (fine) DM");

  /* return vector */
  VecGetArray(xtmp,&array);
  if (yc) {
    const PetscScalar *LA_yred;
    VecGetOwnershipRange(yc,&st,&ed);
    VecGetArrayRead(yc,&LA_yred);
    for (i=0; i<ed-st; i++) {
      array[i] = LA_yred[i];
    }
    VecRestoreArrayRead(yc,&LA_yred);
  }
  VecRestoreArray(xtmp,&array);
  VecScatterBegin(scatter,xtmp,xp,INSERT_VALUES,SCATTER_REVERSE);
  VecScatterEnd(scatter,xtmp,xp,INSERT_VALUES,SCATTER_REVERSE);
  MatMult(P,xp,y);
  return(0);
}
PetscErrorCode DMFieldScatter_ShellDA(DM dmf_shell,Vec x,ScatterMode mode,DM dmc_shell,Vec xc)
{
  DM dmf = NULL,dmc = NULL;

  DMShellGetContext(dmf_shell,(void**)&dmf);
  if (dmc_shell) {
    DMShellGetContext(dmc_shell,(void**)&dmc);
  }
  if (mode == SCATTER_FORWARD) {
    DMShellDAFieldScatter_Forward(dmf,x,dmc,xc);
  } else if (mode == SCATTER_REVERSE) {
    DMShellDAFieldScatter_Reverse(dmf,x,dmc,xc);
  } else SETERRQ(PetscObjectComm((PetscObject)dmf_shell),PETSC_ERR_SUP,"Only mode = SCATTER_FORWARD, SCATTER_REVERSE supported");
  return(0);
}
PetscErrorCode DMStateScatter_ShellDA(DM dmf_shell,ScatterMode mode,DM dmc_shell)
{
  PetscMPIInt size_f = 0,size_c = 0;

  MPI_Comm_size(PetscObjectComm((PetscObject)dmf_shell),&size_f);
  if (dmc_shell) {
    MPI_Comm_size(PetscObjectComm((PetscObject)dmc_shell),&size_c);
  }
  if (mode == SCATTER_FORWARD) {
    PetscPrintf(PetscObjectComm((PetscObject)dmf_shell),"User supplied state scatter (fine [size %d]-> coarse [size %d])\n",(int)size_f,(int)size_c);
  } else if (mode == SCATTER_REVERSE) {
  } else SETERRQ(PetscObjectComm((PetscObject)dmf_shell),PETSC_ERR_SUP,"Only mode = SCATTER_FORWARD, SCATTER_REVERSE supported");
  return(0);
}
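/* Wrap a DMDA in a DMShell, installing the vector/matrix/coarsen/interpolation hooks
   that KSP and PCMG require. A NULL input DMDA yields a NULL shell (inactive rank). */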
PetscErrorCode DMShellCreate_ShellDA(DM da,DM *dms)
{
  if (da) {
    DMShellCreate(PetscObjectComm((PetscObject)da),dms);
    DMShellSetContext(*dms,(void*)da);
    DMShellSetCreateGlobalVector(*dms,DMCreateGlobalVector_ShellDA);
    DMShellSetCreateLocalVector(*dms,DMCreateLocalVector_ShellDA);
    DMShellSetCreateMatrix(*dms,DMCreateMatrix_ShellDA);
    DMShellSetCoarsen(*dms,DMCoarsen_ShellDA);
    DMShellSetCreateInterpolation(*dms,DMCreateInterpolation_ShellDA);
  } else {
    *dms = NULL;
  }
  return(0);
}
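/* Destroy a DMShell created above, together with the wrapped DMDA and every object
   (P, xp, scatter, xtmp, isin) composed with it during setup. */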
PetscErrorCode DMDestroyShellDMDA(DM *_dm)
{
  DM dm,da = NULL;

  if (!_dm) return(0);
  dm = *_dm;
  if (!dm) return(0);

  DMShellGetContext(dm,(void**)&da);
  if (da) {
    Vec        vec;
    VecScatter scatter = NULL;
    IS         is = NULL;
    Mat        P = NULL;

    PetscObjectQuery((PetscObject)da,"P",(PetscObject*)&P);
    MatDestroy(&P);

    vec = NULL;
    PetscObjectQuery((PetscObject)da,"xp",(PetscObject*)&vec);
    VecDestroy(&vec);

    PetscObjectQuery((PetscObject)da,"scatter",(PetscObject*)&scatter);
    VecScatterDestroy(&scatter);

    vec = NULL;
    PetscObjectQuery((PetscObject)da,"xtmp",(PetscObject*)&vec);
    VecDestroy(&vec);

    PetscObjectQuery((PetscObject)da,"isin",(PetscObject*)&is);
    ISDestroy(&is);

    DMDestroy(&da);
  }
  DMDestroy(&dm);
  *_dm = NULL;
  return(0);
}
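/* Two-level hierarchy for the basic test: the fine DMDA lives on PETSC_COMM_WORLD and
   the coarse DMDA (the same grid, 17x17 by default) is duplicated on rank 0 only. */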
PetscErrorCode HierarchyCreate_Basic(DM *dm_f,DM *dm_c,UserContext *ctx)
{
  DM          dm,dmc,dm_shell,dmc_shell;
  PetscMPIInt rank;

  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
  DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,17,17,PETSC_DECIDE,PETSC_DECIDE,1,1,0,0,&dm);
  DMSetFromOptions(dm);
  DMSetUp(dm);
  DMDASetUniformCoordinates(dm,0,1,0,1,0,0);
  DMDASetFieldName(dm,0,"Pressure");
  DMShellCreate_ShellDA(dm,&dm_shell);
  DMSetApplicationContext(dm_shell,(void*)ctx);

  dmc = NULL;
  dmc_shell = NULL;
  if (!rank) {
    DMDACreate2d(PETSC_COMM_SELF,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,17,17,PETSC_DECIDE,PETSC_DECIDE,1,1,0,0,&dmc);
    DMSetFromOptions(dmc);
    DMSetUp(dmc);
    DMDASetUniformCoordinates(dmc,0,1,0,1,0,0);
    DMDASetFieldName(dmc,0,"Pressure");
    DMShellCreate_ShellDA(dmc,&dmc_shell);
    DMSetApplicationContext(dmc_shell,(void*)ctx);
  }

  DMSetCoarseDM(dm_shell,dmc_shell);
  DMShellDASetUp_TelescopeDMScatter(dm_shell,dmc_shell);

  *dm_f = dm_shell;
  *dm_c = dmc_shell;
  return(0);
}
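/* General hierarchy: -ndecomps communicator levels (each reduced by the factors given
   via -level_comm_red_factor, defaulting to 2), with -level_nrefs refined DMDAs per
   level. Returns the communicator list and the flat list of DMShells, ordered coarse
   to fine. */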
PetscErrorCode HierarchyCreate(PetscInt *_nd,PetscInt *_nref,MPI_Comm **_cl,DM **_dl)
{
  PetscInt     d,k,ndecomps,ncoarsen,found,nx;
  PetscInt     levelrefs,*number;
  PetscSubcomm *pscommlist;
  MPI_Comm     *commlist;
  DM           *dalist,*dmlist;
  PetscBool    set;

  ndecomps = 1;
  PetscOptionsGetInt(NULL,NULL,"-ndecomps",&ndecomps,NULL);
  ncoarsen = ndecomps - 1;
  if (ncoarsen < 0) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_USER,"-ndecomps must be >= 1");

  levelrefs = 2;
  PetscOptionsGetInt(NULL,NULL,"-level_nrefs",&levelrefs,NULL);
  PetscMalloc1(ncoarsen+1,&number);
  for (k=0; k<ncoarsen+1; k++) {
    number[k] = 2;
  }
  found = ncoarsen;
  set = PETSC_FALSE;
  PetscOptionsGetIntArray(NULL,NULL,"-level_comm_red_factor",number,&found,&set);
  if (set) {
    if (found != ncoarsen) SETERRQ2(PETSC_COMM_WORLD,PETSC_ERR_USER,"Expected %D values for -level_comm_red_factor. Found %D",ncoarsen,found);
  }

  PetscMalloc1(ncoarsen+1,&pscommlist);
  for (k=0; k<ncoarsen+1; k++) {
    pscommlist[k] = NULL;
  }

  PetscMalloc1(ndecomps,&commlist);
  for (k=0; k<ndecomps; k++) {
    commlist[k] = MPI_COMM_NULL;
  }
  PetscMalloc1(ndecomps*levelrefs,&dalist);
  PetscMalloc1(ndecomps*levelrefs,&dmlist);
  for (k=0; k<ndecomps*levelrefs; k++) {
    dalist[k] = NULL;
    dmlist[k] = NULL;
  }

  CommHierarchyCreate(PETSC_COMM_WORLD,ncoarsen,number,pscommlist);
  for (k=0; k<ncoarsen; k++) {
    if (pscommlist[k]) {
      MPI_Comm comm_k = PetscSubcommChild(pscommlist[k]);
      if (pscommlist[k]->color == 0) {
        PetscCommDuplicate(comm_k,&commlist[k],NULL);
      }
    }
  }
  PetscCommDuplicate(PETSC_COMM_WORLD,&commlist[ndecomps-1],NULL);

  for (k=0; k<ncoarsen; k++) {
    if (pscommlist[k]) {
      PetscSubcommDestroy(&pscommlist[k]);
    }
  }

  nx = 17;
  PetscOptionsGetInt(NULL,NULL,"-m",&nx,NULL);
  for (d=0; d<ndecomps; d++) {
    DM   dmroot = NULL;
    char name[PETSC_MAX_PATH_LEN];

    if (commlist[d] != MPI_COMM_NULL) {
      DMDACreate2d(commlist[d],DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,nx,nx,PETSC_DECIDE,PETSC_DECIDE,1,1,0,0,&dmroot);
      DMSetUp(dmroot);
      DMDASetUniformCoordinates(dmroot,0,1,0,1,0,0);
      DMDASetFieldName(dmroot,0,"Pressure");
      PetscSNPrintf(name,PETSC_MAX_PATH_LEN-1,"root-decomp-%D",d);
      PetscObjectSetName((PetscObject)dmroot,name);
      /*DMView(dmroot,PETSC_VIEWER_STDOUT_(commlist[d]));*/
    }

    dalist[d*levelrefs + 0] = dmroot;
    for (k=1; k<levelrefs; k++) {
      DM dmref = NULL;

      if (commlist[d] != MPI_COMM_NULL) {
        DMRefine(dalist[d*levelrefs + (k-1)],MPI_COMM_NULL,&dmref);
        PetscSNPrintf(name,PETSC_MAX_PATH_LEN-1,"ref%D-decomp-%D",k,d);
        PetscObjectSetName((PetscObject)dmref,name);
        DMDAGetInfo(dmref,NULL,&nx,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL);
        /*DMView(dmref,PETSC_VIEWER_STDOUT_(commlist[d]));*/
      }
      dalist[d*levelrefs + k] = dmref;
    }
    MPI_Allreduce(MPI_IN_PLACE,&nx,1,MPIU_INT,MPI_MAX,PETSC_COMM_WORLD);
  }
  /* create the hierarchy of DMShell's */
  for (d=0; d<ndecomps; d++) {
    char        name[PETSC_MAX_PATH_LEN];
    UserContext *ctx = NULL;

    if (commlist[d] != MPI_COMM_NULL) {
      UserContextCreate(commlist[d],&ctx);
      for (k=0; k<levelrefs; k++) {
        DMShellCreate_ShellDA(dalist[d*levelrefs + k],&dmlist[d*levelrefs + k]);
        DMSetApplicationContext(dmlist[d*levelrefs + k],(void*)ctx);
        PetscSNPrintf(name,PETSC_MAX_PATH_LEN-1,"level%D-decomp-%D",k,d);
        PetscObjectSetName((PetscObject)dmlist[d*levelrefs + k],name);
      }
    }
  }

  /* set all the coarse DMs */
  for (k=1; k<ndecomps*levelrefs; k++) { /* skip first DM as it doesn't have a coarse representation */
    DM dmfine = NULL,dmcoarse = NULL;

    dmfine = dmlist[k];
    dmcoarse = dmlist[k-1];
    if (dmfine) {
      DMSetCoarseDM(dmfine,dmcoarse);
    }
  }

  /* do special setup on the fine DM coupling different decompositions */
  for (d=1; d<ndecomps; d++) { /* skip first decomposition as it doesn't have a coarse representation */
    DM dmfine = NULL,dmcoarse = NULL;

    dmfine = dmlist[d*levelrefs + 0];
    dmcoarse = dmlist[(d-1)*levelrefs + (levelrefs-1)];
    if (dmfine) {
      DMShellDASetUp_TelescopeDMScatter(dmfine,dmcoarse);
    }
  }

  PetscFree(number);
  for (k=0; k<ncoarsen; k++) {
    PetscSubcommDestroy(&pscommlist[k]);
  }
  PetscFree(pscommlist);

  if (_nd) {
    *_nd = ndecomps;
  }
  if (_nref) {
    *_nref = levelrefs;
  }
  if (_cl) {
    *_cl = commlist;
  } else {
    for (k=0; k<ndecomps; k++) {
      if (commlist[k] != MPI_COMM_NULL) {
        PetscCommDestroy(&commlist[k]);
      }
    }
    PetscFree(commlist);
  }
  if (_dl) {
    *_dl = dmlist;
    PetscFree(dalist);
  } else {
    for (k=0; k<ndecomps*levelrefs; k++) {
      DMDestroy(&dmlist[k]);
      DMDestroy(&dalist[k]);
    }
    PetscFree(dmlist);
    PetscFree(dalist);
  }
  return(0);
}
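/* Build and immediately tear down the full hierarchy; useful together with
   -view_hierarchy to inspect the communicator/DM layout without running a solve. */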
PetscErrorCode test_hierarchy(void)
{
  PetscInt d,k,nd,nref;
  MPI_Comm *comms;
  DM       *dms;

  HierarchyCreate(&nd,&nref,&comms,&dms);

  /* destroy user context */
  for (d=0; d<nd; d++) {
    DM first = dms[d*nref+0];

    if (first) {
      UserContext *ctx = NULL;

      DMGetApplicationContext(first,(void*)&ctx);
      if (ctx) { PetscFree(ctx); }
      DMSetApplicationContext(first,NULL);
    }
    for (k=1; k<nref; k++) {
      DM dm = dms[d*nref+k];
      if (dm) {
        DMSetApplicationContext(dm,NULL);
      }
    }
  }

  /* destroy DMs */
  for (k=0; k<nd*nref; k++) {
    if (dms[k]) {
      DMDestroyShellDMDA(&dms[k]);
    }
  }
  PetscFree(dms);

  /* destroy communicators */
  for (k=0; k<nd; k++) {
    if (comms[k] != MPI_COMM_NULL) {
      PetscCommDestroy(&comms[k]);
    }
  }
  PetscFree(comms);
  return(0);
}
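/* Solve the Poisson problem on the two-level (basic) hierarchy. With -pc_type telescope
   and PCTelescopeSetUseCoarseDM() enabled, the preconditioner reuses the coarse DMShell
   attached via DMSetCoarseDM() instead of repartitioning the DMDA itself. */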
PetscErrorCode test_basic(void)
{
  DM          dmF,dmdaF = NULL,dmC = NULL;
  Mat         A;
  Vec         x,b;
  KSP         ksp;
  PC          pc;
  UserContext *user = NULL;

  UserContextCreate(PETSC_COMM_WORLD,&user);
  HierarchyCreate_Basic(&dmF,&dmC,user);
  DMShellGetContext(dmF,(void**)&dmdaF);

  DMCreateMatrix(dmF,&A);
  DMCreateGlobalVector(dmF,&x);
  DMCreateGlobalVector(dmF,&b);
  ComputeRHS_DMDA(dmdaF,b,user);

  KSPCreate(PETSC_COMM_WORLD,&ksp);
  KSPSetComputeOperators(ksp,ComputeMatrix_ShellDA,(void*)user);
  /*KSPSetOperators(ksp,A,A);*/
  KSPSetDM(ksp,dmF);
  KSPSetDMActive(ksp,PETSC_TRUE);
  KSPSetFromOptions(ksp);
  KSPGetPC(ksp,&pc);
  PCTelescopeSetUseCoarseDM(pc,PETSC_TRUE);

  KSPSolve(ksp,b,x);

  if (dmC) {
    DMDestroyShellDMDA(&dmC);
  }
  DMDestroyShellDMDA(&dmF);
  KSPDestroy(&ksp);
  MatDestroy(&A);
  VecDestroy(&b);
  VecDestroy(&x);
  PetscFree(user);
  return(0);
}
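/* Solve on the full multilevel hierarchy: the KSP is given the finest DMShell, and the
   mg/telescope options supplied on the command line drive the nested coarse solves. */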
PetscErrorCode test_mg(void)
{
  DM          dmF,dmdaF = NULL,*dms = NULL;
  Mat         A;
  Vec         x,b;
  KSP         ksp;
  PetscInt    k,d,nd,nref;
  MPI_Comm    *comms = NULL;
  UserContext *user = NULL;

  HierarchyCreate(&nd,&nref,&comms,&dms);
  dmF = dms[nd*nref-1];

  DMShellGetContext(dmF,(void**)&dmdaF);
  DMGetApplicationContext(dmF,(void*)&user);

  DMCreateMatrix(dmF,&A);
  DMCreateGlobalVector(dmF,&x);
  DMCreateGlobalVector(dmF,&b);
  ComputeRHS_DMDA(dmdaF,b,user);

  KSPCreate(PETSC_COMM_WORLD,&ksp);
  KSPSetComputeOperators(ksp,ComputeMatrix_ShellDA,(void*)user);
  /*KSPSetOperators(ksp,A,A);*/
  KSPSetDM(ksp,dmF);
  KSPSetDMActive(ksp,PETSC_TRUE);
  KSPSetFromOptions(ksp);

  KSPSolve(ksp,b,x);

  for (d=0; d<nd; d++) {
    DM first = dms[d*nref+0];

    if (first) {
      UserContext *ctx = NULL;

      DMGetApplicationContext(first,(void*)&ctx);
      if (ctx) { PetscFree(ctx); }
      DMSetApplicationContext(first,NULL);
    }
    for (k=1; k<nref; k++) {
      DM dm = dms[d*nref+k];
      if (dm) {
        DMSetApplicationContext(dm,NULL);
      }
    }
  }

  for (k=0; k<nd*nref; k++) {
    if (dms[k]) {
      DMDestroyShellDMDA(&dms[k]);
    }
  }
  PetscFree(dms);

  KSPDestroy(&ksp);
  MatDestroy(&A);
  VecDestroy(&b);
  VecDestroy(&x);

  for (k=0; k<nd; k++) {
    if (comms[k] != MPI_COMM_NULL) {
      PetscCommDestroy(&comms[k]);
    }
  }
  PetscFree(comms);
  return(0);
}
int main(int argc,char **argv)
{
  PetscErrorCode ierr;
  PetscInt       test_id = 0;

  ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;
  PetscOptionsGetInt(NULL,NULL,"-tid",&test_id,NULL);
  switch (test_id) {
  case 0:
    test_basic();
    break;
  case 1:
    test_hierarchy();
    break;
  case 2:
    test_mg();
    break;
  default:
    SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_SUP,"-tid must be 0,1,2");
    break;
  }
  ierr = PetscFinalize();
  return ierr;
}
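/* Evaluate the Gaussian forcing f = e^{-x^2/nu} e^{-y^2/nu}, scaled by the cell area
   Hx*Hy, at every locally owned grid point. For Neumann BCs the constant component is
   removed afterwards so the right-hand side is consistent with the singular operator. */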
PetscErrorCode ComputeRHS_DMDA(DM da,Vec b,void *ctx)
{
  UserContext *user = (UserContext*)ctx;
  PetscInt    i,j,mx,my,xm,ym,xs,ys;
  PetscScalar Hx,Hy;
  PetscScalar **array;
  PetscBool   isda = PETSC_FALSE;

  PetscObjectTypeCompare((PetscObject)da,DMDA,&isda);
  if (!isda) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_USER,"DM provided must be a DMDA");
  DMDAGetInfo(da,NULL,&mx,&my,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL);
  Hx = 1.0 / (PetscReal)(mx-1);
  Hy = 1.0 / (PetscReal)(my-1);
  DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL);
  DMDAVecGetArray(da,b,&array);
  for (j=ys; j<ys+ym; j++) {
    for (i=xs; i<xs+xm; i++) {
      array[j][i] = PetscExpScalar(-((PetscReal)i*Hx)*((PetscReal)i*Hx)/user->nu)*PetscExpScalar(-((PetscReal)j*Hy)*((PetscReal)j*Hy)/user->nu)*Hx*Hy;
    }
  }
  DMDAVecRestoreArray(da, b, &array);
  VecAssemblyBegin(b);
  VecAssemblyEnd(b);

  /* force right-hand side to be consistent for singular matrix */
  /* note this is really a hack, normally the model would provide you with a consistent right-hand side */
  if (user->bcType == NEUMANN) {
    MatNullSpace nullspace;

    MatNullSpaceCreate(PETSC_COMM_WORLD,PETSC_TRUE,0,0,&nullspace);
    MatNullSpaceRemove(nullspace,b);
    MatNullSpaceDestroy(&nullspace);
  }
  return(0);
}
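/* Piecewise-constant coefficient: rho = centerRho inside the middle third of the domain
   in each direction, rho = 1 elsewhere. */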
PetscErrorCode ComputeRho(PetscInt i, PetscInt j, PetscInt mx, PetscInt my, PetscReal centerRho, PetscReal *rho)
{
  if ((i > mx/3.0) && (i < 2.0*mx/3.0) && (j > my/3.0) && (j < 2.0*my/3.0)) {
    *rho = centerRho;
  } else {
    *rho = 1.0;
  }
  return(0);
}
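/* Assemble the 5-point finite-difference stencil for -div(rho grad u) on the DMDA.
   Boundary rows are either Dirichlet (diagonal entry only) or Neumann (one-sided
   stencil whose diagonal balances the number of interior neighbours). */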
PetscErrorCode ComputeMatrix_DMDA(DM da,Mat J,Mat jac,void *ctx)
{
  UserContext *user = (UserContext*)ctx;
  PetscReal   centerRho;
  PetscInt    i,j,mx,my,xm,ym,xs,ys;
  PetscScalar v[5];
  PetscReal   Hx,Hy,HydHx,HxdHy,rho;
  MatStencil  row, col[5];
  PetscBool   isda = PETSC_FALSE;

  PetscObjectTypeCompare((PetscObject)da,DMDA,&isda);
  if (!isda) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_USER,"DM provided must be a DMDA");
  MatZeroEntries(jac);
  centerRho = user->rho;
  DMDAGetInfo(da,NULL,&mx,&my,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL);
  Hx    = 1.0 / (PetscReal)(mx-1);
  Hy    = 1.0 / (PetscReal)(my-1);
  HxdHy = Hx/Hy;
  HydHx = Hy/Hx;
  DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL);
  for (j=ys; j<ys+ym; j++) {
    for (i=xs; i<xs+xm; i++) {
      row.i = i; row.j = j;
      ComputeRho(i, j, mx, my, centerRho, &rho);
      if (i==0 || j==0 || i==mx-1 || j==my-1) {
        if (user->bcType == DIRICHLET) {
          v[0] = 2.0*rho*(HxdHy + HydHx);
          MatSetValuesStencil(jac,1,&row,1,&row,v,INSERT_VALUES);
        } else if (user->bcType == NEUMANN) {
          PetscInt numx = 0, numy = 0, num = 0;
          if (j!=0) {
            v[num] = -rho*HxdHy; col[num].i = i; col[num].j = j-1;
            numy++; num++;
          }
          if (i!=0) {
            v[num] = -rho*HydHx; col[num].i = i-1; col[num].j = j;
            numx++; num++;
          }
          if (i!=mx-1) {
            v[num] = -rho*HydHx; col[num].i = i+1; col[num].j = j;
            numx++; num++;
          }
          if (j!=my-1) {
            v[num] = -rho*HxdHy; col[num].i = i; col[num].j = j+1;
            numy++; num++;
          }
          v[num] = numx*rho*HydHx + numy*rho*HxdHy; col[num].i = i; col[num].j = j;
          num++;
          MatSetValuesStencil(jac,1,&row,num,col,v,INSERT_VALUES);
        }
      } else {
        v[0] = -rho*HxdHy;              col[0].i = i;   col[0].j = j-1;
        v[1] = -rho*HydHx;              col[1].i = i-1; col[1].j = j;
        v[2] = 2.0*rho*(HxdHy + HydHx); col[2].i = i;   col[2].j = j;
        v[3] = -rho*HydHx;              col[3].i = i+1; col[3].j = j;
        v[4] = -rho*HxdHy;              col[4].i = i;   col[4].j = j+1;
        MatSetValuesStencil(jac,1,&row,5,col,v,INSERT_VALUES);
      }
    }
  }
  MatAssemblyBegin(jac,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(jac,MAT_FINAL_ASSEMBLY);
  return(0);
}
PetscErrorCode ComputeMatrix_ShellDA(KSP ksp,Mat J,Mat jac,void *ctx)
{
  DM dm,da;

  KSPGetDM(ksp,&dm);
  DMShellGetContext(dm,(void**)&da);
  ComputeMatrix_DMDA(da,J,jac,ctx);
  return(0);
}
/*TEST

   test:
      suffix: basic_dirichlet
      nsize: 4
      args: -tid 0 -ksp_monitor_short -pc_type telescope -telescope_ksp_max_it 100000 -telescope_pc_type lu -telescope_ksp_type fgmres -telescope_ksp_monitor_short -ksp_type gcr

   test:
      suffix: basic_neumann
      nsize: 4
      requires: !single
      args: -tid 0 -ksp_monitor_short -pc_type telescope -telescope_ksp_max_it 100000 -telescope_pc_type jacobi -telescope_ksp_type fgmres -telescope_ksp_monitor_short -ksp_type gcr -bc_type neumann

   test:
      suffix: mg_2lv_2mg
      nsize: 6
      args: -tid 2 -ksp_monitor_short -ksp_type gcr -pc_type mg -pc_mg_levels 3 -level_nrefs 3 -ndecomps 2 -mg_coarse_pc_type telescope -mg_coarse_pc_telescope_use_coarse_dm -mg_coarse_telescope_pc_type mg -mg_coarse_telescope_pc_mg_levels 3 -level_comm_red_factor 6 -mg_coarse_telescope_mg_coarse_pc_type lu

   test:
      suffix: mg_3lv_2mg
      nsize: 4
      args: -tid 2 -ksp_monitor_short -ksp_type gcr -pc_type mg -pc_mg_levels 3 -level_nrefs 3 -ndecomps 3 -mg_coarse_pc_type telescope -mg_coarse_pc_telescope_use_coarse_dm -mg_coarse_telescope_pc_type mg -mg_coarse_telescope_pc_mg_levels 3 -mg_coarse_telescope_mg_coarse_pc_type telescope -mg_coarse_telescope_mg_coarse_pc_telescope_use_coarse_dm -mg_coarse_telescope_mg_coarse_telescope_pc_type lu -m 5

   test:
      suffix: mg_3lv_2mg_customcommsize
      nsize: 12
      args: -tid 2 -ksp_monitor_short -ksp_type gcr -pc_type mg -pc_mg_levels 3 -level_nrefs 3 -ndecomps 3 -mg_coarse_pc_type telescope -mg_coarse_pc_telescope_use_coarse_dm -m 5 -level_comm_red_factor 2,6 -mg_coarse_telescope_pc_type mg -mg_coarse_telescope_pc_mg_levels 3 -mg_coarse_telescope_mg_coarse_pc_type telescope -mg_coarse_telescope_mg_coarse_pc_telescope_use_coarse_dm -mg_coarse_telescope_mg_coarse_telescope_pc_type lu

TEST*/