Actual source code: wb.c

#include <petscdmda.h>
#include <petsc/private/pcmgimpl.h>
#include <petsc/private/hashmapi.h>

typedef struct {
  PCExoticType type;
  Mat          P;           /* the constructed interpolation matrix */
  PetscBool    directSolve; /* use direct LU factorization to construct interpolation */
  KSP          ksp;
} PC_Exotic;

const char *const PCExoticTypes[] = {"face", "wirebasket", "PCExoticType", "PC_Exotic", NULL};

/*
      DMDAGetWireBasketInterpolation - Gets the interpolation for a wirebasket-based coarse space

*/
static PetscErrorCode DMDAGetWireBasketInterpolation(PC pc, DM da, PC_Exotic *exotic, Mat Aglobal, MatReuse reuse, Mat *P)
{
  PetscInt               dim, i, j, k, m, n, p, dof, Nint, Nface, Nwire, Nsurf, *Iint, *Isurf, cint = 0, csurf = 0, istart, jstart, kstart, *II, N, c = 0;
  PetscInt               mwidth, nwidth, pwidth, cnt, mp, np, pp, Ntotal, gl[26], *globals, Ng, *IIint, *IIsurf;
  Mat                    Xint, Xsurf, Xint_tmp;
  IS                     isint, issurf, is, row, col;
  ISLocalToGlobalMapping ltg;
  MPI_Comm               comm;
  Mat                    A, Aii, Ais, Asi, *Aholder, iAii;
  MatFactorInfo          info;
  PetscScalar           *xsurf, *xint;
  const PetscScalar     *rxint;
#if defined(PETSC_USE_DEBUG_foo)
  PetscScalar tmp;
#endif
  PetscHMapI ht = NULL;

  PetscFunctionBegin;
  PetscCall(DMDAGetInfo(da, &dim, NULL, NULL, NULL, &mp, &np, &pp, &dof, NULL, NULL, NULL, NULL, NULL));
  PetscCheck(dof == 1, PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Only for single field problems");
  PetscCheck(dim == 3, PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Only coded for 3d problems");
  PetscCall(DMDAGetCorners(da, NULL, NULL, NULL, &m, &n, &p));
  PetscCall(DMDAGetGhostCorners(da, &istart, &jstart, &kstart, &mwidth, &nwidth, &pwidth));
  istart = istart ? -1 : 0;
  jstart = jstart ? -1 : 0;
  kstart = kstart ? -1 : 0;
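  /* collapse the ghost starts to flags: istart (similarly jstart, kstart) becomes -1 when the
     subdomain has a neighbor on its low side, so expressions like m - istart below count the
     extra ghost layer, and 0 when the subdomain touches the physical boundary */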

  /*
    the columns of P are the interpolation of each coarse grid point (one for each vertex, edge, and face)
    to all the local degrees of freedom (this includes the vertices, edges and faces).

    Xint is the subset of the interpolation into the interior

    Xface is the interpolation onto faces but not into the interior

    Xsurf is the interpolation onto the vertices and edges (the surfbasket)
                                      Xint
    Symbolically one could write P = (Xface) after interchanging the rows to match the natural ordering on the domain
                                      Xsurf
  */
  N     = (m - istart) * (n - jstart) * (p - kstart);
  Nint  = (m - 2 - istart) * (n - 2 - jstart) * (p - 2 - kstart);
  Nface = 2 * ((m - 2 - istart) * (n - 2 - jstart) + (m - 2 - istart) * (p - 2 - kstart) + (n - 2 - jstart) * (p - 2 - kstart));
  Nwire = 4 * ((m - 2 - istart) + (n - 2 - jstart) + (p - 2 - kstart)) + 8;
  Nsurf = Nface + Nwire;
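  /* N is the total number of local nodes (ghost layer included), Nint the strict interior,
     Nface the interiors of the 6 faces, and Nwire the interiors of the 12 edges plus the 8
     vertices; the 26 columns below are one per coarse point: 8 vertices + 12 edges + 6 faces */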
  PetscCall(MatCreateSeqDense(MPI_COMM_SELF, Nint, 26, NULL, &Xint));
  PetscCall(MatCreateSeqDense(MPI_COMM_SELF, Nsurf, 26, NULL, &Xsurf));
  PetscCall(MatDenseGetArray(Xsurf, &xsurf));

  /*
     Require that all 12 edges and 6 faces have at least one grid point. Otherwise some of the columns of
     Xsurf will be all zero (thus making the coarse matrix singular).
  */
  PetscCheck(m - istart >= 3, PETSC_COMM_SELF, PETSC_ERR_SUP, "Number of grid points per process in X direction must be at least 3");
  PetscCheck(n - jstart >= 3, PETSC_COMM_SELF, PETSC_ERR_SUP, "Number of grid points per process in Y direction must be at least 3");
  PetscCheck(p - kstart >= 3, PETSC_COMM_SELF, PETSC_ERR_SUP, "Number of grid points per process in Z direction must be at least 3");

  cnt = 0;
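  /* walk the surface nodes in lexicographic order (i fastest); xsurf is column-major with
     leading dimension Nsurf, so column q of Xsurf is xsurf[q * Nsurf .. q * Nsurf + Nsurf - 1],
     and each surface node gets a 1 in the column of the vertex, edge, or face it belongs to */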

  xsurf[cnt++] = 1;
  for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + Nsurf] = 1;
  xsurf[cnt++ + 2 * Nsurf] = 1;

  for (j = 1; j < n - 1 - jstart; j++) {
    xsurf[cnt++ + 3 * Nsurf] = 1;
    for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + 4 * Nsurf] = 1;
    xsurf[cnt++ + 5 * Nsurf] = 1;
  }

  xsurf[cnt++ + 6 * Nsurf] = 1;
  for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + 7 * Nsurf] = 1;
  xsurf[cnt++ + 8 * Nsurf] = 1;

  for (k = 1; k < p - 1 - kstart; k++) {
    xsurf[cnt++ + 9 * Nsurf] = 1;
    for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + 10 * Nsurf] = 1;
    xsurf[cnt++ + 11 * Nsurf] = 1;

    for (j = 1; j < n - 1 - jstart; j++) {
      xsurf[cnt++ + 12 * Nsurf] = 1;
      /* the interior nodes of this row are not surface nodes and are skipped */
      xsurf[cnt++ + 13 * Nsurf] = 1;
    }

    xsurf[cnt++ + 14 * Nsurf] = 1;
    for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + 15 * Nsurf] = 1;
    xsurf[cnt++ + 16 * Nsurf] = 1;
  }

  xsurf[cnt++ + 17 * Nsurf] = 1;
  for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + 18 * Nsurf] = 1;
  xsurf[cnt++ + 19 * Nsurf] = 1;

  for (j = 1; j < n - 1 - jstart; j++) {
    xsurf[cnt++ + 20 * Nsurf] = 1;
    for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + 21 * Nsurf] = 1;
    xsurf[cnt++ + 22 * Nsurf] = 1;
  }

  xsurf[cnt++ + 23 * Nsurf] = 1;
  for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + 24 * Nsurf] = 1;
  xsurf[cnt++ + 25 * Nsurf] = 1;

  /* interpolations only sum to 1 when using direct solver */
#if defined(PETSC_USE_DEBUG_foo)
  for (i = 0; i < Nsurf; i++) {
    tmp = 0.0;
    for (j = 0; j < 26; j++) tmp += xsurf[i + j * Nsurf];
    PetscCheck(PetscAbsScalar(tmp - 1.0) <= 1.e-10, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Wrong Xsurf interpolation at i %" PetscInt_FMT " value %g", i, (double)PetscAbsScalar(tmp));
  }
#endif
  PetscCall(MatDenseRestoreArray(Xsurf, &xsurf));
  /* PetscCall(MatView(Xsurf,PETSC_VIEWER_STDOUT_WORLD));*/

  /*
       I are the indices for all the needed vertices (in global numbering)
       Iint are the indices for the interior values, Isurf for the surface values
            (This is just for the part of the global matrix obtained with MatCreateSubMatrix(), it
             is NOT the local DMDA ordering.)
       IIint and IIsurf are the same as Iint, Isurf except they are in the global numbering
  */
#define Endpoint(a, start, b) (a == 0 || a == (b - 1 - start))
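  /* Endpoint(a, start, b) is true when local index a lies on the subdomain surface in that direction */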
  PetscCall(PetscMalloc3(N, &II, Nint, &Iint, Nsurf, &Isurf));
  PetscCall(PetscMalloc2(Nint, &IIint, Nsurf, &IIsurf));
  for (k = 0; k < p - kstart; k++) {
    for (j = 0; j < n - jstart; j++) {
      for (i = 0; i < m - istart; i++) {
        II[c++] = i + j * mwidth + k * mwidth * nwidth;

        if (!Endpoint(i, istart, m) && !Endpoint(j, jstart, n) && !Endpoint(k, kstart, p)) {
          IIint[cint]  = i + j * mwidth + k * mwidth * nwidth;
          Iint[cint++] = i + j * (m - istart) + k * (m - istart) * (n - jstart);
        } else {
          IIsurf[csurf]  = i + j * mwidth + k * mwidth * nwidth;
          Isurf[csurf++] = i + j * (m - istart) + k * (m - istart) * (n - jstart);
        }
      }
    }
  }
#undef Endpoint
  PetscCheck(c == N, PETSC_COMM_SELF, PETSC_ERR_PLIB, "c != N");
  PetscCheck(cint == Nint, PETSC_COMM_SELF, PETSC_ERR_PLIB, "cint != Nint");
  PetscCheck(csurf == Nsurf, PETSC_COMM_SELF, PETSC_ERR_PLIB, "csurf != Nsurf");
  PetscCall(DMGetLocalToGlobalMapping(da, &ltg));
  PetscCall(ISLocalToGlobalMappingApply(ltg, N, II, II));
  PetscCall(ISLocalToGlobalMappingApply(ltg, Nint, IIint, IIint));
  PetscCall(ISLocalToGlobalMappingApply(ltg, Nsurf, IIsurf, IIsurf));
  PetscCall(PetscObjectGetComm((PetscObject)da, &comm));
  PetscCall(ISCreateGeneral(comm, N, II, PETSC_COPY_VALUES, &is));
  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, Nint, Iint, PETSC_COPY_VALUES, &isint));
  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, Nsurf, Isurf, PETSC_COPY_VALUES, &issurf));
  PetscCall(PetscFree3(II, Iint, Isurf));

  PetscCall(MatCreateSubMatrices(Aglobal, 1, &is, &is, MAT_INITIAL_MATRIX, &Aholder));
  A = *Aholder;
  PetscCall(PetscFree(Aholder));

  PetscCall(MatCreateSubMatrix(A, isint, isint, MAT_INITIAL_MATRIX, &Aii));
  PetscCall(MatCreateSubMatrix(A, isint, issurf, MAT_INITIAL_MATRIX, &Ais));
  PetscCall(MatCreateSubMatrix(A, issurf, isint, MAT_INITIAL_MATRIX, &Asi));

  /*
     Solve for the interpolation onto the interior Xint
  */
  PetscCall(MatMatMult(Ais, Xsurf, MAT_INITIAL_MATRIX, PETSC_DETERMINE, &Xint_tmp));
  PetscCall(MatScale(Xint_tmp, -1.0));
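  /* each column of Xint is the discrete harmonic extension of the corresponding column of Xsurf:
     solve Aii * Xint = -Ais * Xsurf, either with one LU factorization or column-by-column with a KSP */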
  if (exotic->directSolve) {
    PetscCall(MatGetFactor(Aii, MATSOLVERPETSC, MAT_FACTOR_LU, &iAii));
    PetscCall(MatFactorInfoInitialize(&info));
    PetscCall(MatGetOrdering(Aii, MATORDERINGND, &row, &col));
    PetscCall(MatLUFactorSymbolic(iAii, Aii, row, col, &info));
    PetscCall(ISDestroy(&row));
    PetscCall(ISDestroy(&col));
    PetscCall(MatLUFactorNumeric(iAii, Aii, &info));
    PetscCall(MatMatSolve(iAii, Xint_tmp, Xint));
    PetscCall(MatDestroy(&iAii));
  } else {
    Vec          b, x;
    PetscScalar *xint_tmp;

    PetscCall(MatDenseGetArray(Xint, &xint));
    PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, Nint, NULL, &x));
    PetscCall(MatDenseGetArray(Xint_tmp, &xint_tmp));
    PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, Nint, NULL, &b));
    PetscCall(KSPSetOperators(exotic->ksp, Aii, Aii));
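    /* x and b are array-less wrappers; VecPlaceArray() points them at successive dense columns
       so each KSPSolve() fills a column of Xint in place, with no copies */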
    for (i = 0; i < 26; i++) {
      PetscCall(VecPlaceArray(x, xint + i * Nint));
      PetscCall(VecPlaceArray(b, xint_tmp + i * Nint));
      PetscCall(KSPSolve(exotic->ksp, b, x));
      PetscCall(KSPCheckSolve(exotic->ksp, pc, x));
      PetscCall(VecResetArray(x));
      PetscCall(VecResetArray(b));
    }
    PetscCall(MatDenseRestoreArray(Xint, &xint));
    PetscCall(MatDenseRestoreArray(Xint_tmp, &xint_tmp));
    PetscCall(VecDestroy(&x));
    PetscCall(VecDestroy(&b));
  }
  PetscCall(MatDestroy(&Xint_tmp));

#if defined(PETSC_USE_DEBUG_foo)
  PetscCall(MatDenseGetArrayRead(Xint, &rxint));
  for (i = 0; i < Nint; i++) {
    tmp = 0.0;
    for (j = 0; j < 26; j++) tmp += rxint[i + j * Nint];

    PetscCheck(PetscAbsScalar(tmp - 1.0) <= 1.e-10, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Wrong Xint interpolation at i %" PetscInt_FMT " value %g", i, (double)PetscAbsScalar(tmp));
  }
  PetscCall(MatDenseRestoreArrayRead(Xint, &rxint));
  /* PetscCall(MatView(Xint,PETSC_VIEWER_STDOUT_WORLD)); */
#endif

  /*         total vertices             total faces                                  total edges */
  Ntotal = (mp + 1) * (np + 1) * (pp + 1) + mp * np * (pp + 1) + mp * pp * (np + 1) + np * pp * (mp + 1) + mp * (np + 1) * (pp + 1) + np * (mp + 1) * (pp + 1) + pp * (mp + 1) * (np + 1);

  /*
      For each vertex, edge, face on process (in the same orderings as used above) determine its local number including ghost points
  */
  cnt = 0;
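  /* the bare-brace blocks below group the entries the same way the xsurf loops above group the
     26 coarse points; gl[q] is the local index of the first node of coarse point q */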

  gl[cnt++] = 0;
  {
    gl[cnt++] = 1;
  }
  gl[cnt++] = m - istart - 1;
  {
    gl[cnt++] = mwidth;
    {
      gl[cnt++] = mwidth + 1;
    }
    gl[cnt++] = mwidth + m - istart - 1;
  }
  gl[cnt++] = mwidth * (n - jstart - 1);
  {
    gl[cnt++] = mwidth * (n - jstart - 1) + 1;
  }
  gl[cnt++] = mwidth * (n - jstart - 1) + m - istart - 1;
  {
    gl[cnt++] = mwidth * nwidth;
    {
      gl[cnt++] = mwidth * nwidth + 1;
    }
    gl[cnt++] = mwidth * nwidth + m - istart - 1;
    {
      gl[cnt++] = mwidth * nwidth + mwidth; /* the two x-direction faces */
      gl[cnt++] = mwidth * nwidth + mwidth + m - istart - 1;
    }
    gl[cnt++] = mwidth * nwidth + mwidth * (n - jstart - 1);
    {
      gl[cnt++] = mwidth * nwidth + mwidth * (n - jstart - 1) + 1;
    }
    gl[cnt++] = mwidth * nwidth + mwidth * (n - jstart - 1) + m - istart - 1;
  }
  gl[cnt++] = mwidth * nwidth * (p - kstart - 1);
  {
    gl[cnt++] = mwidth * nwidth * (p - kstart - 1) + 1;
  }
  gl[cnt++] = mwidth * nwidth * (p - kstart - 1) + m - istart - 1;
  {
    gl[cnt++] = mwidth * nwidth * (p - kstart - 1) + mwidth;
    {
      gl[cnt++] = mwidth * nwidth * (p - kstart - 1) + mwidth + 1;
    }
    gl[cnt++] = mwidth * nwidth * (p - kstart - 1) + mwidth + m - istart - 1;
  }
  gl[cnt++] = mwidth * nwidth * (p - kstart - 1) + mwidth * (n - jstart - 1);
  {
    gl[cnt++] = mwidth * nwidth * (p - kstart - 1) + mwidth * (n - jstart - 1) + 1;
  }
  gl[cnt++] = mwidth * nwidth * (p - kstart - 1) + mwidth * (n - jstart - 1) + m - istart - 1;

  /* PetscIntView(26,gl,PETSC_VIEWER_STDOUT_WORLD); */
  /* convert that to global numbering and get them on all processes */
  PetscCall(ISLocalToGlobalMappingApply(ltg, 26, gl, gl));
  /* PetscIntView(26,gl,PETSC_VIEWER_STDOUT_WORLD); */
  PetscCall(PetscMalloc1(26 * mp * np * pp, &globals));
  PetscCallMPI(MPI_Allgather(gl, 26, MPIU_INT, globals, 26, MPIU_INT, PetscObjectComm((PetscObject)da)));
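  /* globals now holds 26 entries from each of the mp*np*pp subdomains; coarse points shared by
     neighboring subdomains appear several times and are collapsed by the hash map below, which
     assigns values 1..Ntotal in first-seen order (the later --gl[i] makes the numbering 0-based) */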

  /* Number the coarse grid points from 0 to Ntotal */
  PetscCall(PetscHMapICreateWithSize(Ntotal / 3, &ht));
  for (i = 0, cnt = 0; i < 26 * mp * np * pp; i++) {
    PetscHashIter it      = 0;
    PetscBool     missing = PETSC_TRUE;

    PetscCall(PetscHMapIPut(ht, globals[i] + 1, &it, &missing));
    if (missing) {
      ++cnt;
      PetscCall(PetscHMapIIterSet(ht, it, cnt));
    }
  }
  PetscCheck(cnt == Ntotal, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Hash table size %" PetscInt_FMT " not equal to total number of coarse grid points %" PetscInt_FMT, cnt, Ntotal);
  PetscCall(PetscFree(globals));
  for (i = 0; i < 26; i++) {
    PetscCall(PetscHMapIGetWithDefault(ht, gl[i] + 1, 0, gl + i));
    --(gl[i]);
  }
  PetscCall(PetscHMapIDestroy(&ht));
  /* PetscIntView(26,gl,PETSC_VIEWER_STDOUT_WORLD); */

  /* construct global interpolation matrix */
  PetscCall(MatGetLocalSize(Aglobal, &Ng, NULL));
  if (reuse == MAT_INITIAL_MATRIX) {
    PetscCall(MatCreateAIJ(PetscObjectComm((PetscObject)da), Ng, PETSC_DECIDE, PETSC_DECIDE, Ntotal, Nint + Nsurf, NULL, Nint + Nsurf, NULL, P));
  } else {
    PetscCall(MatZeroEntries(*P));
  }
  PetscCall(MatSetOption(*P, MAT_ROW_ORIENTED, PETSC_FALSE));
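  /* the dense arrays are column-major, so tell MatSetValues() that the logically two-dimensional
     value arrays below are column-oriented before inserting them */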
  PetscCall(MatDenseGetArrayRead(Xint, &rxint));
  PetscCall(MatSetValues(*P, Nint, IIint, 26, gl, rxint, INSERT_VALUES));
  PetscCall(MatDenseRestoreArrayRead(Xint, &rxint));
  PetscCall(MatDenseGetArrayRead(Xsurf, &rxint));
  PetscCall(MatSetValues(*P, Nsurf, IIsurf, 26, gl, rxint, INSERT_VALUES));
  PetscCall(MatDenseRestoreArrayRead(Xsurf, &rxint));
  PetscCall(MatAssemblyBegin(*P, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(*P, MAT_FINAL_ASSEMBLY));
  PetscCall(PetscFree2(IIint, IIsurf));

#if defined(PETSC_USE_DEBUG_foo)
  {
    Vec          x, y;
    PetscScalar *yy;
    PetscCall(VecCreateMPI(PetscObjectComm((PetscObject)da), Ng, PETSC_DETERMINE, &y));
    PetscCall(VecCreateMPI(PetscObjectComm((PetscObject)da), PETSC_DETERMINE, Ntotal, &x));
    PetscCall(VecSet(x, 1.0));
    PetscCall(MatMult(*P, x, y));
    PetscCall(VecGetArray(y, &yy));
    for (i = 0; i < Ng; i++) PetscCheck(PetscAbsScalar(yy[i] - 1.0) <= 1.e-10, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Wrong p interpolation at i %" PetscInt_FMT " value %g", i, (double)PetscAbsScalar(yy[i]));
    PetscCall(VecRestoreArray(y, &yy));
    PetscCall(VecDestroy(&x));
    PetscCall(VecDestroy(&y));
  }
#endif

  PetscCall(MatDestroy(&Aii));
  PetscCall(MatDestroy(&Ais));
  PetscCall(MatDestroy(&Asi));
  PetscCall(MatDestroy(&A));
  PetscCall(ISDestroy(&is));
  PetscCall(ISDestroy(&isint));
  PetscCall(ISDestroy(&issurf));
  PetscCall(MatDestroy(&Xint));
  PetscCall(MatDestroy(&Xsurf));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
      DMDAGetFaceInterpolation - Gets the interpolation for a face-based coarse space

*/
static PetscErrorCode DMDAGetFaceInterpolation(PC pc, DM da, PC_Exotic *exotic, Mat Aglobal, MatReuse reuse, Mat *P)
{
  PetscInt               dim, i, j, k, m, n, p, dof, Nint, Nface, Nwire, Nsurf, *Iint, *Isurf, cint = 0, csurf = 0, istart, jstart, kstart, *II, N, c = 0;
  PetscInt               mwidth, nwidth, pwidth, cnt, mp, np, pp, Ntotal, gl[6], *globals, Ng, *IIint, *IIsurf;
  Mat                    Xint, Xsurf, Xint_tmp;
  IS                     isint, issurf, is, row, col;
  ISLocalToGlobalMapping ltg;
  MPI_Comm               comm;
  Mat                    A, Aii, Ais, Asi, *Aholder, iAii;
  MatFactorInfo          info;
  PetscScalar           *xsurf, *xint;
  const PetscScalar     *rxint;
#if defined(PETSC_USE_DEBUG_foo)
  PetscScalar tmp;
#endif
  PetscHMapI ht = NULL;

  PetscFunctionBegin;
  PetscCall(DMDAGetInfo(da, &dim, NULL, NULL, NULL, &mp, &np, &pp, &dof, NULL, NULL, NULL, NULL, NULL));
  PetscCheck(dof == 1, PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Only for single field problems");
  PetscCheck(dim == 3, PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Only coded for 3d problems");
  PetscCall(DMDAGetCorners(da, NULL, NULL, NULL, &m, &n, &p));
  PetscCall(DMDAGetGhostCorners(da, &istart, &jstart, &kstart, &mwidth, &nwidth, &pwidth));
  istart = istart ? -1 : 0;
  jstart = jstart ? -1 : 0;
  kstart = kstart ? -1 : 0;

  /*
    the columns of P are the interpolation of each coarse grid point (one for each face)
    to all the local degrees of freedom (this includes the vertices, edges and faces).

    Xint is the subset of the interpolation into the interior

    Xface is the interpolation onto faces but not into the interior

    Xsurf is the interpolation onto the vertices and edges (the surfbasket)
                                      Xint
    Symbolically one could write P = (Xface) after interchanging the rows to match the natural ordering on the domain
                                      Xsurf
  */
  N     = (m - istart) * (n - jstart) * (p - kstart);
  Nint  = (m - 2 - istart) * (n - 2 - jstart) * (p - 2 - kstart);
  Nface = 2 * ((m - 2 - istart) * (n - 2 - jstart) + (m - 2 - istart) * (p - 2 - kstart) + (n - 2 - jstart) * (p - 2 - kstart));
  Nwire = 4 * ((m - 2 - istart) + (n - 2 - jstart) + (p - 2 - kstart)) + 8;
  Nsurf = Nface + Nwire;
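  /* same node counts as in DMDAGetWireBasketInterpolation(); here only the 6 faces carry coarse
     grid degrees of freedom, so Xint and Xsurf have 6 columns instead of 26 */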
  PetscCall(MatCreateSeqDense(MPI_COMM_SELF, Nint, 6, NULL, &Xint));
  PetscCall(MatCreateSeqDense(MPI_COMM_SELF, Nsurf, 6, NULL, &Xsurf));
  PetscCall(MatDenseGetArray(Xsurf, &xsurf));

  /*
     Require that all 12 edges and 6 faces have at least one grid point. Otherwise some of the columns of
     Xsurf will be all zero (thus making the coarse matrix singular).
  */
  PetscCheck(m - istart >= 3, PETSC_COMM_SELF, PETSC_ERR_SUP, "Number of grid points per process in X direction must be at least 3");
  PetscCheck(n - jstart >= 3, PETSC_COMM_SELF, PETSC_ERR_SUP, "Number of grid points per process in Y direction must be at least 3");
  PetscCheck(p - kstart >= 3, PETSC_COMM_SELF, PETSC_ERR_SUP, "Number of grid points per process in Z direction must be at least 3");

  cnt = 0;
  for (j = 1; j < n - 1 - jstart; j++) {
    for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + 0 * Nsurf] = 1;
  }

  for (k = 1; k < p - 1 - kstart; k++) {
    for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + 1 * Nsurf] = 1;
    for (j = 1; j < n - 1 - jstart; j++) {
      xsurf[cnt++ + 2 * Nsurf] = 1;
      /* the interior nodes of this row are not surface nodes and are skipped */
      xsurf[cnt++ + 3 * Nsurf] = 1;
    }
    for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + 4 * Nsurf] = 1;
  }
  for (j = 1; j < n - 1 - jstart; j++) {
    for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + 5 * Nsurf] = 1;
  }

#if defined(PETSC_USE_DEBUG_foo)
  for (i = 0; i < Nsurf; i++) {
    tmp = 0.0;
    for (j = 0; j < 6; j++) tmp += xsurf[i + j * Nsurf];

    PetscCheck(PetscAbsScalar(tmp - 1.0) <= 1.e-10, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Wrong Xsurf interpolation at i %" PetscInt_FMT " value %g", i, (double)PetscAbsScalar(tmp));
  }
#endif
  PetscCall(MatDenseRestoreArray(Xsurf, &xsurf));
  /* PetscCall(MatView(Xsurf,PETSC_VIEWER_STDOUT_WORLD));*/

  /*
       I are the indices for all the needed vertices (in global numbering)
       Iint are the indices for the interior values, Isurf for the surface values
            (This is just for the part of the global matrix obtained with MatCreateSubMatrix(), it
             is NOT the local DMDA ordering.)
       IIint and IIsurf are the same as Iint, Isurf except they are in the global numbering
  */
#define Endpoint(a, start, b) (a == 0 || a == (b - 1 - start))
  PetscCall(PetscMalloc3(N, &II, Nint, &Iint, Nsurf, &Isurf));
  PetscCall(PetscMalloc2(Nint, &IIint, Nsurf, &IIsurf));
  for (k = 0; k < p - kstart; k++) {
    for (j = 0; j < n - jstart; j++) {
      for (i = 0; i < m - istart; i++) {
        II[c++] = i + j * mwidth + k * mwidth * nwidth;

        if (!Endpoint(i, istart, m) && !Endpoint(j, jstart, n) && !Endpoint(k, kstart, p)) {
          IIint[cint]  = i + j * mwidth + k * mwidth * nwidth;
          Iint[cint++] = i + j * (m - istart) + k * (m - istart) * (n - jstart);
        } else {
          IIsurf[csurf]  = i + j * mwidth + k * mwidth * nwidth;
          Isurf[csurf++] = i + j * (m - istart) + k * (m - istart) * (n - jstart);
        }
      }
    }
  }
#undef Endpoint
  PetscCheck(c == N, PETSC_COMM_SELF, PETSC_ERR_PLIB, "c != N");
  PetscCheck(cint == Nint, PETSC_COMM_SELF, PETSC_ERR_PLIB, "cint != Nint");
  PetscCheck(csurf == Nsurf, PETSC_COMM_SELF, PETSC_ERR_PLIB, "csurf != Nsurf");
  PetscCall(DMGetLocalToGlobalMapping(da, &ltg));
  PetscCall(ISLocalToGlobalMappingApply(ltg, N, II, II));
  PetscCall(ISLocalToGlobalMappingApply(ltg, Nint, IIint, IIint));
  PetscCall(ISLocalToGlobalMappingApply(ltg, Nsurf, IIsurf, IIsurf));
  PetscCall(PetscObjectGetComm((PetscObject)da, &comm));
  PetscCall(ISCreateGeneral(comm, N, II, PETSC_COPY_VALUES, &is));
  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, Nint, Iint, PETSC_COPY_VALUES, &isint));
  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, Nsurf, Isurf, PETSC_COPY_VALUES, &issurf));
  PetscCall(PetscFree3(II, Iint, Isurf));

  PetscCall(ISSort(is));
  PetscCall(MatCreateSubMatrices(Aglobal, 1, &is, &is, MAT_INITIAL_MATRIX, &Aholder));
  A = *Aholder;
  PetscCall(PetscFree(Aholder));

  PetscCall(MatCreateSubMatrix(A, isint, isint, MAT_INITIAL_MATRIX, &Aii));
  PetscCall(MatCreateSubMatrix(A, isint, issurf, MAT_INITIAL_MATRIX, &Ais));
  PetscCall(MatCreateSubMatrix(A, issurf, isint, MAT_INITIAL_MATRIX, &Asi));

  /*
     Solve for the interpolation onto the interior Xint
  */
  PetscCall(MatMatMult(Ais, Xsurf, MAT_INITIAL_MATRIX, PETSC_DETERMINE, &Xint_tmp));
  PetscCall(MatScale(Xint_tmp, -1.0));
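  /* as in the wirebasket routine: Xint solves Aii * Xint = -Ais * Xsurf, the discrete harmonic
     extension of the 6 face columns into the subdomain interior */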

  if (exotic->directSolve) {
    PetscCall(MatGetFactor(Aii, MATSOLVERPETSC, MAT_FACTOR_LU, &iAii));
    PetscCall(MatFactorInfoInitialize(&info));
    PetscCall(MatGetOrdering(Aii, MATORDERINGND, &row, &col));
    PetscCall(MatLUFactorSymbolic(iAii, Aii, row, col, &info));
    PetscCall(ISDestroy(&row));
    PetscCall(ISDestroy(&col));
    PetscCall(MatLUFactorNumeric(iAii, Aii, &info));
    PetscCall(MatMatSolve(iAii, Xint_tmp, Xint));
    PetscCall(MatDestroy(&iAii));
  } else {
    Vec          b, x;
    PetscScalar *xint_tmp;

    PetscCall(MatDenseGetArray(Xint, &xint));
    PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, Nint, NULL, &x));
    PetscCall(MatDenseGetArray(Xint_tmp, &xint_tmp));
    PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, Nint, NULL, &b));
    PetscCall(KSPSetOperators(exotic->ksp, Aii, Aii));
    for (i = 0; i < 6; i++) {
      PetscCall(VecPlaceArray(x, xint + i * Nint));
      PetscCall(VecPlaceArray(b, xint_tmp + i * Nint));
      PetscCall(KSPSolve(exotic->ksp, b, x));
      PetscCall(KSPCheckSolve(exotic->ksp, pc, x));
      PetscCall(VecResetArray(x));
      PetscCall(VecResetArray(b));
    }
    PetscCall(MatDenseRestoreArray(Xint, &xint));
    PetscCall(MatDenseRestoreArray(Xint_tmp, &xint_tmp));
    PetscCall(VecDestroy(&x));
    PetscCall(VecDestroy(&b));
  }
  PetscCall(MatDestroy(&Xint_tmp));

#if defined(PETSC_USE_DEBUG_foo)
  PetscCall(MatDenseGetArrayRead(Xint, &rxint));
  for (i = 0; i < Nint; i++) {
    tmp = 0.0;
    for (j = 0; j < 6; j++) tmp += rxint[i + j * Nint];

    PetscCheck(PetscAbsScalar(tmp - 1.0) <= 1.e-10, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Wrong Xint interpolation at i %" PetscInt_FMT " value %g", i, (double)PetscAbsScalar(tmp));
  }
  PetscCall(MatDenseRestoreArrayRead(Xint, &rxint));
  /* PetscCall(MatView(Xint,PETSC_VIEWER_STDOUT_WORLD)); */
#endif

  /*         total faces    */
  Ntotal = mp * np * (pp + 1) + mp * pp * (np + 1) + np * pp * (mp + 1);

  /*
      For each face on the process (in the same ordering as used above) determine its local number including ghost points
  */
  cnt = 0;
  {
    gl[cnt++] = mwidth + 1;
  }
  {
    {
      gl[cnt++] = mwidth * nwidth + 1;
    }
    {
      gl[cnt++] = mwidth * nwidth + mwidth; /* the two x-direction faces */
      gl[cnt++] = mwidth * nwidth + mwidth + m - istart - 1;
    }
    {
      gl[cnt++] = mwidth * nwidth + mwidth * (n - jstart - 1) + 1;
    }
  }
  {
    gl[cnt++] = mwidth * nwidth * (p - kstart - 1) + mwidth + 1;
  }

  /* PetscIntView(6,gl,PETSC_VIEWER_STDOUT_WORLD); */
  /* convert that to global numbering and get them on all processes */
  PetscCall(ISLocalToGlobalMappingApply(ltg, 6, gl, gl));
  /* PetscIntView(6,gl,PETSC_VIEWER_STDOUT_WORLD); */
  PetscCall(PetscMalloc1(6 * mp * np * pp, &globals));
  PetscCallMPI(MPI_Allgather(gl, 6, MPIU_INT, globals, 6, MPIU_INT, PetscObjectComm((PetscObject)da)));

  /* Number the coarse grid points from 0 to Ntotal */
  PetscCall(PetscHMapICreateWithSize(Ntotal / 3, &ht));
  for (i = 0, cnt = 0; i < 6 * mp * np * pp; i++) {
    PetscHashIter it      = 0;
    PetscBool     missing = PETSC_TRUE;

    PetscCall(PetscHMapIPut(ht, globals[i] + 1, &it, &missing));
    if (missing) {
      ++cnt;
      PetscCall(PetscHMapIIterSet(ht, it, cnt));
    }
  }
  PetscCheck(cnt == Ntotal, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Hash table size %" PetscInt_FMT " not equal to total number of coarse grid points %" PetscInt_FMT, cnt, Ntotal);
  PetscCall(PetscFree(globals));
  for (i = 0; i < 6; i++) {
    PetscCall(PetscHMapIGetWithDefault(ht, gl[i] + 1, 0, gl + i));
    --(gl[i]);
  }
  PetscCall(PetscHMapIDestroy(&ht));
  /* PetscIntView(6,gl,PETSC_VIEWER_STDOUT_WORLD); */

  /* construct global interpolation matrix */
  PetscCall(MatGetLocalSize(Aglobal, &Ng, NULL));
  if (reuse == MAT_INITIAL_MATRIX) {
    PetscCall(MatCreateAIJ(PetscObjectComm((PetscObject)da), Ng, PETSC_DECIDE, PETSC_DECIDE, Ntotal, Nint + Nsurf, NULL, Nint, NULL, P));
  } else {
    PetscCall(MatZeroEntries(*P));
  }
  PetscCall(MatSetOption(*P, MAT_ROW_ORIENTED, PETSC_FALSE));
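  /* as above, the dense column-major arrays are inserted column-oriented */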
  PetscCall(MatDenseGetArrayRead(Xint, &rxint));
  PetscCall(MatSetValues(*P, Nint, IIint, 6, gl, rxint, INSERT_VALUES));
  PetscCall(MatDenseRestoreArrayRead(Xint, &rxint));
  PetscCall(MatDenseGetArrayRead(Xsurf, &rxint));
  PetscCall(MatSetValues(*P, Nsurf, IIsurf, 6, gl, rxint, INSERT_VALUES));
  PetscCall(MatDenseRestoreArrayRead(Xsurf, &rxint));
  PetscCall(MatAssemblyBegin(*P, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(*P, MAT_FINAL_ASSEMBLY));
  PetscCall(PetscFree2(IIint, IIsurf));

#if defined(PETSC_USE_DEBUG_foo)
  {
    Vec          x, y;
    PetscScalar *yy;
    PetscCall(VecCreateMPI(PetscObjectComm((PetscObject)da), Ng, PETSC_DETERMINE, &y));
    PetscCall(VecCreateMPI(PetscObjectComm((PetscObject)da), PETSC_DETERMINE, Ntotal, &x));
    PetscCall(VecSet(x, 1.0));
    PetscCall(MatMult(*P, x, y));
    PetscCall(VecGetArray(y, &yy));
    for (i = 0; i < Ng; i++) PetscCheck(PetscAbsScalar(yy[i] - 1.0) <= 1.e-10, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Wrong p interpolation at i %" PetscInt_FMT " value %g", i, (double)PetscAbsScalar(yy[i]));
    PetscCall(VecRestoreArray(y, &yy));
    PetscCall(VecDestroy(&x));
    PetscCall(VecDestroy(&y));
  }
#endif

  PetscCall(MatDestroy(&Aii));
  PetscCall(MatDestroy(&Ais));
  PetscCall(MatDestroy(&Asi));
  PetscCall(MatDestroy(&A));
  PetscCall(ISDestroy(&is));
  PetscCall(ISDestroy(&isint));
  PetscCall(ISDestroy(&issurf));
  PetscCall(MatDestroy(&Xint));
  PetscCall(MatDestroy(&Xsurf));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCExoticSetType - Sets the type of coarse grid interpolation to use

  Logically Collective

  Input Parameters:
+ pc   - the preconditioner context
- type - either PC_EXOTIC_FACE or PC_EXOTIC_WIREBASKET (defaults to face)

  Notes:
  The face based interpolation has 1 degree of freedom per face and ignores the
  edge and vertex values completely in the coarse problem. For any seven point
  stencil the interpolation of a constant on all faces into the interior is that constant.

  The wirebasket interpolation has 1 degree of freedom per vertex, per edge and
  per face. A constant on the subdomain boundary is interpolated as that constant
  in the interior of the domain.

  The coarse grid matrix is obtained via the Galerkin computation A_c = R A R^T, hence
  if A is nonsingular A_c is also nonsingular.

  Both interpolations are suitable only for scalar problems.

  Level: intermediate

.seealso: [](ch_ksp), `PCEXOTIC`, `PCExoticType`
@*/
PetscErrorCode PCExoticSetType(PC pc, PCExoticType type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(pc, type, 2);
  PetscTryMethod(pc, "PCExoticSetType_C", (PC, PCExoticType), (pc, type));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCExoticSetType_Exotic(PC pc, PCExoticType type)
{
  PC_MG     *mg  = (PC_MG *)pc->data;
  PC_Exotic *ctx = (PC_Exotic *)mg->innerctx;

  PetscFunctionBegin;
  ctx->type = type;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCSetUp_Exotic(PC pc)
{
  Mat        A;
  PC_MG     *mg    = (PC_MG *)pc->data;
  PC_Exotic *ex    = (PC_Exotic *)mg->innerctx;
  MatReuse   reuse = (ex->P) ? MAT_REUSE_MATRIX : MAT_INITIAL_MATRIX;
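  /* on repeated setups the interpolation P already exists, so its entries are refreshed in
     place rather than the matrix being rebuilt */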

  PetscFunctionBegin;
  PetscCheck(pc->dm, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Need to call PCSetDM() before using this PC");
  PetscCall(PCGetOperators(pc, NULL, &A));
  PetscCheck(ex->type == PC_EXOTIC_FACE || ex->type == PC_EXOTIC_WIREBASKET, PetscObjectComm((PetscObject)pc), PETSC_ERR_PLIB, "Unknown exotic coarse space %d", ex->type);
  if (ex->type == PC_EXOTIC_FACE) {
    PetscCall(DMDAGetFaceInterpolation(pc, pc->dm, ex, A, reuse, &ex->P));
  } else /* if (ex->type == PC_EXOTIC_WIREBASKET) */ {
    PetscCall(DMDAGetWireBasketInterpolation(pc, pc->dm, ex, A, reuse, &ex->P));
  }
  PetscCall(PCMGSetInterpolation(pc, 1, ex->P));
  /* if the PC has an attached DM we must remove it or PCMG will use it to compute incorrectly sized vectors and interpolations */
  PetscCall(PCSetDM(pc, NULL));
  PetscCall(PCSetUp_MG(pc));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCDestroy_Exotic(PC pc)
{
  PC_MG     *mg  = (PC_MG *)pc->data;
  PC_Exotic *ctx = (PC_Exotic *)mg->innerctx;

  PetscFunctionBegin;
  PetscCall(MatDestroy(&ctx->P));
  PetscCall(KSPDestroy(&ctx->ksp));
  PetscCall(PetscFree(ctx));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCExoticSetType_C", NULL));
  PetscCall(PCDestroy_MG(pc));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCView_Exotic(PC pc, PetscViewer viewer)
{
  PC_MG     *mg = (PC_MG *)pc->data;
  PetscBool  iascii;
  PC_Exotic *ctx = (PC_Exotic *)mg->innerctx;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  if (iascii) {
    PetscCall(PetscViewerASCIIPrintf(viewer, "    Exotic type = %s\n", PCExoticTypes[ctx->type]));
    if (ctx->directSolve) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "      Using direct solver to construct interpolation\n"));
    } else {
      PetscViewer sviewer;
      PetscMPIInt rank;

      PetscCall(PetscViewerASCIIPrintf(viewer, "      Using iterative solver to construct interpolation\n"));
      PetscCall(PetscViewerASCIIPushTab(viewer));
      PetscCall(PetscViewerASCIIPushTab(viewer)); /* should not need to push this twice? */
      PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
      PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
      if (rank == 0) PetscCall(KSPView(ctx->ksp, sviewer));
      PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
      PetscCall(PetscViewerASCIIPopTab(viewer));
      PetscCall(PetscViewerASCIIPopTab(viewer));
    }
  }
  PetscCall(PCView_MG(pc, viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCSetFromOptions_Exotic(PC pc, PetscOptionItems *PetscOptionsObject)
{
  PetscBool    flg;
  PC_MG       *mg = (PC_MG *)pc->data;
  PCExoticType mgctype;
  PC_Exotic   *ctx = (PC_Exotic *)mg->innerctx;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "Exotic coarse space options");
  PetscCall(PetscOptionsEnum("-pc_exotic_type", "face or wirebasket", "PCExoticSetType", PCExoticTypes, (PetscEnum)ctx->type, (PetscEnum *)&mgctype, &flg));
  if (flg) PetscCall(PCExoticSetType(pc, mgctype));
  PetscCall(PetscOptionsBool("-pc_exotic_direct_solver", "use direct solver to construct interpolation", "None", ctx->directSolve, &ctx->directSolve, NULL));
  if (!ctx->directSolve) {
    if (!ctx->ksp) {
      const char *prefix;
      PetscCall(KSPCreate(PETSC_COMM_SELF, &ctx->ksp));
      PetscCall(KSPSetNestLevel(ctx->ksp, pc->kspnestlevel));
      PetscCall(KSPSetErrorIfNotConverged(ctx->ksp, pc->erroriffailure));
      PetscCall(PetscObjectIncrementTabLevel((PetscObject)ctx->ksp, (PetscObject)pc, 1));
      PetscCall(PCGetOptionsPrefix(pc, &prefix));
      PetscCall(KSPSetOptionsPrefix(ctx->ksp, prefix));
      PetscCall(KSPAppendOptionsPrefix(ctx->ksp, "exotic_"));
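      /* the inner solver is thus controlled with the exotic_ prefix, e.g. -exotic_ksp_type */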
    }
    PetscCall(KSPSetFromOptions(ctx->ksp));
  }
  PetscOptionsHeadEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
     PCEXOTIC - Two level overlapping Schwarz preconditioner with exotic (non-standard) coarse grid spaces

     This uses the `PCMG` infrastructure restricted to two levels and the face and wirebasket based coarse
   grid spaces.

   Level: advanced

   Notes:
   Must be used with `DMDA`

   By default this uses `KSPGMRES` as the fine grid smoother, so the outer Krylov method should be `KSPFGMRES`, or the smoother should be changed to not use `KSPGMRES`

   These coarse grid spaces originate in the work of Bramble, Pasciak, and Schatz {cite}`bramble1989construction`.
   They were generalized slightly in "Domain Decomposition Method for Linear Elasticity", Ph. D. thesis, Barry Smith, {cite}`smith1990domain`.
   They were then explored in great detail in Dryja, Smith, Widlund {cite}`dryja1994schwarz`. These were developed in the context of iterative substructuring preconditioners.

   They were then ingeniously applied as coarse grid spaces for overlapping Schwarz methods by Dohrmann and Widlund.
   They refer to them as GDSW (generalized Dryja, Smith, Widlund preconditioners). See, for example, {cite}`dohrmann2008extending`, {cite}`dohrmann2008family`,
   {cite}`dohrmann2008domain`, {cite}`dohrmann2009overlapping`.

   The usual `PCMG` options are supported, such as -mg_levels_pc_type <type>, -mg_coarse_pc_type <type>, and -pc_mg_type <type>
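
   Example Usage:
   A minimal sketch (the `DMDA` called da here is a placeholder; it must be a 3d `DMDA` with one degree of freedom per point):
.vb
   KSPCreate(PETSC_COMM_WORLD, &ksp);
   KSPSetDM(ksp, da); /* the DM is passed on to the PC, satisfying the PCSetDM() requirement */
   KSPGetPC(ksp, &pc);
   PCSetType(pc, PCEXOTIC);
   PCExoticSetType(pc, PC_EXOTIC_WIREBASKET);
.ve
   or equivalently from the command line with -pc_type exotic -pc_exotic_type wirebasket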

.seealso: [](ch_ksp), `PCMG`, `PCSetDM()`, `PCExoticType`, `PCExoticSetType()`
M*/

PETSC_EXTERN PetscErrorCode PCCreate_Exotic(PC pc)
{
  PC_Exotic *ex;
  PC_MG     *mg;

  PetscFunctionBegin;
  /* if type was previously mg; must manually destroy it because call to PCSetType(pc,PCMG) will not destroy it */
  PetscTryTypeMethod(pc, destroy);
  pc->data = NULL;

  PetscCall(PetscFree(((PetscObject)pc)->type_name));
  ((PetscObject)pc)->type_name = NULL;

  PetscCall(PCSetType(pc, PCMG));
  PetscCall(PCMGSetLevels(pc, 2, NULL));
  PetscCall(PCMGSetGalerkin(pc, PC_MG_GALERKIN_PMAT));
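  /* with Galerkin multigrid the coarse operator is built automatically from the interpolation
     supplied in PCSetUp_Exotic(), so only the two-level hierarchy needs to be declared here */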
  PetscCall(PetscNew(&ex));
  ex->type     = PC_EXOTIC_FACE;
  mg           = (PC_MG *)pc->data;
  mg->innerctx = ex;

  pc->ops->setfromoptions = PCSetFromOptions_Exotic;
  pc->ops->view           = PCView_Exotic;
  pc->ops->destroy        = PCDestroy_Exotic;
  pc->ops->setup          = PCSetUp_Exotic;

  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCExoticSetType_C", PCExoticSetType_Exotic));
  PetscFunctionReturn(PETSC_SUCCESS);
}