Actual source code: pcpatch.c
petsc-3.10.5 2019-03-28
1: #include <petsc/private/pcpatchimpl.h>
2: #include <petsc/private/dmpleximpl.h> /* For DMPlexComputeJacobian_Patch_Internal() */
3: #include <petscsf.h>
4: #include <petscbt.h>
5: #include <petscds.h>
7: PetscLogEvent PC_Patch_CreatePatches, PC_Patch_ComputeOp, PC_Patch_Solve, PC_Patch_Scatter, PC_Patch_Apply, PC_Patch_Prealloc;
9: PETSC_STATIC_INLINE PetscErrorCode ObjectView(PetscObject obj, PetscViewer viewer, PetscViewerFormat format)
10: {
13: PetscViewerPushFormat(viewer, format);
14: PetscObjectView(obj, viewer);
15: PetscViewerPopFormat(viewer);
16: return(0);
17: }
19: static PetscErrorCode PCPatchConstruct_Star(void *vpatch, DM dm, PetscInt point, PetscHSetI ht)
20: {
21: PetscInt starSize;
22: PetscInt *star = NULL, si;
26: PetscHSetIClear(ht);
27: /* To start with, add the point we care about */
28: PetscHSetIAdd(ht, point);
29: /* Loop over all the points that this point connects to */
30: DMPlexGetTransitiveClosure(dm, point, PETSC_FALSE, &starSize, &star);
31: for (si = 0; si < starSize*2; si += 2) {PetscHSetIAdd(ht, star[si]);}
32: DMPlexRestoreTransitiveClosure(dm, point, PETSC_FALSE, &starSize, &star);
33: return(0);
34: }
36: static PetscErrorCode PCPatchConstruct_Vanka(void *vpatch, DM dm, PetscInt point, PetscHSetI ht)
37: {
38: PC_PATCH *patch = (PC_PATCH *) vpatch;
39: PetscInt starSize;
40: PetscInt *star = NULL;
41: PetscBool shouldIgnore = PETSC_FALSE;
42: PetscInt cStart, cEnd, iStart, iEnd, si;
46: PetscHSetIClear(ht);
47: /* To start with, add the point we care about */
48: PetscHSetIAdd(ht, point);
49: /* Should we ignore any points of a certain dimension? */
50: if (patch->vankadim >= 0) {
51: shouldIgnore = PETSC_TRUE;
52: DMPlexGetDepthStratum(dm, patch->vankadim, &iStart, &iEnd);
53: }
54: DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd);
55: /* Loop over all the cells that this point connects to */
56: DMPlexGetTransitiveClosure(dm, point, PETSC_FALSE, &starSize, &star);
57: for (si = 0; si < starSize*2; si += 2) {
58: const PetscInt cell = star[si];
59: PetscInt closureSize;
60: PetscInt *closure = NULL, ci;
62: if (cell < cStart || cell >= cEnd) continue;
63: /* now loop over all entities in the closure of that cell */
64: DMPlexGetTransitiveClosure(dm, cell, PETSC_TRUE, &closureSize, &closure);
65: for (ci = 0; ci < closureSize*2; ci += 2) {
66: const PetscInt newpoint = closure[ci];
68: /* We've been told to ignore entities of this type.*/
69: if (shouldIgnore && newpoint >= iStart && newpoint < iEnd) continue;
70: PetscHSetIAdd(ht, newpoint);
71: }
72: DMPlexRestoreTransitiveClosure(dm, cell, PETSC_TRUE, &closureSize, &closure);
73: }
74: DMPlexRestoreTransitiveClosure(dm, point, PETSC_FALSE, &starSize, &star);
75: return(0);
76: }
78: /* The user's already set the patches in patch->userIS. Build the hash tables */
79: static PetscErrorCode PCPatchConstruct_User(void *vpatch, DM dm, PetscInt point, PetscHSetI ht)
80: {
81: PC_PATCH *patch = (PC_PATCH *) vpatch;
82: IS patchis = patch->userIS[point];
83: PetscInt n;
84: const PetscInt *patchdata;
85: PetscInt pStart, pEnd, i;
86: PetscErrorCode ierr;
89: PetscHSetIClear(ht);
90: DMPlexGetChart(dm, &pStart, &pEnd);
91: ISGetLocalSize(patchis, &n);
92: ISGetIndices(patchis, &patchdata);
93: for (i = 0; i < n; ++i) {
94: const PetscInt ownedpoint = patchdata[i];
96: if (ownedpoint < pStart || ownedpoint >= pEnd) {
97: SETERRQ3(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_OUTOFRANGE, "Mesh point %D was not in [%D, %D)", ownedpoint, pStart, pEnd);
98: }
99: PetscHSetIAdd(ht, ownedpoint);
100: }
101: ISRestoreIndices(patchis, &patchdata);
102: return(0);
103: }
105: static PetscErrorCode PCPatchCreateDefaultSF_Private(PC pc, PetscInt n, const PetscSF *sf, const PetscInt *bs)
106: {
107: PC_PATCH *patch = (PC_PATCH *) pc->data;
108: PetscInt i;
112: if (n == 1 && bs[0] == 1) {
113: patch->defaultSF = sf[0];
114: PetscObjectReference((PetscObject) patch->defaultSF);
115: } else {
116: PetscInt allRoots = 0, allLeaves = 0;
117: PetscInt leafOffset = 0;
118: PetscInt *ilocal = NULL;
119: PetscSFNode *iremote = NULL;
120: PetscInt *remoteOffsets = NULL;
121: PetscInt index = 0;
122: PetscHMapI rankToIndex;
123: PetscInt numRanks = 0;
124: PetscSFNode *remote = NULL;
125: PetscSF rankSF;
126: PetscInt *ranks = NULL;
127: PetscInt *offsets = NULL;
128: MPI_Datatype contig;
129: PetscHSetI ranksUniq;
131: /* First figure out how many dofs there are in the concatenated numbering.
132: * allRoots: number of owned global dofs;
133: * allLeaves: number of visible dofs (global + ghosted).
134: */
135: for (i = 0; i < n; ++i) {
136: PetscInt nroots, nleaves;
138: PetscSFGetGraph(sf[i], &nroots, &nleaves, NULL, NULL);
139: allRoots += nroots * bs[i];
140: allLeaves += nleaves * bs[i];
141: }
142: PetscMalloc1(allLeaves, &ilocal);
143: PetscMalloc1(allLeaves, &iremote);
144: /* Now build an SF that just contains process connectivity. */
145: PetscHSetICreate(&ranksUniq);
146: for (i = 0; i < n; ++i) {
147: const PetscMPIInt *ranks = NULL;
148: PetscInt nranks, j;
150: PetscSFSetUp(sf[i]);
151: PetscSFGetRanks(sf[i], &nranks, &ranks, NULL, NULL, NULL);
152: /* These are all the ranks who communicate with me. */
153: for (j = 0; j < nranks; ++j) {
154: PetscHSetIAdd(ranksUniq, (PetscInt) ranks[j]);
155: }
156: }
157: PetscHSetIGetSize(ranksUniq, &numRanks);
158: PetscMalloc1(numRanks, &remote);
159: PetscMalloc1(numRanks, &ranks);
160: PetscHSetIGetElems(ranksUniq, &index, ranks);
162: PetscHMapICreate(&rankToIndex);
163: for (i = 0; i < numRanks; ++i) {
164: remote[i].rank = ranks[i];
165: remote[i].index = 0;
166: PetscHMapISet(rankToIndex, ranks[i], i);
167: }
168: PetscFree(ranks);
169: PetscHSetIDestroy(&ranksUniq);
170: PetscSFCreate(PetscObjectComm((PetscObject) pc), &rankSF);
171: PetscSFSetGraph(rankSF, 1, numRanks, NULL, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);
172: PetscSFSetUp(rankSF);
173: /* OK, use it to communicate the root offset on the remote
174: * processes for each subspace. */
175: PetscMalloc1(n, &offsets);
176: PetscMalloc1(n*numRanks, &remoteOffsets);
178: offsets[0] = 0;
179: for (i = 1; i < n; ++i) {
180: PetscInt nroots;
182: PetscSFGetGraph(sf[i-1], &nroots, NULL, NULL, NULL);
183: offsets[i] = offsets[i-1] + nroots*bs[i-1];
184: }
185: /* Offsets are the offsets on the current process of the
186: * global dof numbering for the subspaces. */
187: MPI_Type_contiguous(n, MPIU_INT, &contig);
188: MPI_Type_commit(&contig);
190: PetscSFBcastBegin(rankSF, contig, offsets, remoteOffsets);
191: PetscSFBcastEnd(rankSF, contig, offsets, remoteOffsets);
192: MPI_Type_free(&contig);
193: PetscFree(offsets);
194: PetscSFDestroy(&rankSF);
195: /* Now remoteOffsets contains the offsets on the remote
196: * processes who communicate with me. So now we can
197: * concatenate the list of SFs into a single one. */
198: index = 0;
199: for (i = 0; i < n; ++i) {
200: const PetscSFNode *remote = NULL;
201: const PetscInt *local = NULL;
202: PetscInt nroots, nleaves, j;
204: PetscSFGetGraph(sf[i], &nroots, &nleaves, &local, &remote);
205: for (j = 0; j < nleaves; ++j) {
206: PetscInt rank = remote[j].rank;
207: PetscInt idx, rootOffset, k;
209: PetscHMapIGet(rankToIndex, rank, &idx);
210: if (idx == -1) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Didn't find rank, huh?");
211: /* Offset on given rank for ith subspace */
212: rootOffset = remoteOffsets[n*idx + i];
213: for (k = 0; k < bs[i]; ++k) {
214: ilocal[index] = (local ? local[j] : j)*bs[i] + k + leafOffset;
215: iremote[index].rank = remote[j].rank;
216: iremote[index].index = remote[j].index*bs[i] + k + rootOffset;
217: ++index;
218: }
219: }
220: leafOffset += nleaves * bs[i];
221: }
222: PetscHMapIDestroy(&rankToIndex);
223: PetscFree(remoteOffsets);
224: PetscSFCreate(PetscObjectComm((PetscObject)pc), &patch->defaultSF);
225: PetscSFSetGraph(patch->defaultSF, allRoots, allLeaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER);
226: }
227: return(0);
228: }
230: /* TODO: Docs */
231: PetscErrorCode PCPatchSetIgnoreDim(PC pc, PetscInt dim)
232: {
233: PC_PATCH *patch = (PC_PATCH *) pc->data;
235: patch->ignoredim = dim;
236: return(0);
237: }
239: /* TODO: Docs */
240: PetscErrorCode PCPatchGetIgnoreDim(PC pc, PetscInt *dim)
241: {
242: PC_PATCH *patch = (PC_PATCH *) pc->data;
244: *dim = patch->ignoredim;
245: return(0);
246: }
248: /* TODO: Docs */
249: PetscErrorCode PCPatchSetSaveOperators(PC pc, PetscBool flg)
250: {
251: PC_PATCH *patch = (PC_PATCH *) pc->data;
253: patch->save_operators = flg;
254: return(0);
255: }
257: /* TODO: Docs */
258: PetscErrorCode PCPatchGetSaveOperators(PC pc, PetscBool *flg)
259: {
260: PC_PATCH *patch = (PC_PATCH *) pc->data;
262: *flg = patch->save_operators;
263: return(0);
264: }
266: /* TODO: Docs */
267: PetscErrorCode PCPatchSetPartitionOfUnity(PC pc, PetscBool flg)
268: {
269: PC_PATCH *patch = (PC_PATCH *) pc->data;
271: patch->partition_of_unity = flg;
272: return(0);
273: }
275: /* TODO: Docs */
276: PetscErrorCode PCPatchGetPartitionOfUnity(PC pc, PetscBool *flg)
277: {
278: PC_PATCH *patch = (PC_PATCH *) pc->data;
280: *flg = patch->partition_of_unity;
281: return(0);
282: }
284: /* TODO: Docs */
285: PetscErrorCode PCPatchSetSubMatType(PC pc, MatType sub_mat_type)
286: {
287: PC_PATCH *patch = (PC_PATCH *) pc->data;
291: if (patch->sub_mat_type) {PetscFree(patch->sub_mat_type);}
292: PetscStrallocpy(sub_mat_type, (char **) &patch->sub_mat_type);
293: return(0);
294: }
296: /* TODO: Docs */
297: PetscErrorCode PCPatchGetSubMatType(PC pc, MatType *sub_mat_type)
298: {
299: PC_PATCH *patch = (PC_PATCH *) pc->data;
301: *sub_mat_type = patch->sub_mat_type;
302: return(0);
303: }
305: /* TODO: Docs */
306: PetscErrorCode PCPatchSetCellNumbering(PC pc, PetscSection cellNumbering)
307: {
308: PC_PATCH *patch = (PC_PATCH *) pc->data;
312: patch->cellNumbering = cellNumbering;
313: PetscObjectReference((PetscObject) cellNumbering);
314: return(0);
315: }
317: /* TODO: Docs */
318: PetscErrorCode PCPatchGetCellNumbering(PC pc, PetscSection *cellNumbering)
319: {
320: PC_PATCH *patch = (PC_PATCH *) pc->data;
322: *cellNumbering = patch->cellNumbering;
323: return(0);
324: }
326: /* TODO: Docs */
327: PetscErrorCode PCPatchSetConstructType(PC pc, PCPatchConstructType ctype, PetscErrorCode (*func)(PC, PetscInt *, IS **, IS *, void *), void *ctx)
328: {
329: PC_PATCH *patch = (PC_PATCH *) pc->data;
332: patch->ctype = ctype;
333: switch (ctype) {
334: case PC_PATCH_STAR:
335: patch->user_patches = PETSC_FALSE;
336: patch->patchconstructop = PCPatchConstruct_Star;
337: break;
338: case PC_PATCH_VANKA:
339: patch->user_patches = PETSC_FALSE;
340: patch->patchconstructop = PCPatchConstruct_Vanka;
341: break;
342: case PC_PATCH_USER:
343: case PC_PATCH_PYTHON:
344: patch->user_patches = PETSC_TRUE;
345: patch->patchconstructop = PCPatchConstruct_User;
346: if (func) {
347: patch->userpatchconstructionop = func;
348: patch->userpatchconstructctx = ctx;
349: }
350: break;
351: default:
352: SETERRQ1(PetscObjectComm((PetscObject) pc), PETSC_ERR_USER, "Unknown patch construction type %D", (PetscInt) patch->ctype);
353: }
354: return(0);
355: }
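/* A minimal sketch of a PC_PATCH_USER construction callback matching the func
   signature above; it is not part of this file, and UserBuildPatches plus the
   one-vertex-per-patch choice are purely illustrative.  The callback is invoked
   as func(pc, &npatch, &userIS, &iterationSet, ctx), so it must report the
   number of patches, one IS of mesh points per patch (each point inside the
   DMPlex chart), and an IS giving the order in which patches are visited. */
#if 0
static PetscErrorCode UserBuildPatches(PC pc, PetscInt *npatch, IS **patches, IS *iterOrder, void *ctx)
{
  DM             dm;
  PetscInt       vStart, vEnd, v, *order;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PCGetDM(pc, &dm);CHKERRQ(ierr);
  ierr = DMPlexGetDepthStratum(dm, 0, &vStart, &vEnd);CHKERRQ(ierr); /* one patch per vertex */
  *npatch = vEnd - vStart;
  ierr = PetscMalloc1(*npatch, patches);CHKERRQ(ierr);
  ierr = PetscMalloc1(*npatch, &order);CHKERRQ(ierr);
  for (v = vStart; v < vEnd; ++v) {
    /* Each patch seed is just the vertex itself; PCPatchCompleteCellPatch()
       later grows it to the set of points needed to assemble the patch operator. */
    ierr = ISCreateGeneral(PETSC_COMM_SELF, 1, &v, PETSC_COPY_VALUES, &(*patches)[v-vStart]);CHKERRQ(ierr);
    order[v-vStart] = v - vStart;
  }
  ierr = ISCreateGeneral(PETSC_COMM_SELF, *npatch, order, PETSC_OWN_POINTER, iterOrder);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#endif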
357: /* TODO: Docs */
358: PetscErrorCode PCPatchGetConstructType(PC pc, PCPatchConstructType *ctype, PetscErrorCode (**func)(PC, PetscInt *, IS **, IS *, void *), void **ctx)
359: {
360: PC_PATCH *patch = (PC_PATCH *) pc->data;
363: *ctype = patch->ctype;
364: switch (patch->ctype) {
365: case PC_PATCH_STAR:
366: case PC_PATCH_VANKA:
367: break;
368: case PC_PATCH_USER:
369: case PC_PATCH_PYTHON:
370: *func = patch->userpatchconstructionop;
371: *ctx = patch->userpatchconstructctx;
372: break;
373: default:
374: SETERRQ1(PetscObjectComm((PetscObject) pc), PETSC_ERR_USER, "Unknown patch construction type %D", (PetscInt) patch->ctype);
375: }
376: return(0);
377: }
379: /* TODO: Docs */
380: PetscErrorCode PCPatchSetDiscretisationInfo(PC pc, PetscInt nsubspaces, DM *dms, PetscInt *bs, PetscInt *nodesPerCell, const PetscInt **cellNodeMap,
381: const PetscInt *subspaceOffsets, PetscInt numGhostBcs, const PetscInt *ghostBcNodes, PetscInt numGlobalBcs, const PetscInt *globalBcNodes)
382: {
383: PC_PATCH *patch = (PC_PATCH *) pc->data;
384: DM dm;
385: PetscSF *sfs;
386: PetscInt cStart, cEnd, i, j;
390: PCGetDM(pc, &dm);
391: DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd);
392: PetscMalloc1(nsubspaces, &sfs);
393: PetscMalloc1(nsubspaces, &patch->dofSection);
394: PetscMalloc1(nsubspaces, &patch->bs);
395: PetscMalloc1(nsubspaces, &patch->nodesPerCell);
396: PetscMalloc1(nsubspaces, &patch->cellNodeMap);
397: PetscMalloc1(nsubspaces+1, &patch->subspaceOffsets);
399: patch->nsubspaces = nsubspaces;
400: patch->totalDofsPerCell = 0;
401: for (i = 0; i < nsubspaces; ++i) {
402: DMGetDefaultSection(dms[i], &patch->dofSection[i]);
403: PetscObjectReference((PetscObject) patch->dofSection[i]);
404: DMGetDefaultSF(dms[i], &sfs[i]);
405: patch->bs[i] = bs[i];
406: patch->nodesPerCell[i] = nodesPerCell[i];
407: patch->totalDofsPerCell += nodesPerCell[i]*bs[i];
408: PetscMalloc1((cEnd-cStart)*nodesPerCell[i]*bs[i], &patch->cellNodeMap[i]);
409: for (j = 0; j < (cEnd-cStart)*nodesPerCell[i]*bs[i]; ++j) patch->cellNodeMap[i][j] = cellNodeMap[i][j];
410: patch->subspaceOffsets[i] = subspaceOffsets[i];
411: }
412: PCPatchCreateDefaultSF_Private(pc, nsubspaces, sfs, patch->bs);
413: PetscFree(sfs);
415: patch->subspaceOffsets[nsubspaces] = subspaceOffsets[nsubspaces];
416: ISCreateGeneral(PETSC_COMM_SELF, numGhostBcs, ghostBcNodes, PETSC_COPY_VALUES, &patch->ghostBcNodes);
417: ISCreateGeneral(PETSC_COMM_SELF, numGlobalBcs, globalBcNodes, PETSC_COPY_VALUES, &patch->globalBcNodes);
418: return(0);
419: }
421: /* TODO: Docs */
422: PetscErrorCode PCPatchSetDiscretisationInfoCombined(PC pc, DM dm, PetscInt *nodesPerCell, const PetscInt **cellNodeMap, PetscInt numGhostBcs, const PetscInt *ghostBcNodes, PetscInt numGlobalBcs, const PetscInt *globalBcNodes)
423: {
424: PC_PATCH *patch = (PC_PATCH *) pc->data;
425: PetscInt cStart, cEnd, i, j;
429: patch->combined = PETSC_TRUE;
430: DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd);
431: DMGetNumFields(dm, &patch->nsubspaces);
432: PetscCalloc1(patch->nsubspaces, &patch->dofSection);
433: PetscMalloc1(patch->nsubspaces, &patch->bs);
434: PetscMalloc1(patch->nsubspaces, &patch->nodesPerCell);
435: PetscMalloc1(patch->nsubspaces, &patch->cellNodeMap);
436: PetscCalloc1(patch->nsubspaces+1, &patch->subspaceOffsets);
437: DMGetDefaultSection(dm, &patch->dofSection[0]);
438: PetscObjectReference((PetscObject) patch->dofSection[0]);
439: PetscSectionGetStorageSize(patch->dofSection[0], &patch->subspaceOffsets[patch->nsubspaces]);
440: patch->totalDofsPerCell = 0;
441: for (i = 0; i < patch->nsubspaces; ++i) {
442: patch->bs[i] = 1;
443: patch->nodesPerCell[i] = nodesPerCell[i];
444: patch->totalDofsPerCell += nodesPerCell[i];
445: PetscMalloc1((cEnd-cStart)*nodesPerCell[i], &patch->cellNodeMap[i]);
446: for (j = 0; j < (cEnd-cStart)*nodesPerCell[i]; ++j) patch->cellNodeMap[i][j] = cellNodeMap[i][j];
447: }
448: DMGetDefaultSF(dm, &patch->defaultSF);
449: PetscObjectReference((PetscObject) patch->defaultSF);
450: ISCreateGeneral(PETSC_COMM_SELF, numGhostBcs, ghostBcNodes, PETSC_COPY_VALUES, &patch->ghostBcNodes);
451: ISCreateGeneral(PETSC_COMM_SELF, numGlobalBcs, globalBcNodes, PETSC_COPY_VALUES, &patch->globalBcNodes);
452: return(0);
453: }
455: /*@C
457: PCPatchSetComputeOperator - Set the callback used to compute patch matrices
459: Input Parameters:
460: + pc - The PC
461: . func - The callback
462: - ctx - The user context
464: Level: advanced
466: Note:
467: The callback has signature:
468: + usercomputeop(pc, point, mat, cellIS, n, g2l, ctx)
469: . pc - The PC
470: . point - The point
471: . mat - The patch matrix
472: . cellIS - An IS giving the cells in the patch
473: . n - The size of g2l
474: . g2l - The global-to-local dof translation table for the patch
475: - ctx - The user context
476: and can assume that the matrix entries have been set to zero before the call.
478: .seealso: PCPatchGetComputeOperator(), PCPatchSetDiscretisationInfo()
479: @*/
480: PetscErrorCode PCPatchSetComputeOperator(PC pc, PetscErrorCode (*func)(PC, PetscInt, Mat, IS, PetscInt, const PetscInt *, void *), void *ctx)
481: {
482: PC_PATCH *patch = (PC_PATCH *) pc->data;
485: patch->usercomputeop = func;
486: patch->usercomputectx = ctx;
487: return(0);
488: }
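/* A minimal sketch of a compute-operator callback with the signature above; it
   is not part of this file, and ComputeElementMatrix() plus the fixed element
   size NDOF are hypothetical stand-ins for the user's assembly code.  The g2l
   argument has length ncell*totalDofsPerCell and holds patch-local row/column
   numbers, with -1 marking global or artificial BC dofs, which MatSetValues()
   silently ignores. */
#if 0
#define NDOF 12 /* hypothetical number of dofs per cell */
static PetscErrorCode UserComputeOperator(PC pc, PetscInt point, Mat mat, IS cellIS, PetscInt n, const PetscInt *g2l, void *ctx)
{
  const PetscInt *cells;
  PetscScalar     elemMat[NDOF*NDOF];
  PetscInt        ncell, c;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = ISGetLocalSize(cellIS, &ncell);CHKERRQ(ierr);
  ierr = ISGetIndices(cellIS, &cells);CHKERRQ(ierr);
  for (c = 0; c < ncell; ++c) {
    const PetscInt *rows = g2l + c*NDOF;                      /* patch-local dofs of this cell */
    ierr = ComputeElementMatrix(cells[c], NDOF, elemMat, ctx);CHKERRQ(ierr); /* user assembly */
    ierr = MatSetValues(mat, NDOF, rows, NDOF, rows, elemMat, ADD_VALUES);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(cellIS, &cells);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#endif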
490: /* On entry, ht contains the topological entities whose dofs we are responsible for solving for;
491:    on exit, cht contains all the topological entities needed to compute the residuals for those dofs.
492: In full generality this should incorporate knowledge of the sparsity pattern of the matrix;
493: here we assume a standard FE sparsity pattern.*/
494: /* TODO: Use DMPlexGetAdjacency() */
495: static PetscErrorCode PCPatchCompleteCellPatch(PC pc, PetscHSetI ht, PetscHSetI cht)
496: {
497: DM dm;
498: PetscHashIter hi;
499: PetscInt point;
500: PetscInt *star = NULL, *closure = NULL;
501: PetscInt ignoredim, iStart = 0, iEnd = -1, starSize, closureSize, si, ci;
505: PCGetDM(pc, &dm);
506: PCPatchGetIgnoreDim(pc, &ignoredim);
507: if (ignoredim >= 0) {DMPlexGetDepthStratum(dm, ignoredim, &iStart, &iEnd);}
508: PetscHSetIClear(cht);
509: PetscHashIterBegin(ht, hi);
510: while (!PetscHashIterAtEnd(ht, hi)) {
512: PetscHashIterGetKey(ht, hi, point);
513: PetscHashIterNext(ht, hi);
515: /* Loop over all the cells that this point connects to */
516: DMPlexGetTransitiveClosure(dm, point, PETSC_FALSE, &starSize, &star);
517: for (si = 0; si < starSize*2; si += 2) {
518: const PetscInt ownedpoint = star[si];
519: /* TODO Check for point in cht before running through closure again */
520: /* now loop over all entities in the closure of that cell */
521: DMPlexGetTransitiveClosure(dm, ownedpoint, PETSC_TRUE, &closureSize, &closure);
522: for (ci = 0; ci < closureSize*2; ci += 2) {
523: const PetscInt seenpoint = closure[ci];
524: if (ignoredim >= 0 && seenpoint >= iStart && seenpoint < iEnd) continue;
525: PetscHSetIAdd(cht, seenpoint);
526: }
527: }
528: }
529: DMPlexRestoreTransitiveClosure(dm, 0, PETSC_TRUE, NULL, &closure);
530: DMPlexRestoreTransitiveClosure(dm, 0, PETSC_FALSE, NULL, &star);
531: return(0);
532: }
534: static PetscErrorCode PCPatchGetGlobalDofs(PC pc, PetscSection dofSection[], PetscInt f, PetscBool combined, PetscInt p, PetscInt *dof, PetscInt *off)
535: {
539: if (combined) {
540: if (f < 0) {
541: if (dof) {PetscSectionGetDof(dofSection[0], p, dof);}
542: if (off) {PetscSectionGetOffset(dofSection[0], p, off);}
543: } else {
544: if (dof) {PetscSectionGetFieldDof(dofSection[0], p, f, dof);}
545: if (off) {PetscSectionGetFieldOffset(dofSection[0], p, f, off);}
546: }
547: } else {
548: if (f < 0) {
549: PC_PATCH *patch = (PC_PATCH *) pc->data;
550: PetscInt fdof, g;
552: if (dof) {
553: *dof = 0;
554: for (g = 0; g < patch->nsubspaces; ++g) {
555: PetscSectionGetDof(dofSection[g], p, &fdof);
556: *dof += fdof;
557: }
558: }
559: if (off) {PetscSectionGetOffset(dofSection[0], p, off);}
560: } else {
561: if (dof) {PetscSectionGetDof(dofSection[f], p, dof);}
562: if (off) {PetscSectionGetOffset(dofSection[f], p, off);}
563: }
564: }
565: return(0);
566: }
568: /* Given a hash table with a set of topological entities (pts), compute the degrees of
569: freedom in global concatenated numbering on those entities.
570: For Vanka smoothing, this needs to do something special: ignore dofs of the
571: constraint subspace on entities that aren't the base entity we're building the patch
572: around. */
573: static PetscErrorCode PCPatchGetPointDofs(PC pc, PetscHSetI pts, PetscHSetI dofs, PetscInt base, PetscInt exclude_subspace)
574: {
575: PC_PATCH *patch = (PC_PATCH *) pc->data;
576: PetscHashIter hi;
577: PetscInt ldof, loff;
578: PetscInt k, p;
582: PetscHSetIClear(dofs);
583: for (k = 0; k < patch->nsubspaces; ++k) {
584: PetscInt subspaceOffset = patch->subspaceOffsets[k];
585: PetscInt bs = patch->bs[k];
586: PetscInt j, l;
588: if (k == exclude_subspace) {
589: /* only get this subspace dofs at the base entity, not any others */
590: PCPatchGetGlobalDofs(pc, patch->dofSection, k, patch->combined, base, &ldof, &loff);
591: if (0 == ldof) continue;
592: for (j = loff; j < ldof + loff; ++j) {
593: for (l = 0; l < bs; ++l) {
594: PetscInt dof = bs*j + l + subspaceOffset;
595: PetscHSetIAdd(dofs, dof);
596: }
597: }
598: continue; /* skip the other dofs of this subspace */
599: }
601: PetscHashIterBegin(pts, hi);
602: while (!PetscHashIterAtEnd(pts, hi)) {
603: PetscHashIterGetKey(pts, hi, p);
604: PetscHashIterNext(pts, hi);
605: PCPatchGetGlobalDofs(pc, patch->dofSection, k, patch->combined, p, &ldof, &loff);
606: if (0 == ldof) continue;
607: for (j = loff; j < ldof + loff; ++j) {
608: for (l = 0; l < bs; ++l) {
609: PetscInt dof = bs*j + l + subspaceOffset;
610: PetscHSetIAdd(dofs, dof);
611: }
612: }
613: }
614: }
615: return(0);
616: }
618: /* Given two hash tables A and B, compute the keys in B that are not in A, and put them in C */
619: static PetscErrorCode PCPatchComputeSetDifference_Private(PetscHSetI A, PetscHSetI B, PetscHSetI C)
620: {
621: PetscHashIter hi;
622: PetscInt key;
623: PetscBool flg;
627: PetscHSetIClear(C);
628: PetscHashIterBegin(B, hi);
629: while (!PetscHashIterAtEnd(B, hi)) {
630: PetscHashIterGetKey(B, hi, key);
631: PetscHashIterNext(B, hi);
632: PetscHSetIHas(A, key, &flg);
633: if (!flg) {PetscHSetIAdd(C, key);}
634: }
635: return(0);
636: }
638: /*
639: * PCPatchCreateCellPatches - create patches.
640: *
641: * Input Parameters:
642: * + dm - The DMPlex object defining the mesh
643: *
644: * Output Parameters:
645: * + cellCounts - Section with counts of cells around each vertex
646: * . cells - IS of the cell point indices of cells in each patch
647:  * . pointCounts - Section with counts of points around each vertex
648:  * - points - IS of the point indices of points in each patch
649: */
650: static PetscErrorCode PCPatchCreateCellPatches(PC pc)
651: {
652: PC_PATCH *patch = (PC_PATCH *) pc->data;
653: DMLabel ghost = NULL;
654: DM dm, plex;
655: PetscHSetI ht, cht;
656: PetscSection cellCounts, pointCounts;
657: PetscInt *cellsArray, *pointsArray;
658: PetscInt numCells, numPoints;
659: const PetscInt *leaves;
660: PetscInt nleaves, pStart, pEnd, cStart, cEnd, vStart, vEnd, v;
661: PetscBool isFiredrake;
662: PetscErrorCode ierr;
665: /* Used to keep track of the cells in the patch. */
666: PetscHSetICreate(&ht);
667: PetscHSetICreate(&cht);
669: PCGetDM(pc, &dm);
670: if (!dm) SETERRQ(PetscObjectComm((PetscObject) pc), PETSC_ERR_ARG_WRONGSTATE, "DM not yet set on patch PC\n");
671: DMConvert(dm, DMPLEX, &plex);
672: DMPlexGetChart(plex, &pStart, &pEnd);
673: DMPlexGetHeightStratum(plex, 0, &cStart, &cEnd);
675: if (patch->user_patches) {
676: patch->userpatchconstructionop(pc, &patch->npatch, &patch->userIS, &patch->iterationSet, patch->userpatchconstructctx);
677: vStart = 0; vEnd = patch->npatch;
678: } else if (patch->codim < 0) {
679: if (patch->dim < 0) {DMPlexGetDepthStratum(plex, 0, &vStart, &vEnd);}
680: else {DMPlexGetDepthStratum(plex, patch->dim, &vStart, &vEnd);}
681: } else {DMPlexGetHeightStratum(plex, patch->codim, &vStart, &vEnd);}
682: patch->npatch = vEnd - vStart;
684: /* These labels mark the owned points. We only create patches around points that this process owns. */
685: DMHasLabel(dm, "pyop2_ghost", &isFiredrake);
686: if (isFiredrake) {
687: DMGetLabel(dm, "pyop2_ghost", &ghost);
688: DMLabelCreateIndex(ghost, pStart, pEnd);
689: } else {
690: PetscSF sf;
692: DMGetPointSF(dm, &sf);
693: PetscSFGetGraph(sf, NULL, &nleaves, &leaves, NULL);
694: nleaves = PetscMax(nleaves, 0);
695: }
697: PetscSectionCreate(PETSC_COMM_SELF, &patch->cellCounts);
698: PetscObjectSetName((PetscObject) patch->cellCounts, "Patch Cell Layout");
699: cellCounts = patch->cellCounts;
700: PetscSectionSetChart(cellCounts, vStart, vEnd);
701: PetscSectionCreate(PETSC_COMM_SELF, &patch->pointCounts);
702: PetscObjectSetName((PetscObject) patch->pointCounts, "Patch Point Layout");
703: pointCounts = patch->pointCounts;
704: PetscSectionSetChart(pointCounts, vStart, vEnd);
705: /* Count cells and points in the patch surrounding each entity */
706: for (v = vStart; v < vEnd; ++v) {
707: PetscHashIter hi;
708: PetscInt chtSize, loc = -1;
709: PetscBool flg;
711: if (!patch->user_patches) {
712: if (ghost) {DMLabelHasPoint(ghost, v, &flg);}
713: else {PetscFindInt(v, nleaves, leaves, &loc); flg = loc >=0 ? PETSC_TRUE : PETSC_FALSE;}
714: /* Not an owned entity, don't make a cell patch. */
715: if (flg) continue;
716: }
718: patch->patchconstructop((void *) patch, dm, v, ht);
719: PCPatchCompleteCellPatch(pc, ht, cht);
720: PetscHSetIGetSize(cht, &chtSize);
721: /* empty patch, continue */
722: if (chtSize == 0) continue;
724: /* safe because size(cht) > 0 from above */
725: PetscHashIterBegin(cht, hi);
726: while (!PetscHashIterAtEnd(cht, hi)) {
727: PetscInt point, pdof;
729: PetscHashIterGetKey(cht, hi, point);
730: PCPatchGetGlobalDofs(pc, patch->dofSection, -1, patch->combined, point, &pdof, NULL);
731: if (pdof) {PetscSectionAddDof(pointCounts, v, 1);}
732: if (point >= cStart && point < cEnd) {PetscSectionAddDof(cellCounts, v, 1);}
733: PetscHashIterNext(cht, hi);
734: }
735: }
736: if (isFiredrake) {DMLabelDestroyIndex(ghost);}
738: PetscSectionSetUp(cellCounts);
739: PetscSectionGetStorageSize(cellCounts, &numCells);
740: PetscMalloc1(numCells, &cellsArray);
741: PetscSectionSetUp(pointCounts);
742: PetscSectionGetStorageSize(pointCounts, &numPoints);
743: PetscMalloc1(numPoints, &pointsArray);
745: /* Now that we know how much space we need, run through again and actually remember the cells. */
746: for (v = vStart; v < vEnd; v++ ) {
747: PetscHashIter hi;
748: PetscInt dof, off, cdof, coff, pdof, n = 0, cn = 0;
750: PetscSectionGetDof(pointCounts, v, &dof);
751: PetscSectionGetOffset(pointCounts, v, &off);
752: PetscSectionGetDof(cellCounts, v, &cdof);
753: PetscSectionGetOffset(cellCounts, v, &coff);
754: if (dof <= 0) continue;
755: patch->patchconstructop((void *) patch, dm, v, ht);
756: PCPatchCompleteCellPatch(pc, ht, cht);
757: PetscHashIterBegin(cht, hi);
758: while (!PetscHashIterAtEnd(cht, hi)) {
759: PetscInt point;
761: PetscHashIterGetKey(cht, hi, point);
762: PCPatchGetGlobalDofs(pc, patch->dofSection, -1, patch->combined, point, &pdof, NULL);
763: if (pdof) {pointsArray[off + n++] = point;}
764: if (point >= cStart && point < cEnd) {cellsArray[coff + cn++] = point;}
765: PetscHashIterNext(cht, hi);
766: }
767: if (cn != cdof) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Number of cells in patch %D is %D, but should be %D", v, cn, cdof);
768: if (n != dof) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Number of points in patch %D is %D, but should be %D", v, n, dof);
769: }
770: PetscHSetIDestroy(&ht);
771: PetscHSetIDestroy(&cht);
772: DMDestroy(&plex);
774: ISCreateGeneral(PETSC_COMM_SELF, numCells, cellsArray, PETSC_OWN_POINTER, &patch->cells);
775: PetscObjectSetName((PetscObject) patch->cells, "Patch Cells");
776: if (patch->viewCells) {
777: ObjectView((PetscObject) patch->cellCounts, patch->viewerCells, patch->formatCells);
778: ObjectView((PetscObject) patch->cells, patch->viewerCells, patch->formatCells);
779: }
780: ISCreateGeneral(PETSC_COMM_SELF, numPoints, pointsArray, PETSC_OWN_POINTER, &patch->points);
781: PetscObjectSetName((PetscObject) patch->points, "Patch Points");
782: if (patch->viewPoints) {
783: ObjectView((PetscObject) patch->pointCounts, patch->viewerPoints, patch->formatPoints);
784: ObjectView((PetscObject) patch->points, patch->viewerPoints, patch->formatPoints);
785: }
786: return(0);
787: }
789: /*
790: * PCPatchCreateCellPatchDiscretisationInfo - Build the dof maps for cell patches
791: *
792: * Input Parameters:
793: * + dm - The DMPlex object defining the mesh
794: * . cellCounts - Section with counts of cells around each vertex
795: * . cells - IS of the cell point indices of cells in each patch
796: * . cellNumbering - Section mapping plex cell points to Firedrake cell indices.
797: * . nodesPerCell - number of nodes per cell.
798: * - cellNodeMap - map from cells to node indices (nodesPerCell * numCells)
799: *
800: * Output Parameters:
801: * + dofs - IS of local dof numbers of each cell in the patch, where local is a patch local numbering
802: * . gtolCounts - Section with counts of dofs per cell patch
803: * - gtol - IS mapping from global dofs to local dofs for each patch.
804: */
805: static PetscErrorCode PCPatchCreateCellPatchDiscretisationInfo(PC pc)
806: {
807: PC_PATCH *patch = (PC_PATCH *) pc->data;
808: PetscSection cellCounts = patch->cellCounts;
809: PetscSection pointCounts = patch->pointCounts;
810: PetscSection gtolCounts;
811: IS cells = patch->cells;
812: IS points = patch->points;
813: PetscSection cellNumbering = patch->cellNumbering;
814: PetscInt Nf = patch->nsubspaces;
815: PetscInt numCells, numPoints;
816: PetscInt numDofs;
817: PetscInt numGlobalDofs;
818: PetscInt totalDofsPerCell = patch->totalDofsPerCell;
819: PetscInt vStart, vEnd, v;
820: const PetscInt *cellsArray, *pointsArray;
821: PetscInt *newCellsArray = NULL;
822: PetscInt *dofsArray = NULL;
823: PetscInt *offsArray = NULL;
824: PetscInt *asmArray = NULL;
825: PetscInt *globalDofsArray = NULL;
826: PetscInt globalIndex = 0;
827: PetscInt key = 0;
828: PetscInt asmKey = 0;
829: DM dm = NULL;
830: const PetscInt *bcNodes = NULL;
831: PetscHMapI ht;
832: PetscHSetI globalBcs;
833: PetscInt numBcs;
834: PetscHSetI ownedpts, seenpts, owneddofs, seendofs, artificialbcs;
835: PetscInt pStart, pEnd, p, i;
836: PetscErrorCode ierr;
840: PCGetDM(pc, &dm);
841: /* dofcounts section is cellcounts section * dofPerCell */
842: PetscSectionGetStorageSize(cellCounts, &numCells);
843: PetscSectionGetStorageSize(patch->pointCounts, &numPoints);
844: numDofs = numCells * totalDofsPerCell;
845: PetscMalloc1(numDofs, &dofsArray);
846: PetscMalloc1(numPoints*Nf, &offsArray);
847: PetscMalloc1(numDofs, &asmArray);
848: PetscMalloc1(numCells, &newCellsArray);
849: PetscSectionGetChart(cellCounts, &vStart, &vEnd);
850: PetscSectionCreate(PETSC_COMM_SELF, &patch->gtolCounts);
851: gtolCounts = patch->gtolCounts;
852: PetscSectionSetChart(gtolCounts, vStart, vEnd);
853: PetscObjectSetName((PetscObject) patch->gtolCounts, "Patch Global Index Section");
855: /* Outside the patch loop, get the dofs that are globally-enforced Dirichlet
856: conditions */
857: PetscHSetICreate(&globalBcs);
858: ISGetIndices(patch->ghostBcNodes, &bcNodes);
859: ISGetSize(patch->ghostBcNodes, &numBcs);
860: for (i = 0; i < numBcs; ++i) {
861: PetscHSetIAdd(globalBcs, bcNodes[i]); /* these are already in concatenated numbering */
862: }
863: ISRestoreIndices(patch->ghostBcNodes, &bcNodes);
864: ISDestroy(&patch->ghostBcNodes); /* memory optimisation */
866: /* Hash tables for artificial BC construction */
867: PetscHSetICreate(&ownedpts);
868: PetscHSetICreate(&seenpts);
869: PetscHSetICreate(&owneddofs);
870: PetscHSetICreate(&seendofs);
871: PetscHSetICreate(&artificialbcs);
873: ISGetIndices(cells, &cellsArray);
874: ISGetIndices(points, &pointsArray);
875: PetscHMapICreate(&ht);
876: for (v = vStart; v < vEnd; ++v) {
877: PetscInt localIndex = 0;
878: PetscInt dof, off, i, j, k, l;
880: PetscHMapIClear(ht);
881: PetscSectionGetDof(cellCounts, v, &dof);
882: PetscSectionGetOffset(cellCounts, v, &off);
883: if (dof <= 0) continue;
885: /* Calculate the global numbers of the artificial BC dofs here first */
886: patch->patchconstructop((void*)patch, dm, v, ownedpts);
887: PCPatchCompleteCellPatch(pc, ownedpts, seenpts);
888: PCPatchGetPointDofs(pc, ownedpts, owneddofs, v, patch->exclude_subspace);
889: PCPatchGetPointDofs(pc, seenpts, seendofs, v, -1);
890: PCPatchComputeSetDifference_Private(owneddofs, seendofs, artificialbcs);
891: if (patch->viewPatches) {
892: PetscHSetI globalbcdofs;
893: PetscHashIter hi;
894: MPI_Comm comm = PetscObjectComm((PetscObject)pc);
896: PetscHSetICreate(&globalbcdofs);
897: PetscSynchronizedPrintf(comm, "Patch %d: owned dofs:\n", v);
898: PetscHashIterBegin(owneddofs, hi);
899: while (!PetscHashIterAtEnd(owneddofs, hi)) {
900: PetscInt globalDof;
902: PetscHashIterGetKey(owneddofs, hi, globalDof);
903: PetscHashIterNext(owneddofs, hi);
904: PetscSynchronizedPrintf(comm, "%d ", globalDof);
905: }
906: PetscSynchronizedPrintf(comm, "\n");
907: PetscSynchronizedPrintf(comm, "Patch %d: seen dofs:\n", v);
908: PetscHashIterBegin(seendofs, hi);
909: while (!PetscHashIterAtEnd(seendofs, hi)) {
910: PetscInt globalDof;
911: PetscBool flg;
913: PetscHashIterGetKey(seendofs, hi, globalDof);
914: PetscHashIterNext(seendofs, hi);
915: PetscSynchronizedPrintf(comm, "%d ", globalDof);
917: PetscHSetIHas(globalBcs, globalDof, &flg);
918: if (flg) {PetscHSetIAdd(globalbcdofs, globalDof);}
919: }
920: PetscSynchronizedPrintf(comm, "\n");
921: PetscSynchronizedPrintf(comm, "Patch %d: global BCs:\n", v);
922: PetscHSetIGetSize(globalbcdofs, &numBcs);
923: if (numBcs > 0) {
924: PetscHashIterBegin(globalbcdofs, hi);
925: while (!PetscHashIterAtEnd(globalbcdofs, hi)) {
926: PetscInt globalDof;
927: PetscHashIterGetKey(globalbcdofs, hi, globalDof);
928: PetscHashIterNext(globalbcdofs, hi);
929: PetscSynchronizedPrintf(comm, "%d ", globalDof);
930: }
931: }
932: PetscSynchronizedPrintf(comm, "\n");
933: PetscSynchronizedPrintf(comm, "Patch %d: artificial BCs:\n", v);
934: PetscHSetIGetSize(artificialbcs, &numBcs);
935: if (numBcs > 0) {
936: PetscHashIterBegin(artificialbcs, hi);
937: while (!PetscHashIterAtEnd(artificialbcs, hi)) {
938: PetscInt globalDof;
939: PetscHashIterGetKey(artificialbcs, hi, globalDof);
940: PetscHashIterNext(artificialbcs, hi);
941: PetscSynchronizedPrintf(comm, "%d ", globalDof);
942: }
943: }
944: PetscSynchronizedPrintf(comm, "\n\n");
945: PetscHSetIDestroy(&globalbcdofs);
946: }
947: for (k = 0; k < patch->nsubspaces; ++k) {
948: const PetscInt *cellNodeMap = patch->cellNodeMap[k];
949: PetscInt nodesPerCell = patch->nodesPerCell[k];
950: PetscInt subspaceOffset = patch->subspaceOffsets[k];
951: PetscInt bs = patch->bs[k];
953: for (i = off; i < off + dof; ++i) {
954: /* Walk over the cells in this patch. */
955: const PetscInt c = cellsArray[i];
956: PetscInt cell = c;
958: /* TODO Change this to an IS */
959: if (cellNumbering) {
960: PetscSectionGetDof(cellNumbering, c, &cell);
961: if (cell <= 0) SETERRQ1(PetscObjectComm((PetscObject) pc), PETSC_ERR_ARG_OUTOFRANGE, "Cell %D doesn't appear in cell numbering map", c);
962: PetscSectionGetOffset(cellNumbering, c, &cell);
963: }
964: newCellsArray[i] = cell;
965: for (j = 0; j < nodesPerCell; ++j) {
966: /* For each global dof, map it into contiguous local storage. */
967: const PetscInt globalDof = cellNodeMap[cell*nodesPerCell + j]*bs + subspaceOffset;
968: /* finally, loop over block size */
969: for (l = 0; l < bs; ++l) {
970: PetscInt localDof;
971: PetscBool isGlobalBcDof, isArtificialBcDof;
973: /* first, check if this is either a globally enforced or locally enforced BC dof */
974: PetscHSetIHas(globalBcs, globalDof + l, &isGlobalBcDof);
975: PetscHSetIHas(artificialbcs, globalDof + l, &isArtificialBcDof);
977: /* if it's either, don't ever give it a local dof number */
978: if (isGlobalBcDof || isArtificialBcDof) {
979: dofsArray[globalIndex++] = -1; /* don't use this in assembly in this patch */
980: } else {
981: PetscHMapIGet(ht, globalDof + l, &localDof);
982: if (localDof == -1) {
983: localDof = localIndex++;
984: PetscHMapISet(ht, globalDof + l, localDof);
985: }
986: if ( globalIndex >= numDofs ) SETERRQ2(PETSC_COMM_WORLD, PETSC_ERR_ARG_OUTOFRANGE, "Found more dofs %D than expected %D", globalIndex+1, numDofs);
987: /* And store. */
988: dofsArray[globalIndex++] = localDof;
989: }
990: }
991: }
992: }
993: }
994: /* How many local dofs in this patch? */
995: PetscHMapIGetSize(ht, &dof);
996: PetscSectionSetDof(gtolCounts, v, dof);
997: }
998: if (globalIndex != numDofs) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Expected number of dofs (%d) doesn't match found number (%d)", numDofs, globalIndex);
999: PetscSectionSetUp(gtolCounts);
1000: PetscSectionGetStorageSize(gtolCounts, &numGlobalDofs);
1001: PetscMalloc1(numGlobalDofs, &globalDofsArray);
1003: /* Now populate the global to local map. This could be merged into the above loop if we were willing to deal with reallocs. */
1004: for (v = vStart; v < vEnd; ++v) {
1005: PetscHashIter hi;
1006: PetscInt dof, off, Np, ooff, i, j, k, l;
1008: PetscHMapIClear(ht);
1009: PetscSectionGetDof(cellCounts, v, &dof);
1010: PetscSectionGetOffset(cellCounts, v, &off);
1011: PetscSectionGetDof(pointCounts, v, &Np);
1012: PetscSectionGetOffset(pointCounts, v, &ooff);
1013: if (dof <= 0) continue;
1015: for (k = 0; k < patch->nsubspaces; ++k) {
1016: const PetscInt *cellNodeMap = patch->cellNodeMap[k];
1017: PetscInt nodesPerCell = patch->nodesPerCell[k];
1018: PetscInt subspaceOffset = patch->subspaceOffsets[k];
1019: PetscInt bs = patch->bs[k];
1020: PetscInt goff;
1022: for (i = off; i < off + dof; ++i) {
1023: /* Reconstruct mapping of global-to-local on this patch. */
1024: const PetscInt c = cellsArray[i];
1025: PetscInt cell = c;
1027: if (cellNumbering) {PetscSectionGetOffset(cellNumbering, c, &cell);}
1028: for (j = 0; j < nodesPerCell; ++j) {
1029: for (l = 0; l < bs; ++l) {
1030: const PetscInt globalDof = cellNodeMap[cell*nodesPerCell + j]*bs + l + subspaceOffset;
1031: const PetscInt localDof = dofsArray[key++];
1033: if (localDof >= 0) {PetscHMapISet(ht, globalDof, localDof);}
1034: }
1035: }
1036: }
1038: /* Shove it in the output data structure. */
1039: PetscSectionGetOffset(gtolCounts, v, &goff);
1040: PetscHashIterBegin(ht, hi);
1041: while (!PetscHashIterAtEnd(ht, hi)) {
1042: PetscInt globalDof, localDof;
1044: PetscHashIterGetKey(ht, hi, globalDof);
1045: PetscHashIterGetVal(ht, hi, localDof);
1046: if (globalDof >= 0) globalDofsArray[goff + localDof] = globalDof;
1047: PetscHashIterNext(ht, hi);
1048: }
1050: for (p = 0; p < Np; ++p) {
1051: const PetscInt point = pointsArray[ooff + p];
1052: PetscInt globalDof, localDof;
1054: PCPatchGetGlobalDofs(pc, patch->dofSection, k, patch->combined, point, NULL, &globalDof);
1055: PetscHMapIGet(ht, globalDof, &localDof);
1056: offsArray[(ooff + p)*Nf + k] = localDof;
1057: }
1058: }
1060: PetscHSetIDestroy(&globalBcs);
1061: PetscHSetIDestroy(&ownedpts);
1062: PetscHSetIDestroy(&seenpts);
1063: PetscHSetIDestroy(&owneddofs);
1064: PetscHSetIDestroy(&seendofs);
1065: PetscHSetIDestroy(&artificialbcs);
1067: /* At this point, we have a hash table ht built that maps globalDof -> localDof.
1068: We need to create the dof table laid out cellwise first, then by subspace,
1069: as the assembler assembles cell-wise and we need to stuff the different
1070: contributions of the different function spaces to the right places. So we loop
1071: over cells, then over subspaces. */
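  /* Concretely, with two subspaces the loop below emits cell 0's subspace-0
     dofs, then cell 0's subspace-1 dofs, then cell 1's subspace-0 dofs, and so
     on, so each cell's entries for all function spaces stay contiguous in asmArray. */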
1072: if (patch->nsubspaces > 1) { /* for nsubspaces = 1, data we need is already in dofsArray */
1073: for (i = off; i < off + dof; ++i) {
1074: const PetscInt c = cellsArray[i];
1075: PetscInt cell = c;
1077: if (cellNumbering) {PetscSectionGetOffset(cellNumbering, c, &cell);}
1078: for (k = 0; k < patch->nsubspaces; ++k) {
1079: const PetscInt *cellNodeMap = patch->cellNodeMap[k];
1080: PetscInt nodesPerCell = patch->nodesPerCell[k];
1081: PetscInt subspaceOffset = patch->subspaceOffsets[k];
1082: PetscInt bs = patch->bs[k];
1084: for (j = 0; j < nodesPerCell; ++j) {
1085: for (l = 0; l < bs; ++l) {
1086: const PetscInt globalDof = cellNodeMap[cell*nodesPerCell + j]*bs + l + subspaceOffset;
1087: PetscInt localDof;
1089: PetscHMapIGet(ht, globalDof, &localDof);
1090: /* If it's not in the hash table, i.e. is a BC dof,
1091:                 then the PetscHMapIGet above gives -1, which matches
1092: exactly the convention for PETSc's matrix assembly to
1093: ignore the dof. So we don't need to do anything here */
1094: asmArray[asmKey++] = localDof;
1095: }
1096: }
1097: }
1098: }
1099: }
1100: }
1101: if (1 == patch->nsubspaces) {PetscMemcpy(asmArray, dofsArray, numDofs * sizeof(PetscInt));}
1103: PetscHMapIDestroy(&ht);
1104: ISRestoreIndices(cells, &cellsArray);
1105: ISRestoreIndices(points, &pointsArray);
1106: PetscFree(dofsArray);
1107: /* Create placeholder section for map from points to patch dofs */
1108: PetscSectionCreate(PETSC_COMM_SELF, &patch->patchSection);
1109: PetscSectionSetNumFields(patch->patchSection, patch->nsubspaces);
1110: PetscSectionGetChart(patch->dofSection[0], &pStart, &pEnd);
1111: PetscSectionSetChart(patch->patchSection, pStart, pEnd);
1112: for (p = pStart; p < pEnd; ++p) {
1113: PetscInt dof, fdof, f;
1115: PetscSectionGetDof(patch->dofSection[0], p, &dof);
1116: PetscSectionSetDof(patch->patchSection, p, dof);
1117: for (f = 0; f < patch->nsubspaces; ++f) {
1118: PetscSectionGetFieldDof(patch->dofSection[0], p, f, &fdof);
1119: if (ierr == 0) {
1120: PetscSectionSetFieldDof(patch->patchSection, p, f, fdof);
1121: }
1122: else {
1123: /* assume only one field */
1124: PetscSectionSetFieldDof(patch->patchSection, p, f, dof);
1125: }
1126: }
1127: }
1128: PetscSectionSetUp(patch->patchSection);
1129: PetscSectionSetUseFieldOffsets(patch->patchSection, PETSC_TRUE);
1130: /* Replace cell indices with firedrake-numbered ones. */
1131: ISGeneralSetIndices(cells, numCells, (const PetscInt *) newCellsArray, PETSC_OWN_POINTER);
1132: ISCreateGeneral(PETSC_COMM_SELF, numGlobalDofs, globalDofsArray, PETSC_OWN_POINTER, &patch->gtol);
1133: PetscObjectSetName((PetscObject) patch->gtol, "Global Indices");
1134: PetscSectionViewFromOptions(patch->gtolCounts, (PetscObject) pc, "-pc_patch_g2l_view");
1135: ISViewFromOptions(patch->gtol, (PetscObject) pc, "-pc_patch_g2l_view");
1136: ISCreateGeneral(PETSC_COMM_SELF, numDofs, asmArray, PETSC_OWN_POINTER, &patch->dofs);
1137: ISCreateGeneral(PETSC_COMM_SELF, numPoints*Nf, offsArray, PETSC_OWN_POINTER, &patch->offs);
1138: return(0);
1139: }
1141: static PetscErrorCode PCPatchZeroFillMatrix_Private(Mat mat, const PetscInt ncell, const PetscInt ndof, const PetscInt *dof)
1142: {
1143: PetscScalar *values = NULL;
1144: PetscInt rows, c, i;
1145: PetscErrorCode ierr;
1148: PetscCalloc1(ndof*ndof, &values);
1149: for (c = 0; c < ncell; ++c) {
1150: const PetscInt *idx = &dof[ndof*c];
1151: MatSetValues(mat, ndof, idx, ndof, idx, values, INSERT_VALUES);
1152: }
1153: MatGetLocalSize(mat, &rows, NULL);
1154: for (i = 0; i < rows; ++i) {
1155: MatSetValues(mat, 1, &i, 1, &i, values, INSERT_VALUES);
1156: }
1157: MatAssemblyBegin(mat, MAT_FINAL_ASSEMBLY);
1158: MatAssemblyEnd(mat, MAT_FINAL_ASSEMBLY);
1159: PetscFree(values);
1160: return(0);
1161: }
1163: static PetscErrorCode PCPatchCreateMatrix_Private(PC pc, PetscInt point, Mat *mat)
1164: {
1165: PC_PATCH *patch = (PC_PATCH *) pc->data;
1166: Vec x, y;
1167: PetscBool flg;
1168: PetscInt csize, rsize;
1169: const char *prefix = NULL;
1173: x = patch->patchX[point];
1174: y = patch->patchY[point];
1175: VecGetSize(x, &csize);
1176: VecGetSize(y, &rsize);
1177: MatCreate(PETSC_COMM_SELF, mat);
1178: PCGetOptionsPrefix(pc, &prefix);
1179: MatSetOptionsPrefix(*mat, prefix);
1180: MatAppendOptionsPrefix(*mat, "pc_patch_sub_");
1181: if (patch->sub_mat_type) {MatSetType(*mat, patch->sub_mat_type);}
1182: else if (!patch->sub_mat_type) {MatSetType(*mat, MATDENSE);}
1183: MatSetSizes(*mat, rsize, csize, rsize, csize);
1184: PetscObjectTypeCompare((PetscObject) *mat, MATDENSE, &flg);
1185: if (!flg) {PetscObjectTypeCompare((PetscObject)*mat, MATSEQDENSE, &flg);}
1186: /* Sparse patch matrices */
1187: if (!flg) {
1188: PetscBT bt;
1189: PetscInt *dnnz = NULL;
1190: const PetscInt *dofsArray = NULL;
1191: PetscInt pStart, pEnd, ncell, offset, c, i, j;
1193: ISGetIndices(patch->dofs, &dofsArray);
1194: PetscSectionGetChart(patch->cellCounts, &pStart, &pEnd);
1195: point += pStart;
1196: if (point >= pEnd) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Operator point %D not in [%D, %D)\n", point, pStart, pEnd);
1197: PetscSectionGetDof(patch->cellCounts, point, &ncell);
1198: PetscSectionGetOffset(patch->cellCounts, point, &offset);
1199: PetscCalloc1(rsize, &dnnz);
1200: PetscLogEventBegin(PC_Patch_Prealloc, pc, 0, 0, 0);
1201: /* XXX: This uses N^2 bits to store the sparsity pattern on a
1202: * patch. This is probably OK if the patches are not too big,
1203: * but could use quite a bit of memory for planes in 3D.
1204: * Should we switch based on the value of rsize to a
1205: * hash-table (slower, but more memory efficient) approach? */
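      /* For a sense of scale (illustrative numbers only): rsize = 1,000 patch
       * dofs needs 10^6 bits (~125 KB) for the PetscBT, while rsize = 10,000
       * would need 10^8 bits (~12.5 MB) per patch. */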
1206: PetscBTCreate(rsize*rsize, &bt);
1207: for (c = 0; c < ncell; ++c) {
1208: const PetscInt *idx = dofsArray + (offset + c)*patch->totalDofsPerCell;
1209: for (i = 0; i < patch->totalDofsPerCell; ++i) {
1210: const PetscInt row = idx[i];
1211: if (row < 0) continue;
1212: for (j = 0; j < patch->totalDofsPerCell; ++j) {
1213: const PetscInt col = idx[j];
1214: const PetscInt key = row*rsize + col;
1215: if (col < 0) continue;
1216: if (!PetscBTLookupSet(bt, key)) ++dnnz[row];
1217: }
1218: }
1219: }
1220: PetscBTDestroy(&bt);
1221: MatXAIJSetPreallocation(*mat, 1, dnnz, NULL, NULL, NULL);
1222: PetscFree(dnnz);
1223: PCPatchZeroFillMatrix_Private(*mat, ncell, patch->totalDofsPerCell, &dofsArray[offset*patch->totalDofsPerCell]);
1224: PetscLogEventEnd(PC_Patch_Prealloc, pc, 0, 0, 0);
1225: ISRestoreIndices(patch->dofs, &dofsArray);
1226: }
1227: MatSetUp(*mat);
1228: return(0);
1229: }
1231: static PetscErrorCode PCPatchComputeOperator_DMPlex_Private(PC pc, PetscInt patchNum, Mat J, IS cellIS, PetscInt n, const PetscInt *l2p, void *ctx)
1232: {
1233: PC_PATCH *patch = (PC_PATCH *) pc->data;
1234: DM dm;
1235: PetscSection s;
1236: const PetscInt *parray, *oarray;
1237: PetscInt Nf = patch->nsubspaces, Np, poff, p, f;
1238: PetscErrorCode ierr;
1241: PCGetDM(pc, &dm);
1242: DMGetDefaultSection(dm, &s);
1243: /* Set offset into patch */
1244: PetscSectionGetDof(patch->pointCounts, patchNum, &Np);
1245: PetscSectionGetOffset(patch->pointCounts, patchNum, &poff);
1246: ISGetIndices(patch->points, &parray);
1247: ISGetIndices(patch->offs, &oarray);
1248: for (f = 0; f < Nf; ++f) {
1249: for (p = 0; p < Np; ++p) {
1250: const PetscInt point = parray[poff+p];
1251: PetscInt dof;
1253: PetscSectionGetFieldDof(patch->patchSection, point, f, &dof);
1254: PetscSectionSetFieldOffset(patch->patchSection, point, f, oarray[(poff+p)*Nf+f]);
1255: if (patch->nsubspaces == 1) {PetscSectionSetOffset(patch->patchSection, point, oarray[(poff+p)*Nf+f]);}
1256: else {PetscSectionSetOffset(patch->patchSection, point, -1);}
1257: }
1258: }
1259: ISRestoreIndices(patch->points, &parray);
1260: ISRestoreIndices(patch->offs, &oarray);
1261: if (patch->viewSection) {ObjectView((PetscObject) patch->patchSection, patch->viewerSection, patch->formatSection);}
1262: /* TODO Shut off MatViewFromOptions() in MatAssemblyEnd() here */
1263: DMPlexComputeJacobian_Patch_Internal(pc->dm, patch->patchSection, patch->patchSection, cellIS, 0.0, 0.0, NULL, NULL, J, J, ctx);
1264: return(0);
1265: }
1267: static PetscErrorCode PCPatchComputeOperator_Private(PC pc, Mat mat, PetscInt point)
1268: {
1269: PC_PATCH *patch = (PC_PATCH *) pc->data;
1270: const PetscInt *dofsArray;
1271: const PetscInt *cellsArray;
1272: PetscInt ncell, offset, pStart, pEnd;
1273: PetscErrorCode ierr;
1276: PetscLogEventBegin(PC_Patch_ComputeOp, pc, 0, 0, 0);
1277: if (!patch->usercomputeop) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Must call PCPatchSetComputeOperator() to set user callback\n");
1278: ISGetIndices(patch->dofs, &dofsArray);
1279: ISGetIndices(patch->cells, &cellsArray);
1280: PetscSectionGetChart(patch->cellCounts, &pStart, &pEnd);
1282: point += pStart;
1283: if (point >= pEnd) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Operator point %D not in [%D, %D)\n", point, pStart, pEnd);
1285: PetscSectionGetDof(patch->cellCounts, point, &ncell);
1286: PetscSectionGetOffset(patch->cellCounts, point, &offset);
1287: if (ncell <= 0) {
1288: PetscLogEventEnd(PC_Patch_ComputeOp, pc, 0, 0, 0);
1289: return(0);
1290: }
1291: PetscStackPush("PCPatch user callback");
1292: /* Cannot reuse the same IS because the geometry info is being cached in it */
1293: ISCreateGeneral(PETSC_COMM_SELF, ncell, cellsArray + offset, PETSC_USE_POINTER, &patch->cellIS);
1294: patch->usercomputeop(pc, point, mat, patch->cellIS, ncell*patch->totalDofsPerCell, dofsArray + offset*patch->totalDofsPerCell, patch->usercomputectx);
1295: PetscStackPop;
1296: ISDestroy(&patch->cellIS);
1297: ISRestoreIndices(patch->dofs, &dofsArray);
1298: ISRestoreIndices(patch->cells, &cellsArray);
1299: if (patch->viewMatrix) {
1300: char name[PETSC_MAX_PATH_LEN];
1302: PetscSNPrintf(name, PETSC_MAX_PATH_LEN-1, "Patch matrix for Point %D", point);
1303: PetscObjectSetName((PetscObject) mat, name);
1304: ObjectView((PetscObject) mat, patch->viewerMatrix, patch->formatMatrix);
1305: }
1306: PetscLogEventEnd(PC_Patch_ComputeOp, pc, 0, 0, 0);
1307: return(0);
1308: }
1310: static PetscErrorCode PCPatch_ScatterLocal_Private(PC pc, PetscInt p, Vec x, Vec y, InsertMode mode, ScatterMode scat)
1311: {
1312: PC_PATCH *patch = (PC_PATCH *) pc->data;
1313: const PetscScalar *xArray = NULL;
1314: PetscScalar *yArray = NULL;
1315: const PetscInt *gtolArray = NULL;
1316: PetscInt dof, offset, lidx;
1317: PetscErrorCode ierr;
1320: PetscLogEventBegin(PC_Patch_Scatter, pc, 0, 0, 0);
1321: VecGetArrayRead(x, &xArray);
1322: VecGetArray(y, &yArray);
1323: PetscSectionGetDof(patch->gtolCounts, p, &dof);
1324: PetscSectionGetOffset(patch->gtolCounts, p, &offset);
1325:   ISGetIndices(patch->gtol, &gtolArray);
1326: if (mode == INSERT_VALUES && scat != SCATTER_FORWARD) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Can't insert if not scattering forward\n");
1327: if (mode == ADD_VALUES && scat != SCATTER_REVERSE) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Can't add if not scattering reverse\n");
1328: for (lidx = 0; lidx < dof; ++lidx) {
1329: const PetscInt gidx = gtolArray[offset+lidx];
1331: if (mode == INSERT_VALUES) yArray[lidx] = xArray[gidx]; /* Forward */
1332: else yArray[gidx] += xArray[lidx]; /* Reverse */
1333: }
1334:   ISRestoreIndices(patch->gtol, &gtolArray);
1335: VecRestoreArrayRead(x, &xArray);
1336: VecRestoreArray(y, &yArray);
1337: PetscLogEventEnd(PC_Patch_Scatter, pc, 0, 0, 0);
1338: return(0);
1339: }
1341: static PetscErrorCode PCSetUp_PATCH(PC pc)
1342: {
1343: PC_PATCH *patch = (PC_PATCH *) pc->data;
1344: PetscInt i;
1345: const char *prefix;
1346: PetscErrorCode ierr;
1349: if (!pc->setupcalled) {
1350: PetscInt pStart, pEnd, p;
1351: PetscInt localSize;
1353: PetscLogEventBegin(PC_Patch_CreatePatches, pc, 0, 0, 0);
1355: if (!patch->nsubspaces) {
1356: DM dm;
1357: PetscDS prob;
1358: PetscSection s;
1359: PetscInt cStart, cEnd, c, Nf, f, numGlobalBcs = 0, *globalBcs, *Nb, totNb = 0, **cellDofs;
1361: PCGetDM(pc, &dm);
1362: if (!dm) SETERRQ(PetscObjectComm((PetscObject) pc), PETSC_ERR_ARG_WRONG, "Must set DM for PCPATCH or call PCPatchSetDiscretisationInfo()");
1363: DMGetDefaultSection(dm, &s);
1364: PetscSectionGetNumFields(s, &Nf);
1365: PetscSectionGetChart(s, &pStart, &pEnd);
1366: for (p = pStart; p < pEnd; ++p) {
1367: PetscInt cdof;
1368: PetscSectionGetConstraintDof(s, p, &cdof);
1369: numGlobalBcs += cdof;
1370: }
1371: DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd);
1372: DMGetDS(dm, &prob);
1373: PetscMalloc3(Nf, &Nb, Nf, &cellDofs, numGlobalBcs, &globalBcs);
1374: for (f = 0; f < Nf; ++f) {
1375: PetscFE fe;
1376: PetscDualSpace sp;
1377: PetscInt cdoff = 0;
1379: PetscDSGetDiscretization(prob, f, (PetscObject *) &fe);
1380: /* PetscFEGetNumComponents(fe, &Nc[f]); */
1381: PetscFEGetDualSpace(fe, &sp);
1382: PetscDualSpaceGetDimension(sp, &Nb[f]);
1383: totNb += Nb[f];
1385: PetscMalloc1((cEnd-cStart)*Nb[f], &cellDofs[f]);
1386: for (c = cStart; c < cEnd; ++c) {
1387: PetscInt *closure = NULL;
1388: PetscInt clSize = 0, cl;
1390: DMPlexGetTransitiveClosure(dm, c, PETSC_TRUE, &clSize, &closure);
1391: for (cl = 0; cl < clSize*2; cl += 2) {
1392: const PetscInt p = closure[cl];
1393: PetscInt fdof, d, foff;
1395: PetscSectionGetFieldDof(s, p, f, &fdof);
1396: PetscSectionGetFieldOffset(s, p, f, &foff);
1397: for (d = 0; d < fdof; ++d, ++cdoff) cellDofs[f][cdoff] = foff + d;
1398: }
1399: DMPlexRestoreTransitiveClosure(dm, c, PETSC_TRUE, &clSize, &closure);
1400: }
1401: if (cdoff != (cEnd-cStart)*Nb[f]) SETERRQ4(PetscObjectComm((PetscObject) pc), PETSC_ERR_ARG_SIZ, "Total number of cellDofs %D for field %D should be Nc (%D) * cellDof (%D)", cdoff, f, cEnd-cStart, Nb[f]);
1402: }
1403: numGlobalBcs = 0;
1404: for (p = pStart; p < pEnd; ++p) {
1405: const PetscInt *ind;
1406: PetscInt off, cdof, d;
1408: PetscSectionGetOffset(s, p, &off);
1409: PetscSectionGetConstraintDof(s, p, &cdof);
1410: PetscSectionGetConstraintIndices(s, p, &ind);
1411: for (d = 0; d < cdof; ++d) globalBcs[numGlobalBcs++] = off + ind[d];
1412: }
1414: PCPatchSetDiscretisationInfoCombined(pc, dm, Nb, (const PetscInt **) cellDofs, numGlobalBcs, globalBcs, numGlobalBcs, globalBcs);
1415: for (f = 0; f < Nf; ++f) {
1416: PetscFree(cellDofs[f]);
1417: }
1418: PetscFree3(Nb, cellDofs, globalBcs);
1419: PCPatchSetComputeOperator(pc, PCPatchComputeOperator_DMPlex_Private, NULL);
1420: }
1422: localSize = patch->subspaceOffsets[patch->nsubspaces];
1423: VecCreateSeq(PETSC_COMM_SELF, localSize, &patch->localX);
1424: VecSetUp(patch->localX);
1425: VecDuplicate(patch->localX, &patch->localY);
1426: PCPatchCreateCellPatches(pc);
1427: PCPatchCreateCellPatchDiscretisationInfo(pc);
1429: /* OK, now build the work vectors */
1430: PetscSectionGetChart(patch->gtolCounts, &pStart, &pEnd);
1431: PetscMalloc1(patch->npatch, &patch->patchX);
1432: PetscMalloc1(patch->npatch, &patch->patchY);
1433: for (p = pStart; p < pEnd; ++p) {
1434: PetscInt dof;
1436: PetscSectionGetDof(patch->gtolCounts, p, &dof);
1437: VecCreateSeq(PETSC_COMM_SELF, dof, &patch->patchX[p-pStart]);
1438: VecSetUp(patch->patchX[p-pStart]);
1439: VecCreateSeq(PETSC_COMM_SELF, dof, &patch->patchY[p-pStart]);
1440: VecSetUp(patch->patchY[p-pStart]);
1441: }
1442: PetscMalloc1(patch->npatch, &patch->ksp);
1443: PCGetOptionsPrefix(pc, &prefix);
1444: for (i = 0; i < patch->npatch; ++i) {
1445: PC subpc;
1447: KSPCreate(PETSC_COMM_SELF, &patch->ksp[i]);
1448: KSPSetOptionsPrefix(patch->ksp[i], prefix);
1449: KSPAppendOptionsPrefix(patch->ksp[i], "sub_");
1450: PetscObjectIncrementTabLevel((PetscObject) patch->ksp[i], (PetscObject) pc, 1);
1451: KSPGetPC(patch->ksp[i], &subpc);
1452: PetscObjectIncrementTabLevel((PetscObject) subpc, (PetscObject) pc, 1);
1453: PetscLogObjectParent((PetscObject) pc, (PetscObject) patch->ksp[i]);
1454: }
1455: if (patch->save_operators) {
1456: PetscMalloc1(patch->npatch, &patch->mat);
1457: for (i = 0; i < patch->npatch; ++i) {
1458: PCPatchCreateMatrix_Private(pc, i, &patch->mat[i]);
1459: }
1460: }
1461: PetscLogEventEnd(PC_Patch_CreatePatches, pc, 0, 0, 0);
1463: /* If desired, calculate weights for dof multiplicity */
1464: if (patch->partition_of_unity) {
1465: VecDuplicate(patch->localX, &patch->dof_weights);
1466: for (i = 0; i < patch->npatch; ++i) {
1467: PetscInt dof;
1469: PetscSectionGetDof(patch->gtolCounts, i+pStart, &dof);
1470: if (dof <= 0) continue;
1471: VecSet(patch->patchX[i], 1.0);
1472: PCPatch_ScatterLocal_Private(pc, i+pStart, patch->patchX[i], patch->dof_weights, ADD_VALUES, SCATTER_REVERSE);
1473: }
1474: VecReciprocal(patch->dof_weights);
1475: }
1476: }
1477: if (patch->save_operators) {
1478: for (i = 0; i < patch->npatch; ++i) {
1479: MatZeroEntries(patch->mat[i]);
1480: PCPatchComputeOperator_Private(pc, patch->mat[i], i);
1481: KSPSetOperators(patch->ksp[i], patch->mat[i], patch->mat[i]);
1482: }
1483: }
1484: if (!pc->setupcalled && patch->optionsSet) for (i = 0; i < patch->npatch; ++i) {KSPSetFromOptions(patch->ksp[i]);}
1485: return(0);
1486: }
1488: static PetscErrorCode PCApply_PATCH(PC pc, Vec x, Vec y)
1489: {
1490: PC_PATCH *patch = (PC_PATCH *) pc->data;
1491: const PetscScalar *globalX = NULL;
1492: PetscScalar *localX = NULL;
1493: PetscScalar *globalY = NULL;
1494: const PetscInt *bcNodes = NULL;
1495: PetscInt nsweep = patch->symmetrise_sweep ? 2 : 1;
1496: PetscInt start[2] = {0, 0};
1497: PetscInt end[2] = {-1, -1};
1498: const PetscInt inc[2] = {1, -1};
1499: const PetscScalar *localY;
1500: const PetscInt *iterationSet;
1501: PetscInt pStart, numBcs, n, sweep, bc, j;
1502: PetscErrorCode ierr;
1505: PetscLogEventBegin(PC_Patch_Apply, pc, 0, 0, 0);
1506: PetscOptionsPushGetViewerOff(PETSC_TRUE);
1507: end[0] = patch->npatch;
1508: start[1] = patch->npatch-1;
1509: if (patch->user_patches) {
1510: ISGetLocalSize(patch->iterationSet, &end[0]);
1511: start[1] = end[0] - 1;
1512: ISGetIndices(patch->iterationSet, &iterationSet);
1513: }
1514: /* Scatter from global space into overlapped local spaces */
1515: VecGetArrayRead(x, &globalX);
1516: VecGetArray(patch->localX, &localX);
1517: PetscSFBcastBegin(patch->defaultSF, MPIU_SCALAR, globalX, localX);
1518: PetscSFBcastEnd(patch->defaultSF, MPIU_SCALAR, globalX, localX);
1519: VecRestoreArrayRead(x, &globalX);
1520: VecRestoreArray(patch->localX, &localX);
1522: VecSet(patch->localY, 0.0);
1523: PetscSectionGetChart(patch->gtolCounts, &pStart, NULL);
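  /* Sweep 0 visits patches 0 .. npatch-1 (or the user-supplied iteration set, in order); if
     symmetrise_sweep is set, sweep 1 revisits them in reverse using inc[1] = -1 and the
     start/end bounds set above. */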
1524: for (sweep = 0; sweep < nsweep; sweep++) {
1525: for (j = start[sweep]; j*inc[sweep] < end[sweep]*inc[sweep]; j += inc[sweep]) {
1526: PetscInt i = patch->user_patches ? iterationSet[j] : j;
1527: PetscInt start, len;
1529: PetscSectionGetDof(patch->gtolCounts, i+pStart, &len);
1530: PetscSectionGetOffset(patch->gtolCounts, i+pStart, &start);
1531: /* TODO: Squash out these guys in the setup as well. */
1532: if (len <= 0) continue;
1533: /* TODO: Do we need different scatters for X and Y? */
1534: PCPatch_ScatterLocal_Private(pc, i+pStart, patch->localX, patch->patchX[i], INSERT_VALUES, SCATTER_FORWARD);
1535: if (!patch->save_operators) {
1536: Mat mat;
1538: PCPatchCreateMatrix_Private(pc, i, &mat);
1539: /* Populate operator here. */
1540: PCPatchComputeOperator_Private(pc, mat, i);
1541: KSPSetOperators(patch->ksp[i], mat, mat);
1542: /* Drop our reference now; the KSPSetOperators(..., NULL, NULL) call below releases the KSP's reference once the solve is done. */
1543: MatDestroy(&mat);
1544: }
1545: PetscLogEventBegin(PC_Patch_Solve, pc, 0, 0, 0);
1546: KSPSolve(patch->ksp[i], patch->patchX[i], patch->patchY[i]);
1547: PetscLogEventEnd(PC_Patch_Solve, pc, 0, 0, 0);
1549: if (!patch->save_operators) {
1550: PC subpc;
1551: KSPSetOperators(patch->ksp[i], NULL, NULL);
1552: KSPGetPC(patch->ksp[i], &subpc);
1553: /* Destroy the sub-PC context too, otherwise the factored matrix hangs around. */
1554: PCReset(subpc);
1555: }
1557: PCPatch_ScatterLocal_Private(pc, i+pStart, patch->patchY[i], patch->localY, ADD_VALUES, SCATTER_REVERSE);
1558: }
1559: }
1560: if (patch->user_patches) {ISRestoreIndices(patch->iterationSet, &iterationSet);}
1561: /* XXX: should we do this on the global vector? */
1562: if (patch->partition_of_unity) {
1563: VecPointwiseMult(patch->localY, patch->localY, patch->dof_weights);
1564: }
1565: /* Now patch->localY contains the solution of the patch solves, so we need to combine them all. */
1566: VecSet(y, 0.0);
1567: VecGetArray(y, &globalY);
1568: VecGetArrayRead(patch->localY, &localY);
1569: PetscSFReduceBegin(patch->defaultSF, MPIU_SCALAR, localY, globalY, MPI_SUM);
1570: PetscSFReduceEnd(patch->defaultSF, MPIU_SCALAR, localY, globalY, MPI_SUM);
1571: VecRestoreArrayRead(patch->localY, &localY);
1573: /* Now we need to send the global BC values through */
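  /* Global Dirichlet (BC) dofs are handled by making the preconditioner act as the identity
     there: the corresponding entries of the input x are copied straight into y. */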
1574: VecGetArrayRead(x, &globalX);
1575: ISGetSize(patch->globalBcNodes, &numBcs);
1576: ISGetIndices(patch->globalBcNodes, &bcNodes);
1577: VecGetLocalSize(x, &n);
1578: for (bc = 0; bc < numBcs; ++bc) {
1579: const PetscInt idx = bcNodes[bc];
1580: if (idx < n) globalY[idx] = globalX[idx];
1581: }
1583: ISRestoreIndices(patch->globalBcNodes, &bcNodes);
1584: VecRestoreArrayRead(x, &globalX);
1585: VecRestoreArray(y, &globalY);
1587: PetscOptionsPopGetViewerOff();
1588: PetscLogEventEnd(PC_Patch_Apply, pc, 0, 0, 0);
1589: return(0);
1590: }
1592: static PetscErrorCode PCReset_PATCH(PC pc)
1593: {
1594: PC_PATCH *patch = (PC_PATCH *) pc->data;
1595: PetscInt i;
1599: /* TODO: Get rid of all these ifs */
1600: PetscSFDestroy(&patch->defaultSF);
1601: PetscSectionDestroy(&patch->cellCounts);
1602: PetscSectionDestroy(&patch->pointCounts);
1603: PetscSectionDestroy(&patch->cellNumbering);
1604: PetscSectionDestroy(&patch->gtolCounts);
1605: ISDestroy(&patch->gtol);
1606: ISDestroy(&patch->cells);
1607: ISDestroy(&patch->points);
1608: ISDestroy(&patch->dofs);
1609: ISDestroy(&patch->offs);
1610: PetscSectionDestroy(&patch->patchSection);
1611: ISDestroy(&patch->ghostBcNodes);
1612: ISDestroy(&patch->globalBcNodes);
1614: if (patch->dofSection) for (i = 0; i < patch->nsubspaces; i++) {PetscSectionDestroy(&patch->dofSection[i]);}
1615: PetscFree(patch->dofSection);
1616: PetscFree(patch->bs);
1617: PetscFree(patch->nodesPerCell);
1618: if (patch->cellNodeMap) for (i = 0; i < patch->nsubspaces; i++) {PetscFree(patch->cellNodeMap[i]);}
1619: PetscFree(patch->cellNodeMap);
1620: PetscFree(patch->subspaceOffsets);
1622: if (patch->ksp) {
1623: for (i = 0; i < patch->npatch; ++i) {KSPReset(patch->ksp[i]);}
1624: }
1626: VecDestroy(&patch->localX);
1627: VecDestroy(&patch->localY);
1628: if (patch->patchX) {
1629: for (i = 0; i < patch->npatch; ++i) {VecDestroy(&patch->patchX[i]);}
1630: PetscFree(patch->patchX);
1631: }
1632: if (patch->patchY) {
1633: for (i = 0; i < patch->npatch; ++i) {VecDestroy(&patch->patchY[i]);}
1634: PetscFree(patch->patchY);
1635: }
1636: VecDestroy(&patch->dof_weights);
1637: if (patch->patch_dof_weights) {
1638: for (i = 0; i < patch->npatch; ++i) {VecDestroy(&patch->patch_dof_weights[i]);}
1639: PetscFree(patch->patch_dof_weights);
1640: }
1641: if (patch->mat) {
1642: for (i = 0; i < patch->npatch; ++i) {MatDestroy(&patch->mat[i]);}
1643: PetscFree(patch->mat);
1644: }
1645: PetscFree(patch->sub_mat_type);
1646: if (patch->userIS) {
1647: for (i = 0; i < patch->npatch; ++i) {ISDestroy(&patch->userIS[i]);}
1648: PetscFree(patch->userIS);
1649: }
1650: patch->bs = 0;
1651: patch->cellNodeMap = NULL;
1652: patch->nsubspaces = 0;
1653: ISDestroy(&patch->iterationSet);
1655: PetscViewerDestroy(&patch->viewerSection);
1656: return(0);
1657: }
1659: static PetscErrorCode PCDestroy_PATCH(PC pc)
1660: {
1661: PC_PATCH *patch = (PC_PATCH *) pc->data;
1662: PetscInt i;
1666: PCReset_PATCH(pc);
1667: if (patch->ksp) {
1668: for (i = 0; i < patch->npatch; ++i) {KSPDestroy(&patch->ksp[i]);}
1669: PetscFree(patch->ksp);
1670: }
1671: PetscFree(pc->data);
1672: return(0);
1673: }
1675: static PetscErrorCode PCSetFromOptions_PATCH(PetscOptionItems *PetscOptionsObject, PC pc)
1676: {
1677: PC_PATCH *patch = (PC_PATCH *) pc->data;
1678: PCPatchConstructType patchConstructionType = PC_PATCH_STAR;
1679: char sub_mat_type[PETSC_MAX_PATH_LEN];
1680: const char *prefix;
1681: PetscBool flg, dimflg, codimflg;
1682: MPI_Comm comm;
1683: PetscErrorCode ierr;
1686: PetscObjectGetComm((PetscObject) pc, &comm);
1687: PetscObjectGetOptionsPrefix((PetscObject) pc, &prefix);
1688: PetscOptionsHead(PetscOptionsObject, "Vertex-patch Additive Schwarz options");
1689: PetscOptionsBool("-pc_patch_save_operators", "Store all patch operators for lifetime of PC?", "PCPatchSetSaveOperators", patch->save_operators, &patch->save_operators, &flg);
1690: PetscOptionsBool("-pc_patch_partition_of_unity", "Weight contributions by dof multiplicity?", "PCPatchSetPartitionOfUnity", patch->partition_of_unity, &patch->partition_of_unity, &flg);
1691: PetscOptionsInt("-pc_patch_construct_dim", "What dimension of mesh point to construct patches by? (0 = vertices)", "PCPATCH", patch->dim, &patch->dim, &dimflg);
1692: PetscOptionsInt("-pc_patch_construct_codim", "What co-dimension of mesh point to construct patches by? (0 = cells)", "PCPATCH", patch->codim, &patch->codim, &codimflg);
1693: if (dimflg && codimflg) SETERRQ(comm, PETSC_ERR_ARG_WRONG, "Can only set one of dimension or co-dimension");
1694: PetscOptionsEnum("-pc_patch_construct_type", "How should the patches be constructed?", "PCPatchSetConstructType", PCPatchConstructTypes, (PetscEnum) patchConstructionType, (PetscEnum *) &patchConstructionType, &flg);
1695: if (flg) {PCPatchSetConstructType(pc, patchConstructionType, NULL, NULL);}
1696: PetscOptionsInt("-pc_patch_vanka_dim", "Topological dimension of entities for Vanka to ignore", "PCPATCH", patch->vankadim, &patch->vankadim, &flg);
1697: PetscOptionsInt("-pc_patch_ignore_dim", "Topological dimension of entities for completion to ignore", "PCPATCH", patch->ignoredim, &patch->ignoredim, &flg);
1698: PetscOptionsFList("-pc_patch_sub_mat_type", "Matrix type for patch solves", "PCPatchSetSubMatType", MatList, NULL, sub_mat_type, PETSC_MAX_PATH_LEN, &flg);
1699: if (flg) {PCPatchSetSubMatType(pc, sub_mat_type);}
1700: PetscOptionsBool("-pc_patch_symmetrise_sweep", "Go start->end, end->start?", "PCPATCH", patch->symmetrise_sweep, &patch->symmetrise_sweep, &flg);
1701: PetscOptionsInt("-pc_patch_exclude_subspace", "What subspace (if any) to exclude in construction?", "PCPATCH", patch->exclude_subspace, &patch->exclude_subspace, &flg);
1702: if (patch->exclude_subspace && (patchConstructionType == PC_PATCH_USER)) SETERRQ(comm, PETSC_ERR_ARG_INCOMP, "We cannot support excluding a subspace with user patches because we do not index patches with a mesh point");
1704: PetscOptionsBool("-pc_patch_patches_view", "Print out information during patch construction", "PCPATCH", patch->viewPatches, &patch->viewPatches, &flg);
1705: PetscOptionsGetViewer(comm, prefix, "-pc_patch_cells_view", &patch->viewerCells, &patch->formatCells, &patch->viewCells);
1706: PetscOptionsGetViewer(comm, prefix, "-pc_patch_points_view", &patch->viewerPoints, &patch->formatPoints, &patch->viewPoints);
1707: PetscOptionsGetViewer(comm, prefix, "-pc_patch_section_view", &patch->viewerSection, &patch->formatSection, &patch->viewSection);
1708: PetscOptionsGetViewer(comm, prefix, "-pc_patch_mat_view", &patch->viewerMatrix, &patch->formatMatrix, &patch->viewMatrix);
1709: PetscOptionsTail();
1710: patch->optionsSet = PETSC_TRUE;
1711: return(0);
1712: }
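/* Set up each patch KSP and propagate any sub-solver setup failure (e.g. a failed
   factorization) to the outer PC via pc->failedreason. */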
1714: static PetscErrorCode PCSetUpOnBlocks_PATCH(PC pc)
1715: {
1716: PC_PATCH *patch = (PC_PATCH*) pc->data;
1717: KSPConvergedReason reason;
1718: PetscInt i;
1719: PetscErrorCode ierr;
1722: for (i = 0; i < patch->npatch; ++i) {
1723: KSPSetUp(patch->ksp[i]);
1724: KSPGetConvergedReason(patch->ksp[i], &reason);
1725: if (reason == KSP_DIVERGED_PCSETUP_FAILED) pc->failedreason = PC_SUBPC_ERROR;
1726: }
1727: return(0);
1728: }
1730: static PetscErrorCode PCView_PATCH(PC pc, PetscViewer viewer)
1731: {
1732: PC_PATCH *patch = (PC_PATCH *) pc->data;
1733: PetscViewer sviewer;
1734: PetscBool isascii;
1735: PetscMPIInt rank;
1739: /* TODO Redo tabbing with set tabs in new style */
1740: PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &isascii);
1741: if (!isascii) return(0);
1742: MPI_Comm_rank(PetscObjectComm((PetscObject) pc), &rank);
1743: PetscViewerASCIIPushTab(viewer);
1744: PetscViewerASCIIPrintf(viewer, "Subspace Correction preconditioner with %D patches\n", patch->npatch);
1745: PetscViewerASCIIPrintf(viewer, "Schwarz type: additive\n");
1746: if (patch->partition_of_unity) {PetscViewerASCIIPrintf(viewer, "Weighting by partition of unity\n");}
1747: else {PetscViewerASCIIPrintf(viewer, "Not weighting by partition of unity\n");}
1748: if (patch->symmetrise_sweep) {PetscViewerASCIIPrintf(viewer, "Symmetrising sweep (start->end, then end->start)\n");}
1749: else {PetscViewerASCIIPrintf(viewer, "Not symmetrising sweep\n");}
1750: if (!patch->save_operators) {PetscViewerASCIIPrintf(viewer, "Not saving patch operators (rebuilt every PCApply)\n");}
1751: else {PetscViewerASCIIPrintf(viewer, "Saving patch operators (rebuilt every PCSetUp)\n");}
1752: if (patch->patchconstructop == PCPatchConstruct_Star) {PetscViewerASCIIPrintf(viewer, "Patch construction operator: star\n");}
1753: else if (patch->patchconstructop == PCPatchConstruct_Vanka) {PetscViewerASCIIPrintf(viewer, "Patch construction operator: Vanka\n");}
1754: else if (patch->patchconstructop == PCPatchConstruct_User) {PetscViewerASCIIPrintf(viewer, "Patch construction operator: user-specified\n");}
1755: else {PetscViewerASCIIPrintf(viewer, "Patch construction operator: unknown\n");}
1756: PetscViewerASCIIPrintf(viewer, "KSP on patches (all same):\n");
1757: if (patch->ksp) {
1758: PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer);
1759: if (!rank) {
1760: PetscViewerASCIIPushTab(sviewer);
1761: KSPView(patch->ksp[0], sviewer);
1762: PetscViewerASCIIPopTab(sviewer);
1763: }
1764: PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer);
1765: } else {
1766: PetscViewerASCIIPushTab(viewer);
1767: PetscViewerASCIIPrintf(viewer, "KSP not yet set.\n");
1768: PetscViewerASCIIPopTab(viewer);
1769: }
1770: PetscViewerASCIIPopTab(viewer);
1771: return(0);
1772: }
1774: /*MC
1775: PCPATCH - A PC object for flexibly defining the blocks of overlapping and non-overlapping
1776: small-block additive preconditioners. Blocks (patches) are defined using the topology of
1777: a DM and the equation numbering from a PetscSection.
1779: Options Database Keys:
1780: + -pc_patch_cells_view - Views the process local cell numbers for each patch
1781: . -pc_patch_points_view - Views the process local mesh point numbers for each patch
1782: . -pc_patch_section_view - Views the PetscSection describing the dofs for each patch
1783: . -pc_patch_patches_view - Views the global dofs associated with each patch and its boundary
1784: - -pc_patch_mat_view - Views the matrix associated with each patch
1786: Level: intermediate
1788: .seealso: PCType, PCCreate(), PCSetType()
1789: M*/
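/* Editorial usage sketch (not part of the original source). It assumes a KSP "ksp" whose DM and
   PetscSection already describe the discretisation, and a user callback "ComputeOp" with the
   PCPatchSetComputeOperator signature; "ksp", "ComputeOp" and "userCtx" are placeholder names.

     PC pc;
     KSPGetPC(ksp, &pc);
     PCSetType(pc, PCPATCH);
     PCPatchSetConstructType(pc, PC_PATCH_STAR, NULL, NULL);
     PCPatchSetComputeOperator(pc, ComputeOp, userCtx);
     PCSetFromOptions(pc);

   Run-time behaviour can then be adjusted with the options registered in
   PCSetFromOptions_PATCH above, for example
   -pc_patch_construct_type vanka -pc_patch_save_operators -pc_patch_symmetrise_sweep. */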
1790: PETSC_EXTERN PetscErrorCode PCCreate_Patch(PC pc)
1791: {
1792: PC_PATCH *patch;
1796: PetscNewLog(pc, &patch);
1798: /* Set some defaults */
1799: patch->combined = PETSC_FALSE;
1800: patch->save_operators = PETSC_TRUE;
1801: patch->partition_of_unity = PETSC_FALSE;
1802: patch->codim = -1;
1803: patch->dim = -1;
1804: patch->exclude_subspace = -1;
1805: patch->vankadim = -1;
1806: patch->ignoredim = -1;
1807: patch->patchconstructop = PCPatchConstruct_Star;
1808: patch->symmetrise_sweep = PETSC_FALSE;
1809: patch->npatch = 0;
1810: patch->userIS = NULL;
1811: patch->optionsSet = PETSC_FALSE;
1812: patch->iterationSet = NULL;
1813: patch->user_patches = PETSC_FALSE;
1814: PetscStrallocpy(MATDENSE, (char **) &patch->sub_mat_type);
1815: patch->viewPatches = PETSC_FALSE;
1816: patch->viewCells = PETSC_FALSE;
1817: patch->viewPoints = PETSC_FALSE;
1818: patch->viewSection = PETSC_FALSE;
1819: patch->viewMatrix = PETSC_FALSE;
1821: pc->data = (void *) patch;
1822: pc->ops->apply = PCApply_PATCH;
1823: pc->ops->applytranspose = NULL; /* PCApplyTranspose_PATCH; */
1824: pc->ops->setup = PCSetUp_PATCH;
1825: pc->ops->reset = PCReset_PATCH;
1826: pc->ops->destroy = PCDestroy_PATCH;
1827: pc->ops->setfromoptions = PCSetFromOptions_PATCH;
1828: pc->ops->setuponblocks = PCSetUpOnBlocks_PATCH;
1829: pc->ops->view = PCView_PATCH;
1830: pc->ops->applyrichardson = NULL;
1832: return(0);
1833: }