Actual source code: plexcheckinterface.c
#include <petsc/private/dmpleximpl.h>

/* TODO PetscArrayExchangeBegin/End */
/* TODO blocksize */
/* TODO move to API ? */
static PetscErrorCode ExchangeArrayByRank_Private(PetscObject obj, MPI_Datatype dt, PetscInt nsranks, const PetscMPIInt sranks[], PetscInt ssize[], const void *sarr[], PetscInt nrranks, const PetscMPIInt rranks[], PetscInt *rsize_out[], void **rarr_out[])
{
  PetscInt     r;
  PetscInt    *rsize;
  void       **rarr;
  MPI_Request *sreq, *rreq;
  PetscMPIInt  tag, unitsize;
  MPI_Comm     comm;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Type_size(dt, &unitsize));
  PetscCall(PetscObjectGetComm(obj, &comm));
  PetscCall(PetscMalloc2(nrranks, &rsize, nrranks, &rarr));
  PetscCall(PetscMalloc2(nrranks, &rreq, nsranks, &sreq));
  /* exchange array size */
  PetscCall(PetscObjectGetNewTag(obj, &tag));
  for (r = 0; r < nrranks; r++) PetscCallMPI(MPI_Irecv(&rsize[r], 1, MPIU_INT, rranks[r], tag, comm, &rreq[r]));
  for (r = 0; r < nsranks; r++) PetscCallMPI(MPI_Isend(&ssize[r], 1, MPIU_INT, sranks[r], tag, comm, &sreq[r]));
  PetscCallMPI(MPI_Waitall(nrranks, rreq, MPI_STATUSES_IGNORE));
  PetscCallMPI(MPI_Waitall(nsranks, sreq, MPI_STATUSES_IGNORE));
  /* exchange array */
  PetscCall(PetscObjectGetNewTag(obj, &tag));
  for (r = 0; r < nrranks; r++) {
    PetscCall(PetscMalloc(rsize[r] * unitsize, &rarr[r]));
    PetscCallMPI(MPI_Irecv(rarr[r], rsize[r], dt, rranks[r], tag, comm, &rreq[r]));
  }
  for (r = 0; r < nsranks; r++) PetscCallMPI(MPI_Isend(sarr[r], ssize[r], dt, sranks[r], tag, comm, &sreq[r]));
  PetscCallMPI(MPI_Waitall(nrranks, rreq, MPI_STATUSES_IGNORE));
  PetscCallMPI(MPI_Waitall(nsranks, sreq, MPI_STATUSES_IGNORE));
  PetscCall(PetscFree2(rreq, sreq));
  *rsize_out = rsize;
  *rarr_out  = rarr;
  PetscFunctionReturn(PETSC_SUCCESS);
}
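
/* A minimal usage sketch (hypothetical caller and buffers, not part of this file): each rank
   exchanges one PetscInt array with every neighbor listed on both the send and receive side.
   The two-phase pattern above (sizes first, then payloads) lets the receiver size its buffers
   before posting the data receives.

     PetscMPIInt sranks[2] = {left, right}, rranks[2] = {left, right}; // hypothetical neighbor ranks
     PetscInt    ssize[2]  = {n0, n1};                                 // lengths of the two send buffers
     const void *sarr[2]   = {buf0, buf1};                             // hypothetical PetscInt buffers
     PetscInt   *rsize;
     void      **rarr;

     PetscCall(ExchangeArrayByRank_Private((PetscObject)dm, MPIU_INT, 2, sranks, ssize, sarr, 2, rranks, &rsize, &rarr));
     // rarr[r] now holds rsize[r] PetscInts received from rranks[r]
     for (PetscInt r = 0; r < 2; r++) PetscCall(PetscFree(rarr[r]));
     PetscCall(PetscFree2(rsize, rarr));
*/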

/* TODO VecExchangeBegin/End */
/* TODO move to API ? */
static PetscErrorCode ExchangeVecByRank_Private(PetscObject obj, PetscInt nsranks, const PetscMPIInt sranks[], Vec svecs[], PetscInt nrranks, const PetscMPIInt rranks[], Vec *rvecs[])
{
  PetscInt            r;
  PetscInt           *ssize, *rsize;
  PetscScalar       **rarr;
  const PetscScalar **sarr;
  Vec                *rvecs_;
  MPI_Request        *sreq, *rreq;

  PetscFunctionBegin;
  PetscCall(PetscMalloc4(nsranks, &ssize, nsranks, &sarr, nrranks, &rreq, nsranks, &sreq));
  for (r = 0; r < nsranks; r++) {
    PetscCall(VecGetLocalSize(svecs[r], &ssize[r]));
    PetscCall(VecGetArrayRead(svecs[r], &sarr[r]));
  }
  PetscCall(ExchangeArrayByRank_Private(obj, MPIU_SCALAR, nsranks, sranks, ssize, (const void **)sarr, nrranks, rranks, &rsize, (void ***)&rarr));
  PetscCall(PetscMalloc1(nrranks, &rvecs_));
  for (r = 0; r < nrranks; r++) {
    /* set array in two steps to mimic PETSC_OWN_POINTER */
    PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, rsize[r], NULL, &rvecs_[r]));
    PetscCall(VecReplaceArray(rvecs_[r], rarr[r]));
  }
  for (r = 0; r < nsranks; r++) PetscCall(VecRestoreArrayRead(svecs[r], &sarr[r]));
  PetscCall(PetscFree2(rsize, rarr));
  PetscCall(PetscFree4(ssize, sarr, rreq, sreq));
  *rvecs = rvecs_;
  PetscFunctionReturn(PETSC_SUCCESS);
}
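
/* The two-step attachment used above is a standard idiom: VecCreateSeqWithArray() offers no
   PETSC_OWN_POINTER mode, so the Vec is created with a NULL array and VecReplaceArray() then
   hands it a PetscMalloc'd buffer, which the Vec frees on destroy. A minimal sketch with a
   hypothetical buffer a of length n:

     PetscScalar *a;
     Vec          v;

     PetscCall(PetscMalloc1(n, &a));
     PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, n, NULL, &v));
     PetscCall(VecReplaceArray(v, a)); // v takes ownership of a
     PetscCall(VecDestroy(&v));        // frees a as well; do not free it yourself
*/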

static PetscErrorCode SortByRemote_Private(PetscSF sf, PetscInt *rmine1[], PetscInt *rremote1[])
{
  PetscInt           nleaves;
  PetscInt           nranks;
  const PetscMPIInt *ranks;
  const PetscInt    *roffset, *rmine, *rremote;
  PetscInt           n, o, r;

  PetscFunctionBegin;
  PetscCall(PetscSFGetRootRanks(sf, &nranks, &ranks, &roffset, &rmine, &rremote));
  nleaves = roffset[nranks];
  PetscCall(PetscMalloc2(nleaves, rmine1, nleaves, rremote1));
  for (r = 0; r < nranks; r++) {
    /* simultaneously sort rank-wise portions of rmine & rremote by values in rremote
       - to unify order with the other side */
    o = roffset[r];
    n = roffset[r + 1] - o;
    PetscCall(PetscArraycpy(&(*rmine1)[o], &rmine[o], n));
    PetscCall(PetscArraycpy(&(*rremote1)[o], &rremote[o], n));
    PetscCall(PetscSortIntWithArray(n, &(*rremote1)[o], &(*rmine1)[o]));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
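
/* PetscSortIntWithArray() sorts its first array ascending and applies the same permutation to
   the second, which is what keeps the (rremote1, rmine1) pairs of each rank-wise block aligned
   above. A standalone micro-example with made-up data:

     PetscInt key[3] = {3, 1, 2};
     PetscInt val[3] = {30, 10, 20};

     PetscCall(PetscSortIntWithArray(3, key, val));
     // key = {1, 2, 3}, val = {10, 20, 30}: the pairs stay matched
*/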

static PetscErrorCode GetRecursiveConeCoordinatesPerRank_Private(DM dm, PetscSF sf, PetscInt rmine[], Vec *coordinatesPerRank[])
{
  IS                 pointsPerRank, conesPerRank;
  PetscInt           nranks;
  const PetscMPIInt *ranks;
  const PetscInt    *roffset;
  PetscInt           n, o, r;

  PetscFunctionBegin;
  PetscCall(DMGetCoordinatesLocalSetUp(dm));
  PetscCall(PetscSFGetRootRanks(sf, &nranks, &ranks, &roffset, NULL, NULL));
  PetscCall(PetscMalloc1(nranks, coordinatesPerRank));
  for (r = 0; r < nranks; r++) {
    o = roffset[r];
    n = roffset[r + 1] - o;
    PetscCall(ISCreateGeneral(PETSC_COMM_SELF, n, &rmine[o], PETSC_USE_POINTER, &pointsPerRank));
    PetscCall(DMPlexGetConeRecursiveVertices(dm, pointsPerRank, &conesPerRank));
    PetscCall(DMGetCoordinatesLocalTuple(dm, conesPerRank, NULL, &(*coordinatesPerRank)[r]));
    PetscCall(ISDestroy(&pointsPerRank));
    PetscCall(ISDestroy(&conesPerRank));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
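
/* PETSC_USE_POINTER above makes the IS borrow &rmine[o] directly instead of copying it, so the
   IS must not outlive the array; that holds here since pointsPerRank is destroyed inside the
   loop. A minimal sketch of the borrowing mode with a hypothetical index array idx:

     PetscInt idx[3] = {0, 2, 4};
     IS       is;

     PetscCall(ISCreateGeneral(PETSC_COMM_SELF, 3, idx, PETSC_USE_POINTER, &is));
     // is references idx directly; ISDestroy() will not free idx
     PetscCall(ISDestroy(&is));
*/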

static PetscErrorCode PetscSFComputeMultiRootOriginalNumberingByRank_Private(PetscSF sf, PetscSF imsf, PetscInt *irmine1[])
{
  PetscInt       *mRootsOrigNumbering;
  PetscInt        nileaves, niranks;
  const PetscInt *iroffset, *irmine, *degree;
  PetscInt        i, n, o, r;

  PetscFunctionBegin;
  PetscCall(PetscSFGetGraph(imsf, NULL, &nileaves, NULL, NULL));
  PetscCall(PetscSFGetRootRanks(imsf, &niranks, NULL, &iroffset, &irmine, NULL));
  PetscCheck(nileaves == iroffset[niranks], PETSC_COMM_SELF, PETSC_ERR_PLIB, "nileaves != iroffset[niranks]");
  PetscCall(PetscSFComputeDegreeBegin(sf, &degree));
  PetscCall(PetscSFComputeDegreeEnd(sf, &degree));
  PetscCall(PetscSFComputeMultiRootOriginalNumbering(sf, degree, NULL, &mRootsOrigNumbering));
  PetscCall(PetscMalloc1(nileaves, irmine1));
  for (r = 0; r < niranks; r++) {
    o = iroffset[r];
    n = iroffset[r + 1] - o;
    for (i = 0; i < n; i++) (*irmine1)[o + i] = mRootsOrigNumbering[irmine[o + i]];
  }
  PetscCall(PetscFree(mRootsOrigNumbering));
  PetscFunctionReturn(PETSC_SUCCESS);
}
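
/* Worked micro-example (hypothetical numbers): if root 5 of sf has degree 2, i.e. two leaves
   reference it, it occupies two consecutive slots of the multi-SF, say multi-roots 8 and 9;
   PetscSFComputeMultiRootOriginalNumbering() maps both slots back to the original point, so
   mRootsOrigNumbering[8] == mRootsOrigNumbering[9] == 5, and the loop above rewrites the
   inverse-SF root indices (multi-root numbering) into original point numbers. */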

/*@
  DMPlexCheckInterfaceCones - Check that points on inter-partition interfaces have conforming order of cone points.

  Collective

  Input Parameter:
. dm - The `DMPLEX` object

  Level: developer

  Notes:
  For example, if there is an edge (rank,index)=(0,2) connecting points cone(0,2)=[(0,0),(0,1)] in this order, and the point `PetscSF` contains connections 0 <- (1,0), 1 <- (1,1) and 2 <- (1,2),
  then this check would pass if the edge (1,2) has cone(1,2)=[(1,0),(1,1)]. By contrast, if cone(1,2)=[(1,1),(1,0)], then this check would fail.

  This is mainly intended for debugging/testing purposes. It does not check cone orientation; for that purpose use `DMPlexCheckFaces()`.

  For the complete list of DMPlexCheck* functions, see `DMSetFromOptions()`.

  Developer Notes:
  Interface cones are expanded into vertices and then their coordinates are compared.

.seealso: [](ch_unstructured), `DM`, `DMPLEX`, `DMPlexGetCone()`, `DMPlexGetConeSize()`, `DMGetPointSF()`, `DMGetCoordinates()`, `DMSetFromOptions()`
@*/
PetscErrorCode DMPlexCheckInterfaceCones(DM dm)
{
  PetscSF            sf;
  PetscInt           nleaves, nranks, nroots;
  const PetscInt    *mine, *roffset, *rmine, *rremote;
  const PetscSFNode *remote;
  const PetscMPIInt *ranks;
  PetscSF            msf, imsf;
  PetscInt           nileaves, niranks;
  const PetscMPIInt *iranks;
  const PetscInt    *iroffset, *irmine, *irremote;
  PetscInt          *rmine1, *rremote1; /* rmine and rremote copies simultaneously sorted by rank and rremote */
  PetscInt          *mine_orig_numbering;
  Vec               *sntCoordinatesPerRank;
  Vec               *refCoordinatesPerRank;
  Vec               *recCoordinatesPerRank = NULL;
  PetscInt           r;
  PetscMPIInt        size, rank;
  PetscBool          same;
  PetscBool          verbose = PETSC_FALSE;
  MPI_Comm           comm;

  PetscFunctionBegin;

  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  if (size < 2) PetscFunctionReturn(PETSC_SUCCESS);
  PetscCall(DMGetPointSF(dm, &sf));
  if (!sf) PetscFunctionReturn(PETSC_SUCCESS);
  PetscCall(PetscSFGetGraph(sf, &nroots, &nleaves, &mine, &remote));
  if (nroots < 0) PetscFunctionReturn(PETSC_SUCCESS);
  PetscCheck(dm->coordinates[0].x || dm->coordinates[0].xl, PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "DM coordinates must be set");
  PetscCall(PetscSFSetUp(sf));
  PetscCall(PetscSFGetRootRanks(sf, &nranks, &ranks, &roffset, &rmine, &rremote));

  /* Expand sent cones per rank */
  PetscCall(SortByRemote_Private(sf, &rmine1, &rremote1));
  PetscCall(GetRecursiveConeCoordinatesPerRank_Private(dm, sf, rmine1, &sntCoordinatesPerRank));

  /* Create inverse SF */
  PetscCall(PetscSFGetMultiSF(sf, &msf));
  PetscCall(PetscSFCreateInverseSF(msf, &imsf));
  PetscCall(PetscSFSetUp(imsf));
  PetscCall(PetscSFGetGraph(imsf, NULL, &nileaves, NULL, NULL));
  PetscCall(PetscSFGetRootRanks(imsf, &niranks, &iranks, &iroffset, &irmine, &irremote));

  /* Compute original numbering of multi-roots (referenced points) */
  PetscCall(PetscSFComputeMultiRootOriginalNumberingByRank_Private(sf, imsf, &mine_orig_numbering));

  /* Expand coordinates of the referred cones per rank */
  PetscCall(GetRecursiveConeCoordinatesPerRank_Private(dm, imsf, mine_orig_numbering, &refCoordinatesPerRank));

  /* Send the coordinates */
  PetscCall(ExchangeVecByRank_Private((PetscObject)sf, nranks, ranks, sntCoordinatesPerRank, niranks, iranks, &recCoordinatesPerRank));

  /* verbose output */
  PetscCall(PetscOptionsGetBool(((PetscObject)dm)->options, ((PetscObject)dm)->prefix, "-dm_plex_check_cones_conform_on_interfaces_verbose", &verbose, NULL));
  if (verbose) {
    PetscViewer sv, v = PETSC_VIEWER_STDOUT_WORLD;
    PetscCall(PetscViewerASCIIPrintf(v, "============\nDMPlexCheckInterfaceCones output\n============\n"));
    PetscCall(PetscViewerASCIIPushSynchronized(v));
    PetscCall(PetscViewerASCIISynchronizedPrintf(v, "[%d] --------\n", rank));
    for (r = 0; r < size; r++) {
      if (r < nranks) {
        PetscCall(PetscViewerASCIISynchronizedPrintf(v, " r=%" PetscInt_FMT " ranks[r]=%d sntCoordinatesPerRank[r]:\n", r, ranks[r]));
        PetscCall(PetscViewerASCIIPushTab(v));
        PetscCall(PetscViewerGetSubViewer(v, PETSC_COMM_SELF, &sv));
        PetscCall(VecView(sntCoordinatesPerRank[r], sv));
        PetscCall(PetscViewerRestoreSubViewer(v, PETSC_COMM_SELF, &sv));
        PetscCall(PetscViewerASCIIPopTab(v));
      } else {
        PetscCall(PetscViewerGetSubViewer(v, PETSC_COMM_SELF, &sv));
        PetscCall(PetscViewerRestoreSubViewer(v, PETSC_COMM_SELF, &sv));
      }
    }
    PetscCall(PetscViewerASCIISynchronizedPrintf(v, " ----------\n"));
    for (r = 0; r < size; r++) {
      if (r < niranks) {
        PetscCall(PetscViewerASCIISynchronizedPrintf(v, " r=%" PetscInt_FMT " iranks[r]=%d refCoordinatesPerRank[r]:\n", r, iranks[r]));
        PetscCall(PetscViewerASCIIPushTab(v));
        PetscCall(PetscViewerGetSubViewer(v, PETSC_COMM_SELF, &sv));
        PetscCall(VecView(refCoordinatesPerRank[r], sv));
        PetscCall(PetscViewerRestoreSubViewer(v, PETSC_COMM_SELF, &sv));
        PetscCall(PetscViewerASCIIPopTab(v));
      } else {
        PetscCall(PetscViewerGetSubViewer(v, PETSC_COMM_SELF, &sv));
        PetscCall(PetscViewerRestoreSubViewer(v, PETSC_COMM_SELF, &sv));
      }
    }
    PetscCall(PetscViewerASCIISynchronizedPrintf(v, " ----------\n"));
    for (r = 0; r < size; r++) {
      if (r < niranks) {
        PetscCall(PetscViewerASCIISynchronizedPrintf(v, " r=%" PetscInt_FMT " iranks[r]=%d recCoordinatesPerRank[r]:\n", r, iranks[r]));
        PetscCall(PetscViewerASCIIPushTab(v));
        PetscCall(PetscViewerGetSubViewer(v, PETSC_COMM_SELF, &sv));
        PetscCall(VecView(recCoordinatesPerRank[r], sv));
        PetscCall(PetscViewerRestoreSubViewer(v, PETSC_COMM_SELF, &sv));
        PetscCall(PetscViewerASCIIPopTab(v));
      } else {
        PetscCall(PetscViewerGetSubViewer(v, PETSC_COMM_SELF, &sv));
        PetscCall(PetscViewerRestoreSubViewer(v, PETSC_COMM_SELF, &sv));
      }
    }
    PetscCall(PetscViewerASCIIPopSynchronized(v));
  }

  /* Compare recCoordinatesPerRank with refCoordinatesPerRank */
  for (r = 0; r < niranks; r++) {
    PetscCall(VecEqual(refCoordinatesPerRank[r], recCoordinatesPerRank[r], &same));
    PetscCheck(same, PETSC_COMM_SELF, PETSC_ERR_PLIB, "interface cones do not conform for remote rank %d", iranks[r]);
  }

  /* destroy sent stuff */
  for (r = 0; r < nranks; r++) PetscCall(VecDestroy(&sntCoordinatesPerRank[r]));
  PetscCall(PetscFree(sntCoordinatesPerRank));
  PetscCall(PetscFree2(rmine1, rremote1));
  PetscCall(PetscSFDestroy(&imsf));

  /* destroy referenced stuff */
  for (r = 0; r < niranks; r++) PetscCall(VecDestroy(&refCoordinatesPerRank[r]));
  PetscCall(PetscFree(refCoordinatesPerRank));
  PetscCall(PetscFree(mine_orig_numbering));

  /* destroy received stuff */
  for (r = 0; r < niranks; r++) PetscCall(VecDestroy(&recCoordinatesPerRank[r]));
  PetscCall(PetscFree(recCoordinatesPerRank));
  PetscFunctionReturn(PETSC_SUCCESS);
}
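
/* A minimal usage sketch (hypothetical driver code): run the check on a distributed DMPLEX
   once its coordinates are set. The check is a no-op on a single rank or without a point SF,
   and raises an error if interface cones do not conform.

     DM dm; // hypothetical distributed DMPLEX with coordinates set

     PetscCall(DMPlexCheckInterfaceCones(dm));

   The verbose per-rank dump shown above is enabled with the option
   -dm_plex_check_cones_conform_on_interfaces_verbose; see DMSetFromOptions() for the
   options-database route to the whole DMPlexCheck* family. */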