Actual source code: networkview.c
#include <petscconf.h>
// We need to define this ahead of any other includes to make sure mkstemp is actually defined
#if defined(PETSC_HAVE_MKSTEMP)
  #if !defined(_XOPEN_SOURCE)
    #define _XOPEN_SOURCE 600
  #endif
#endif
#include <petsc/private/dmnetworkimpl.h>
#include <petscdraw.h>

static PetscErrorCode DMView_Network_CSV(DM dm, PetscViewer viewer)
{
  DM              dmcoords;
  PetscInt        nsubnets, i, subnet, nvertices, nedges, vertex, edge, gidx, ncomp;
  PetscInt        vertexOffsets[2], globalEdgeVertices[2];
  PetscScalar     vertexCoords[2], *color_ptr, color;
  const PetscInt *vertices, *edges, *edgeVertices;
  Vec             allVertexCoords;
  PetscMPIInt     rank;
  MPI_Comm        comm;

  PetscFunctionBegin;
  // Get the coordinate information from dmcoords
  PetscCheck(dm->coordinates[0].dm, PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_NULL, "CoordinateDM not created");
  PetscCall(DMGetCoordinateDM(dm, &dmcoords));

  PetscCall(DMGetCoordinateDim(dmcoords, &i));
  PetscCheck(i == 2, PETSC_COMM_WORLD, PETSC_ERR_SUP, "dim %" PetscInt_FMT " != 2 is not supported yet", i);

  // Get the coordinate vector from dm
  PetscCall(DMGetCoordinatesLocal(dm, &allVertexCoords));

  // Get the MPI communicator and this process' rank
  PetscCall(PetscObjectGetComm((PetscObject)dmcoords, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));

  // Start synchronized printing
  PetscCall(PetscViewerASCIIPushSynchronized(viewer));

  // Write the header
  PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "Type,Rank,ID,X,Y,Z,Name,Color\n"));
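  // For illustration (hypothetical values), a two-vertex, one-edge network on rank 0 would emit rows like:
  //   Node,0,0,0.000000,0.000000,0,0,0.000000
  //   Node,0,1,1.000000,0.000000,0,1,0.000000
  //   Edge,0,0,0,1,0,0
  // Note the Name column currently repeats the global index, and edge rows reuse the X,Y columns for the
  // global endpoint indices (see the TODOs below)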

  // Iterate over each subnetwork (Note: we need the global number of subnetworks here)
  PetscCall(DMNetworkGetNumSubNetworks(dmcoords, NULL, &nsubnets));
  for (subnet = 0; subnet < nsubnets; subnet++) {
    // Get the subnetwork's vertices and edges
    PetscCall(DMNetworkGetSubnetwork(dmcoords, subnet, &nvertices, &nedges, &vertices, &edges));

    // Write out each vertex
    for (i = 0; i < nvertices; i++) {
      vertex = vertices[i];

      // Get the offset into the coordinate vector for the vertex
      PetscCall(DMNetworkGetLocalVecOffset(dmcoords, vertex, ALL_COMPONENTS, vertexOffsets));
      vertexOffsets[1] = vertexOffsets[0] + 1;
      // Remap vertex to the global value
      PetscCall(DMNetworkGetGlobalVertexIndex(dmcoords, vertex, &gidx));
      // Get the vertex position from the coordinate vector
      PetscCall(VecGetValues(allVertexCoords, 2, vertexOffsets, vertexCoords));

      // Get vertex color; TODO: name
      PetscCall(DMNetworkGetNumComponents(dmcoords, vertex, &ncomp));
      PetscCheck(ncomp <= 1, PETSC_COMM_WORLD, PETSC_ERR_SUP, "num of components %" PetscInt_FMT " must be <= 1", ncomp);
      color = 0.0;
      if (ncomp == 1) {
        PetscCall(DMNetworkGetComponent(dmcoords, vertex, 0, NULL, (void **)&color_ptr, NULL));
        color = *color_ptr;
      }
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "Node,%" PetscInt_FMT ",%" PetscInt_FMT ",%lf,%lf,0,%" PetscInt_FMT ",%lf\n", (PetscInt)rank, gidx, (double)PetscRealPart(vertexCoords[0]), (double)PetscRealPart(vertexCoords[1]), gidx, (double)PetscRealPart(color)));
    }

    // Write out each edge
    for (i = 0; i < nedges; i++) {
      edge = edges[i];
      PetscCall(DMNetworkGetConnectedVertices(dmcoords, edge, &edgeVertices));
      PetscCall(DMNetworkGetGlobalVertexIndex(dmcoords, edgeVertices[0], &globalEdgeVertices[0]));
      PetscCall(DMNetworkGetGlobalVertexIndex(dmcoords, edgeVertices[1], &globalEdgeVertices[1]));
      PetscCall(DMNetworkGetGlobalEdgeIndex(dmcoords, edge, &edge));

      // TODO: Determine edge color/name
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "Edge,%" PetscInt_FMT ",%" PetscInt_FMT ",%" PetscInt_FMT ",%" PetscInt_FMT ",0,%" PetscInt_FMT "\n", (PetscInt)rank, edge, globalEdgeVertices[0], globalEdgeVertices[1], edge));
    }
  }
  // End synchronized printing
  PetscCall(PetscViewerFlush(viewer));
  PetscCall(PetscViewerASCIIPopSynchronized(viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}
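
/*
  Usage sketch (hypothetical caller, assuming a configured DMNETWORK `dm` with 2D coordinates):
  DMView_Network_CSV() is reached through DMView() when an ASCII viewer has the
  PETSC_VIEWER_ASCII_CSV format pushed:

    PetscViewer viewer;

    PetscCall(PetscViewerASCIIOpen(PETSC_COMM_WORLD, "network.csv", &viewer));
    PetscCall(PetscViewerPushFormat(viewer, PETSC_VIEWER_ASCII_CSV));
    PetscCall(DMView(dm, viewer));
    PetscCall(PetscViewerPopFormat(viewer));
    PetscCall(PetscViewerDestroy(&viewer));
*/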

static PetscErrorCode DMView_Network_Matplotlib(DM dm, PetscViewer viewer)
{
  PetscMPIInt rank, size;
  MPI_Comm    comm;
  char        filename[PETSC_MAX_PATH_LEN + 1], options[512], proccall[PETSC_MAX_PATH_LEN + 512], scriptFile[PETSC_MAX_PATH_LEN + 1], buffer[256], buffer2[256];
  PetscViewer csvViewer;
  FILE       *processFile = NULL;
  PetscBool   isnull, optionShowRanks = PETSC_FALSE, optionRankIsSet = PETSC_FALSE, showNoNodes = PETSC_FALSE, showNoNumbering = PETSC_FALSE, optionShowVertices = PETSC_FALSE, optionViewPadding = PETSC_FALSE;
  PetscDraw   draw;
  DM_Network *network = (DM_Network *)dm->data;
  PetscReal   drawPause, viewPadding = 1.0;
  PetscInt    i;
#if defined(PETSC_HAVE_MKSTEMP)
  PetscBool isSharedTmp;
#endif

  PetscFunctionBegin;
  // Deal with the PetscDraw we are given
  PetscCall(PetscViewerDrawGetDraw(viewer, 1, &draw));
  PetscCall(PetscDrawIsNull(draw, &isnull));
  PetscCall(PetscDrawSetVisible(draw, PETSC_FALSE));

  // Clear the file name buffer so all communicated bytes are well-defined
  PetscCall(PetscMemzero(filename, sizeof(filename)));

  // Get the MPI communicator and this process' rank
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));

#if defined(PETSC_HAVE_MKSTEMP)
  // Determine whether the temporary directory is shared between ranks
  // Note: This must be done collectively on every rank; it cannot be done on a single rank
  PetscCall(PetscSharedTmp(comm, &isSharedTmp));
#endif

  /* Process Options */
  optionShowRanks = network->vieweroptions.showallranks;
  showNoNodes     = network->vieweroptions.shownovertices;
  showNoNumbering = network->vieweroptions.shownonumbering;

  /*
    TODO: It would be good if the -dmnetwork_view_tmpdir option could be moved up here as well.
  */
  PetscOptionsBegin(PetscObjectComm((PetscObject)dm), ((PetscObject)dm)->prefix, "MatPlotLib PetscViewer DMNetwork Options", "PetscViewer");
  PetscCall(PetscOptionsBool("-dmnetwork_view_all_ranks", "View all ranks in the DMNetwork", NULL, optionShowRanks, &optionShowRanks, NULL));
  PetscCall(PetscOptionsString("-dmnetwork_view_rank_range", "Set of ranks to view the DMNetwork on", NULL, buffer, buffer, sizeof(buffer), &optionRankIsSet));
  PetscCall(PetscOptionsBool("-dmnetwork_view_no_vertices", "Do not view vertices", NULL, showNoNodes, &showNoNodes, NULL));
  PetscCall(PetscOptionsBool("-dmnetwork_view_no_numbering", "Do not view edge and vertex numbering", NULL, showNoNumbering, &showNoNumbering, NULL));
  PetscCall(PetscOptionsString("-dmnetwork_view_zoomin_vertices", "Focus the view on the given set of vertices", NULL, buffer2, buffer2, sizeof(buffer2), &optionShowVertices));
  PetscCall(PetscOptionsReal("-dmnetwork_view_zoomin_vertices_padding", "Set the padding when viewing specific vertices", NULL, viewPadding, &viewPadding, &optionViewPadding));
  PetscOptionsEnd();
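  /*
    For example, a hypothetical run

      ./app -dmnetwork_view_all_ranks -dmnetwork_view_zoomin_vertices 0,1 -dmnetwork_view_zoomin_vertices_padding 0.5

    sets optionShowRanks, optionShowVertices (with the vertex list kept verbatim in buffer2 and
    forwarded to the plotting script, which defines the exact list syntax), and viewPadding here.
  */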

  // Generate and broadcast the temporary file name from rank 0
  if (rank == 0) {
#if defined(PETSC_HAVE_TMPNAM_S)
    // Acquire a temporary file to write to and open an ASCII/CSV viewer
    PetscCheck(tmpnam_s(filename, sizeof(filename)) == 0, comm, PETSC_ERR_SYS, "Could not acquire temporary file");
#elif defined(PETSC_HAVE_MKSTEMP)
    PetscBool isTmpOverridden;
    size_t    numChars;
    // Same thing, but for POSIX systems on which tmpnam is deprecated
    // Note: Configure may detect mkstemp, but it will not be defined when compiling for C99, so check additional defines to see if we can use it
    // mkstemp requires us to specify part of the path explicitly, and some systems may not like putting files in /tmp/, so provide an option for it
    PetscCall(PetscOptionsGetString(NULL, NULL, "-dmnetwork_view_tmpdir", filename, sizeof(filename), &isTmpOverridden));
    // If not specified by the option, try using a shared tmp directory on the system
    if (!isTmpOverridden) {
      // Validate that if tmp is not overridden it is at least shared
      PetscCheck(isSharedTmp, comm, PETSC_ERR_SUP_SYS, "Temporary file directory is not shared between ranks, try using -dmnetwork_view_tmpdir to specify a shared directory");
      PetscCall(PetscGetTmp(PETSC_COMM_SELF, filename, sizeof(filename)));
    }
    // Make sure the filename ends with a '/'
    PetscCall(PetscStrlen(filename, &numChars));
    if (filename[numChars - 1] != '/') {
      filename[numChars]     = '/';
      filename[numChars + 1] = 0;
    }
    // Perform the actual temporary file creation
    PetscCall(PetscStrlcat(filename, "XXXXXX", sizeof(filename)));
    PetscCheck(mkstemp(filename) != -1, comm, PETSC_ERR_SYS, "Could not acquire temporary file");
#else
    // Same thing, but for older C versions which don't have the safe form
    PetscCheck(tmpnam(filename) != NULL, comm, PETSC_ERR_SYS, "Could not acquire temporary file");
#endif
  }

  // Broadcast the filename to all other MPI ranks
  PetscCallMPI(MPI_Bcast(filename, PETSC_MAX_PATH_LEN, MPI_BYTE, 0, comm));

  PetscCall(PetscViewerASCIIOpen(comm, filename, &csvViewer));
  PetscCall(PetscViewerPushFormat(csvViewer, PETSC_VIEWER_ASCII_CSV));

  // Use the CSV viewer to write out the local network
  PetscCall(DMView_Network_CSV(dm, csvViewer));

  // Close the viewer
  PetscCall(PetscViewerDestroy(&csvViewer));

  // Generate options string
  PetscCall(PetscMemzero(options, sizeof(options)));
  // If the draw is null, run as a "test execute", i.e. do nothing and just test that the script can be called correctly
  PetscCall(PetscStrlcat(options, isnull ? " -tx " : " ", sizeof(options)));
  PetscCall(PetscDrawGetPause(draw, &drawPause));
  if (drawPause > 0) {
    char pausebuffer[64];
    PetscCall(PetscSNPrintf(pausebuffer, sizeof(pausebuffer), "%f", (double)drawPause));
    PetscCall(PetscStrlcat(options, " -dt ", sizeof(options)));
    PetscCall(PetscStrlcat(options, pausebuffer, sizeof(options)));
  }
  if (optionShowRanks || optionRankIsSet) {
    // Show all ranks only if the option is set in code or by the user AND not showing specific ranks AND there is more than one process
    if (optionShowRanks && !optionRankIsSet && size != 1) PetscCall(PetscStrlcat(options, " -dar ", sizeof(options)));
    // Do not show the global plot if the user requests it OR if one specific rank is requested
    if (network->vieweroptions.dontshowglobal || optionRankIsSet) PetscCall(PetscStrlcat(options, " -ncp ", sizeof(options)));

    if (optionRankIsSet) {
      // If a range of ranks to draw is specified append it
      PetscCall(PetscStrlcat(options, " -drr ", sizeof(options)));
      PetscCall(PetscStrlcat(options, buffer, sizeof(options)));
    } else {
      // Otherwise, use the options provided in code
      if (network->vieweroptions.viewranks) {
        const PetscInt *viewranks;
        PetscInt        viewrankssize;
        char            rankbuffer[64];

        PetscCall(ISGetTotalIndices(network->vieweroptions.viewranks, &viewranks));
        PetscCall(ISGetSize(network->vieweroptions.viewranks, &viewrankssize));
        PetscCall(PetscStrlcat(options, " -drr ", sizeof(options)));
        for (i = 0; i < viewrankssize; i++) {
          PetscCall(PetscSNPrintf(rankbuffer, sizeof(rankbuffer), "%" PetscInt_FMT, viewranks[i]));
          PetscCall(PetscStrlcat(options, rankbuffer, sizeof(options)));
        }
        PetscCall(ISRestoreTotalIndices(network->vieweroptions.viewranks, &viewranks));
      } // if not provided an IS of viewing ranks, skip viewing
    }
  }
  if (optionShowVertices) {
    // Pass vertices to focus on if defined
    PetscCall(PetscStrlcat(options, " -vsv ", sizeof(options)));
    PetscCall(PetscStrlcat(options, buffer2, sizeof(options)));
    optionViewPadding = PETSC_TRUE;
    // Always pass the padding when zooming in on vertices (viewPadding defaults to 1.0 if the option was not set)
    if (optionViewPadding) {
      PetscCall(PetscSNPrintf(buffer2, sizeof(buffer2), "%f", (double)viewPadding));
      PetscCall(PetscStrlcat(options, " -vp ", sizeof(options)));
      PetscCall(PetscStrlcat(options, buffer2, sizeof(options)));
    }
  }

  // Check the remaining visibility options
  if (showNoNodes) PetscCall(PetscStrlcat(options, " -nn ", sizeof(options)));
  if (showNoNumbering) PetscCall(PetscStrlcat(options, " -nnl -nel ", sizeof(options)));

  // Get the value of $PETSC_DIR
  PetscCall(PetscStrreplace(comm, "${PETSC_DIR}/share/petsc/bin/dmnetwork_view.py", scriptFile, sizeof(scriptFile)));
  PetscCall(PetscFixFilename(scriptFile, scriptFile));
  // Generate the system call for 'python3 $PETSC_DIR/share/petsc/bin/dmnetwork_view.py <options> <filename>'
  PetscCall(PetscArrayzero(proccall, sizeof(proccall)));
  PetscCall(PetscSNPrintf(proccall, sizeof(proccall), "%s %s %s %s", PETSC_PYTHON_EXE, scriptFile, options, filename));

#if defined(PETSC_HAVE_POPEN)
  // Perform the call to run the python script (Note: while this is called on all ranks, POpen will only run on rank 0)
  PetscCall(PetscPOpen(comm, NULL, proccall, "r", &processFile));
  if (processFile != NULL) {
    while (fgets(buffer, sizeof(buffer), processFile) != NULL) PetscCall(PetscPrintf(comm, "%s", buffer));
  }
  PetscCall(PetscPClose(comm, processFile));
#else
  // Same thing, but using the standard library for systems that don't have POpen/PClose (only run on rank 0)
  if (rank == 0) PetscCheck(system(proccall) == 0, PETSC_COMM_SELF, PETSC_ERR_SYS, "Failed to call viewer script");
  // Barrier so that all ranks wait until the call completes
  PetscCallMPI(MPI_Barrier(comm));
#endif
  // Clean up the temporary file using rank 0
  if (rank == 0) PetscCheck(remove(filename) == 0, PETSC_COMM_SELF, PETSC_ERR_SYS, "Failed to delete temporary file");
  PetscFunctionReturn(PETSC_SUCCESS);
}
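
/*
  Usage sketch (hypothetical, assuming a configured DMNETWORK `dm`): the matplotlib pipeline
  above is triggered by viewing the network with a draw viewer,

    PetscCall(DMView(dm, PETSC_VIEWER_DRAW_WORLD));

  which dispatches here via DMView_Network() below. It presumes PETSC_PYTHON_EXE points to a
  Python able to run dmnetwork_view.py (i.e. with matplotlib available).
*/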

PetscErrorCode DMView_Network(DM dm, PetscViewer viewer)
{
  PetscBool         iascii, isdraw;
  PetscViewerFormat format;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
  PetscCall(PetscViewerGetFormat(viewer, &format));

  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (isdraw) {
    PetscCall(DMView_Network_Matplotlib(dm, viewer));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  if (iascii) {
    const PetscInt *cone, *vtx, *edges;
    PetscInt        vfrom, vto, i, j, nv, ne, nsv, p, nsubnet;
    DM_Network     *network = (DM_Network *)dm->data;
    PetscMPIInt     rank;

    PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)dm), &rank));
    if (format == PETSC_VIEWER_ASCII_CSV) {
      PetscCall(DMView_Network_CSV(dm, viewer));
      PetscFunctionReturn(PETSC_SUCCESS);
    }

    nsubnet = network->cloneshared->Nsubnet; /* num of subnetworks */
    if (!rank) {
      PetscCall(PetscPrintf(PETSC_COMM_SELF, " NSubnets: %" PetscInt_FMT "; NEdges: %" PetscInt_FMT "; NVertices: %" PetscInt_FMT "; NSharedVertices: %" PetscInt_FMT ".\n", nsubnet, network->cloneshared->NEdges, network->cloneshared->NVertices, network->cloneshared->Nsvtx));
    }

    PetscCall(DMNetworkGetSharedVertices(dm, &nsv, NULL));
    PetscCall(PetscViewerASCIIPushSynchronized(viewer));
    PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " [%d] nEdges: %" PetscInt_FMT "; nVertices: %" PetscInt_FMT "; nSharedVertices: %" PetscInt_FMT "\n", rank, network->cloneshared->nEdges, network->cloneshared->nVertices, nsv));

    for (i = 0; i < nsubnet; i++) {
      PetscCall(DMNetworkGetSubnetwork(dm, i, &nv, &ne, &vtx, &edges));
      if (ne) {
        PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " Subnet %" PetscInt_FMT ": nEdges %" PetscInt_FMT ", nVertices (including shared vertices) %" PetscInt_FMT "\n", i, ne, nv));
        for (j = 0; j < ne; j++) {
          p = edges[j];
          PetscCall(DMNetworkGetConnectedVertices(dm, p, &cone));
          PetscCall(DMNetworkGetGlobalVertexIndex(dm, cone[0], &vfrom));
          PetscCall(DMNetworkGetGlobalVertexIndex(dm, cone[1], &vto));
          PetscCall(DMNetworkGetGlobalEdgeIndex(dm, edges[j], &p));
          PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " edge %" PetscInt_FMT ": %" PetscInt_FMT " ----> %" PetscInt_FMT "\n", p, vfrom, vto));
        }
      }
    }

    /* Shared vertices */
    PetscCall(DMNetworkGetSharedVertices(dm, NULL, &vtx));
    if (nsv) {
      PetscInt        gidx;
      PetscBool       ghost;
      const PetscInt *sv = NULL;

      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " SharedVertices:\n"));
      for (i = 0; i < nsv; i++) {
        PetscCall(DMNetworkIsGhostVertex(dm, vtx[i], &ghost));
        if (ghost) continue;

        PetscCall(DMNetworkSharedVertexGetInfo(dm, vtx[i], &gidx, &nv, &sv));
        PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " svtx %" PetscInt_FMT ": global index %" PetscInt_FMT ", subnet[%" PetscInt_FMT "].%" PetscInt_FMT " ---->\n", i, gidx, sv[0], sv[1]));
        for (j = 1; j < nv; j++) PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " ----> subnet[%" PetscInt_FMT "].%" PetscInt_FMT "\n", sv[2 * j], sv[2 * j + 1]));
      }
    }
    PetscCall(PetscViewerFlush(viewer));
    PetscCall(PetscViewerASCIIPopSynchronized(viewer));
  } else PetscCheck(iascii, PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Viewer type %s not yet supported for DMNetwork writing", ((PetscObject)viewer)->type_name);
  PetscFunctionReturn(PETSC_SUCCESS);
}
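
/*
  For illustration (hypothetical single-rank, single-subnetwork network), the default ASCII
  branch above produces a summary of the form:

    NSubnets: 1; NEdges: 2; NVertices: 3; NSharedVertices: 0.
    [0] nEdges: 2; nVertices: 3; nSharedVertices: 0
    Subnet 0: nEdges 2, nVertices (including shared vertices) 3
    edge 0: 0 ----> 1
    edge 1: 1 ----> 2
*/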

/*@
  DMNetworkViewSetShowRanks - Sets viewing the `DMNETWORK` on each rank individually.

  Logically Collective

  Input Parameters:
+ dm        - the `DMNETWORK` object
- showranks - `PETSC_TRUE` if viewing each rank's sub network individually

  Level: beginner

.seealso: `DM`, `DMNETWORK`, `DMNetworkViewSetShowGlobal()`, `DMNetworkViewSetShowVertices()`, `DMNetworkViewSetShowNumbering()`, `DMNetworkViewSetViewRanks()`
@*/
PetscErrorCode DMNetworkViewSetShowRanks(DM dm, PetscBool showranks)
{
  DM_Network *network = (DM_Network *)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  network->vieweroptions.showallranks = showranks;
  PetscFunctionReturn(PETSC_SUCCESS);
}
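
/*
  Example (hypothetical, assuming a configured DMNETWORK `dm`): enable per-rank plots in code
  before viewing; this provides the default for the -dmnetwork_view_all_ranks option processed
  in DMView_Network_Matplotlib() above.

    PetscCall(DMNetworkViewSetShowRanks(dm, PETSC_TRUE));
    PetscCall(DMView(dm, PETSC_VIEWER_DRAW_WORLD));
*/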

/*@
  DMNetworkViewSetShowGlobal - Sets viewing the global network in viewing routines.

  Logically Collective

  Input Parameters:
+ dm         - the `DMNETWORK` object
- showglobal - `PETSC_TRUE` if viewing the global network

  Level: beginner

.seealso: `DM`, `DMNETWORK`, `DMNetworkViewSetShowRanks()`, `DMNetworkViewSetShowVertices()`, `DMNetworkViewSetShowNumbering()`, `DMNetworkViewSetViewRanks()`
@*/
PetscErrorCode DMNetworkViewSetShowGlobal(DM dm, PetscBool showglobal)
{
  DM_Network *network = (DM_Network *)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  network->vieweroptions.dontshowglobal = (PetscBool)(!showglobal);
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  DMNetworkViewSetShowVertices - Sets whether to display the vertices in viewing routines.

  Logically Collective

  Input Parameters:
+ dm           - the `DMNETWORK` object
- showvertices - `PETSC_TRUE` if visualizing the vertices

  Level: beginner

.seealso: `DM`, `DMNETWORK`, `DMNetworkViewSetShowRanks()`, `DMNetworkViewSetShowGlobal()`, `DMNetworkViewSetShowNumbering()`, `DMNetworkViewSetViewRanks()`
@*/
PetscErrorCode DMNetworkViewSetShowVertices(DM dm, PetscBool showvertices)
{
  DM_Network *network = (DM_Network *)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  network->vieweroptions.shownovertices = (PetscBool)(!showvertices);
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  DMNetworkViewSetShowNumbering - Sets displaying the numbering of edges and vertices in viewing routines.

  Logically Collective

  Input Parameters:
+ dm             - the `DMNETWORK` object
- shownumbering - `PETSC_TRUE` if displaying the numbering of edges and vertices

  Level: beginner

.seealso: `DM`, `DMNETWORK`, `DMNetworkViewSetShowRanks()`, `DMNetworkViewSetShowGlobal()`, `DMNetworkViewSetShowVertices()`, `DMNetworkViewSetViewRanks()`
@*/
PetscErrorCode DMNetworkViewSetShowNumbering(DM dm, PetscBool shownumbering)
{
  DM_Network *network = (DM_Network *)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  network->vieweroptions.shownonumbering = (PetscBool)(!shownumbering);
  PetscFunctionReturn(PETSC_SUCCESS);
}
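
/*
  Example (hypothetical): the three setters above combine naturally, e.g. to draw only the
  edges of the global network, without vertex markers or labels:

    PetscCall(DMNetworkViewSetShowGlobal(dm, PETSC_TRUE));
    PetscCall(DMNetworkViewSetShowVertices(dm, PETSC_FALSE));
    PetscCall(DMNetworkViewSetShowNumbering(dm, PETSC_FALSE));
*/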

/*@
  DMNetworkViewSetViewRanks - View the `DMNETWORK` on each of the specified ranks individually.

  Collective

  Input Parameters:
+ dm        - the `DMNETWORK` object
- viewranks - set of ranks to view the `DMNETWORK` on individually

  Level: beginner

  Note:
  `DMNetwork` takes its own reference to the input `viewranks` `IS`, so it should still be destroyed by the caller.

.seealso: `DM`, `DMNETWORK`, `DMNetworkViewSetShowRanks()`, `DMNetworkViewSetShowGlobal()`, `DMNetworkViewSetShowVertices()`, `DMNetworkViewSetShowNumbering()`
@*/
PetscErrorCode DMNetworkViewSetViewRanks(DM dm, IS viewranks)
{
  DM_Network *network = (DM_Network *)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(viewranks, IS_CLASSID, 2);
  PetscCheckSameComm(dm, 1, viewranks, 2);
  PetscCall(ISDestroy(&network->vieweroptions.viewranks));
  PetscCall(PetscObjectReference((PetscObject)viewranks));
  network->vieweroptions.viewranks = viewranks;
  PetscFunctionReturn(PETSC_SUCCESS);
}
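
/*
  Example (hypothetical): restrict per-rank plots to ranks 0 and 2. Because the function takes
  its own reference to the IS, the caller still destroys it:

    IS       viewranks;
    PetscInt ranks[] = {0, 2};

    PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)dm), 2, ranks, PETSC_COPY_VALUES, &viewranks));
    PetscCall(DMNetworkViewSetViewRanks(dm, viewranks));
    PetscCall(ISDestroy(&viewranks));
*/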