Actual source code: network.c
1: #include <petsc/private/dmnetworkimpl.h>
2: #include "petscis.h"
4: PetscLogEvent DMNetwork_LayoutSetUp;
5: PetscLogEvent DMNetwork_SetUpNetwork;
6: PetscLogEvent DMNetwork_Distribute;
8: /*
9: Creates the component header and value objects for a network point
10: */
11: static PetscErrorCode SetUpNetworkHeaderComponentValue(DM dm, DMNetworkComponentHeader header, DMNetworkComponentValue cvalue)
12: {
13: PetscFunctionBegin;
14: /* Allocate arrays for component information */
15: PetscCall(PetscCalloc5(header->maxcomps, &header->size, header->maxcomps, &header->key, header->maxcomps, &header->offset, header->maxcomps, &header->nvar, header->maxcomps, &header->offsetvarrel));
16: PetscCall(PetscCalloc1(header->maxcomps, &cvalue->data));
18:   /* The header occupies the size of struct _p_DMNetworkComponentHeader plus the space for the five PetscInt arrays its pointers reference, so sizeof(struct) alone is not sufficient and the size must be computed explicitly.
19:      If the data header struct changes then this header size calculation needs to be updated. */
20: header->hsize = sizeof(struct _p_DMNetworkComponentHeader) + 5 * header->maxcomps * sizeof(PetscInt);
21: header->hsize /= sizeof(DMNetworkComponentGenericDataType);
22: #if defined(__NEC__)
23: /* NEC/LG: quick hack to keep data aligned on 8 bytes. */
24: header->hsize = (header->hsize + (8 - 1)) & ~(8 - 1);
25: #endif
26: PetscFunctionReturn(PETSC_SUCCESS);
27: }
29: PetscErrorCode DMNetworkInitializeHeaderComponentData(DM dm)
30: {
31: DM_Network *network = (DM_Network *)dm->data;
32: PetscInt np, p, defaultnumcomp = DMNETWORK_MAX_COMP_AT_POINT_DEFAULT;
34: PetscFunctionBegin;
35: np = network->cloneshared->pEnd - network->cloneshared->pStart;
36: if (network->header)
37: for (p = 0; p < np; p++) {
38: network->header[p].maxcomps = defaultnumcomp;
39: PetscCall(SetUpNetworkHeaderComponentValue(dm, &network->header[p], &network->cvalue[p]));
40: }
42: PetscFunctionReturn(PETSC_SUCCESS);
43: }
45: /*@
46: DMNetworkGetPlex - Gets the `DMPLEX` associated with this `DMNETWORK`
48: Not Collective
50: Input Parameter:
51: . dm - the `DMNETWORK` object
53: Output Parameter:
54: . plexdm - the `DMPLEX` object
56: Level: advanced
58: .seealso: `DM`, `DMNETWORK`, `DMPLEX`, `DMNetworkCreate()`
59: @*/
60: PetscErrorCode DMNetworkGetPlex(DM dm, DM *plexdm)
61: {
62: DM_Network *network = (DM_Network *)dm->data;
64: PetscFunctionBegin;
65: *plexdm = network->plex;
66: PetscFunctionReturn(PETSC_SUCCESS);
67: }
69: /*@
70:   DMNetworkGetNumSubNetworks - Gets the number of subnetworks
72: Not Collective
74: Input Parameter:
75: . dm - the `DMNETWORK` object
77: Output Parameters:
78: + nsubnet - local number of subnetworks
79: - Nsubnet - global number of subnetworks
81: Level: beginner
83: .seealso: `DM`, `DMNETWORK`, `DMNetworkCreate()`, `DMNetworkSetNumSubNetworks()`
84: @*/
85: PetscErrorCode DMNetworkGetNumSubNetworks(DM dm, PetscInt *nsubnet, PetscInt *Nsubnet)
86: {
87: DM_Network *network = (DM_Network *)dm->data;
89: PetscFunctionBegin;
90: if (nsubnet) *nsubnet = network->cloneshared->nsubnet;
91: if (Nsubnet) *Nsubnet = network->cloneshared->Nsubnet;
92: PetscFunctionReturn(PETSC_SUCCESS);
93: }
95: /*@
96: DMNetworkSetNumSubNetworks - Sets the number of subnetworks
98: Collective
100: Input Parameters:
101: + dm - the `DMNETWORK` object
102: . nsubnet - local number of subnetworks
103: - Nsubnet - global number of subnetworks
105: Level: beginner
107: .seealso: `DM`, `DMNETWORK`, `DMNetworkCreate()`, `DMNetworkGetNumSubNetworks()`
108: @*/
109: PetscErrorCode DMNetworkSetNumSubNetworks(DM dm, PetscInt nsubnet, PetscInt Nsubnet)
110: {
111: DM_Network *network = (DM_Network *)dm->data;
113: PetscFunctionBegin;
114: PetscCheck(network->cloneshared->Nsubnet == 0, PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_INCOMP, "Network sizes already set, cannot resize the network");
120: if (Nsubnet == PETSC_DECIDE) {
121: PetscCheck(nsubnet >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Number of local subnetworks %" PetscInt_FMT " cannot be less than 0", nsubnet);
122: PetscCall(MPIU_Allreduce(&nsubnet, &Nsubnet, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)dm)));
123: }
124: PetscCheck(Nsubnet >= 1, PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_INCOMP, "Number of global subnetworks %" PetscInt_FMT " cannot be less than 1", Nsubnet);
126: network->cloneshared->Nsubnet = Nsubnet;
127: network->cloneshared->nsubnet = 0; /* initial value; will be determined by DMNetworkAddSubnetwork() */
128: PetscCall(PetscCalloc1(Nsubnet, &network->cloneshared->subnet));
130: /* num of shared vertices */
131: network->cloneshared->nsvtx = 0;
132: network->cloneshared->Nsvtx = 0;
133: PetscFunctionReturn(PETSC_SUCCESS);
134: }
136: /*@C
137: DMNetworkAddSubnetwork - Add a subnetwork
139: Collective
141: Input Parameters:
142: + dm - the `DMNETWORK` object
143: . name - name of the subnetwork
144: . ne - number of local edges of this subnetwork
145: - edgelist - list of edges for this subnetwork; this is a one-dimensional array with pairs of entries being the two vertices (in the global numbering
146:              of the vertices) of each edge,
147: $ [first vertex of first edge, second vertex of first edge, first vertex of second edge, second vertex of second edge, etc]
149: Output Parameter:
150: . netnum - global index of the subnetwork
152: Level: beginner
154: Notes:
155:   There is no copy involved in this operation; only the pointer is referenced. The edgelist should
156:   not be destroyed before the call to `DMNetworkLayoutSetUp()`.
158:   A network can consist of a single subnetwork OR multiple subnetworks. A single subnetwork can be read in either serial or parallel.
159:   For multiple subnetworks,
160:   each subnetwork topology needs to be set on a unique rank and the communicator size needs to be at least equal to the number of subnetworks.
162: Example usage:
163: Consider the following networks\:
164: 1) A single subnetwork\:
165: .vb
166: network 0:
167: rank[0]:
168: v0 --> v2; v1 --> v2
169: rank[1]:
170: v3 --> v5; v4 --> v5
171: .ve
173: The resulting input network 0\:
174: .vb
175: rank[0]:
176: ne = 2
177: edgelist = [0 2 | 1 2]
179: rank[1]:
180: ne = 2
181: edgelist = [3 5 | 4 5]
182: .ve
183: 2) Two subnetworks\:
184: .vb
185: subnetwork 0:
186: rank[0]:
187: v0 --> v2; v2 --> v1; v1 --> v3;
188: subnetwork 1:
189: rank[1]:
190: v0 --> v3; v3 --> v2; v2 --> v1;
191: .ve
193: The resulting input subnetwork 0\:
194: .vb
195: rank[0]:
196: ne = 3
197: edgelist = [0 2 | 2 1 | 1 3]
199: rank[1]:
200: ne = 0
201: edgelist = NULL
202: .ve
203: subnetwork 1\:
204: .vb
205: rank[0]:
206: ne = 0
207: edgelist = NULL
209: rank[1]:
210:  ne = 3
      edgelist = [0 3 | 3 2 | 2 1]
211: .ve
213: .seealso: `DM`, `DMNETWORK`, `DMNetworkCreate()`, `DMNetworkSetNumSubnetworks()`
214: @*/
215: PetscErrorCode DMNetworkAddSubnetwork(DM dm, const char *name, PetscInt ne, PetscInt edgelist[], PetscInt *netnum)
216: {
217: DM_Network *network = (DM_Network *)dm->data;
218: PetscInt i, Nedge, j, Nvtx, nvtx, nvtx_min = -1, nvtx_max = 0;
219: PetscBT table;
221: PetscFunctionBegin;
222: for (i = 0; i < ne; i++) PetscCheck(edgelist[2 * i] != edgelist[2 * i + 1], PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Edge %" PetscInt_FMT " has the same vertex %" PetscInt_FMT " at each endpoint", i, edgelist[2 * i]);
224: i = 0;
225: if (ne) nvtx_min = nvtx_max = edgelist[0];
226: for (j = 0; j < ne; j++) {
227: nvtx_min = PetscMin(nvtx_min, edgelist[i]);
228: nvtx_max = PetscMax(nvtx_max, edgelist[i]);
229: i++;
230: nvtx_min = PetscMin(nvtx_min, edgelist[i]);
231: nvtx_max = PetscMax(nvtx_max, edgelist[i]);
232: i++;
233: }
234:   Nvtx = nvtx_max - nvtx_min + 1; /* approximate total local nvtx for this subnet */
236: /* Get exact local nvtx for this subnet: counting local values between nvtx_min and nvtx_max */
237: PetscCall(PetscBTCreate(Nvtx, &table));
238: PetscCall(PetscBTMemzero(Nvtx, table));
239: i = 0;
240: for (j = 0; j < ne; j++) {
241: PetscCall(PetscBTSet(table, edgelist[i++] - nvtx_min));
242: PetscCall(PetscBTSet(table, edgelist[i++] - nvtx_min));
243: }
244: nvtx = 0;
245: for (j = 0; j < Nvtx; j++) {
246: if (PetscBTLookup(table, j)) nvtx++;
247: }
249: /* Get global total Nvtx = max(edgelist[])+1 for this subnet */
250: PetscCall(MPIU_Allreduce(&nvtx_max, &Nvtx, 1, MPIU_INT, MPI_MAX, PetscObjectComm((PetscObject)dm)));
251: Nvtx++;
252: PetscCall(PetscBTDestroy(&table));
254: /* Get global total Nedge for this subnet */
255: PetscCall(MPIU_Allreduce(&ne, &Nedge, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)dm)));
257: i = network->cloneshared->nsubnet;
258: if (name) PetscCall(PetscStrncpy(network->cloneshared->subnet[i].name, name, sizeof(network->cloneshared->subnet[i].name)));
259: network->cloneshared->subnet[i].nvtx = nvtx; /* include ghost vertices */
260: network->cloneshared->subnet[i].nedge = ne;
261: network->cloneshared->subnet[i].edgelist = edgelist;
262: network->cloneshared->subnet[i].Nvtx = Nvtx;
263: network->cloneshared->subnet[i].Nedge = Nedge;
265: /* ----------------------------------------------------------
266: p=v or e;
267: subnet[0].pStart = 0
268: subnet[i+1].pStart = subnet[i].pEnd = subnet[i].pStart + (nE[i] or NV[i])
269: ----------------------------------------------------------------------- */
270: /* GLOBAL subnet[].vStart and vEnd, used by DMNetworkLayoutSetUp() */
271: network->cloneshared->subnet[i].vStart = network->cloneshared->NVertices;
272: network->cloneshared->subnet[i].vEnd = network->cloneshared->subnet[i].vStart + network->cloneshared->subnet[i].Nvtx; /* global vEnd of subnet[i] */
274: network->cloneshared->nVertices += nvtx; /* include ghost vertices */
275: network->cloneshared->NVertices += network->cloneshared->subnet[i].Nvtx;
277: /* LOCAL subnet[].eStart and eEnd, used by DMNetworkLayoutSetUp() */
278: network->cloneshared->subnet[i].eStart = network->cloneshared->nEdges;
279: network->cloneshared->subnet[i].eEnd = network->cloneshared->subnet[i].eStart + ne;
280: network->cloneshared->nEdges += ne;
281: network->cloneshared->NEdges += network->cloneshared->subnet[i].Nedge;
283: PetscCall(PetscStrncpy(network->cloneshared->subnet[i].name, name, sizeof(network->cloneshared->subnet[i].name)));
284: if (netnum) *netnum = network->cloneshared->nsubnet;
285: network->cloneshared->nsubnet++;
286: PetscFunctionReturn(PETSC_SUCCESS);
287: }
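
/*
  A minimal usage sketch (illustrative only, not part of this file), assuming a two-rank run matching
  example 1) documented above; the subnetwork name "net0" and all variable names here are assumptions:

    DM          dm;
    PetscMPIInt rank;
    PetscInt    netnum;
    PetscInt    edges0[] = {0, 2, 1, 2}; // rank 0: v0 --> v2, v1 --> v2
    PetscInt    edges1[] = {3, 5, 4, 5}; // rank 1: v3 --> v5, v4 --> v5

    PetscCall(DMNetworkCreate(PETSC_COMM_WORLD, &dm));
    PetscCall(DMNetworkSetNumSubNetworks(dm, PETSC_DECIDE, 1)); // one global subnetwork
    PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
    PetscCall(DMNetworkAddSubnetwork(dm, "net0", 2, rank ? edges1 : edges0, &netnum));
    PetscCall(DMNetworkLayoutSetUp(dm)); // edges0/edges1 must stay valid until this call
*/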
289: /*@C
290: DMNetworkSharedVertexGetInfo - Get info of a shared vertex struct, see petsc/private/dmnetworkimpl.h
292: Not Collective
294: Input Parameters:
295: + dm - the `DM` object
296: - v - vertex point
298: Output Parameters:
299: + gidx - global number of this shared vertex in the internal dmplex
300: . n - number of subnetworks that share this vertex
301: - sv   - array of size 2*n: (sv[2*i], sv[2*i+1]) = (net[i], idx[i]), i=0,...,n-1
303: Level: intermediate
305: .seealso: `DM`, `DMNETWORK`, `DMNetworkGetSharedVertices()`
306: @*/
307: PetscErrorCode DMNetworkSharedVertexGetInfo(DM dm, PetscInt v, PetscInt *gidx, PetscInt *n, const PetscInt **sv)
308: {
309: DM_Network *network = (DM_Network *)dm->data;
310: SVtx *svtx = network->cloneshared->svtx;
311: PetscInt i, gidx_tmp;
313: PetscFunctionBegin;
314: PetscCall(DMNetworkGetGlobalVertexIndex(dm, v, &gidx_tmp));
315: PetscCall(PetscHMapIGetWithDefault(network->cloneshared->svtable, gidx_tmp + 1, 0, &i));
316: PetscCheck(i > 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "input vertex is not a shared vertex");
318: i--;
319: if (gidx) *gidx = gidx_tmp;
320: if (n) *n = svtx[i].n;
321: if (sv) *sv = svtx[i].sv;
322: PetscFunctionReturn(PETSC_SUCCESS);
323: }
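
/*
  A hedged sketch (not part of this file) of querying one of the local shared vertices returned by
  DMNetworkGetSharedVertices(); "dm" and the shared vertex point "v" are assumed to exist:

    PetscInt        gidx, n, i;
    const PetscInt *sv;

    PetscCall(DMNetworkSharedVertexGetInfo(dm, v, &gidx, &n, &sv));
    for (i = 0; i < n; i++) PetscCall(PetscPrintf(PETSC_COMM_SELF, "gidx %" PetscInt_FMT ": subnet %" PetscInt_FMT ", local index %" PetscInt_FMT "\n", gidx, sv[2 * i], sv[2 * i + 1]));
*/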
325: /*
326: VtxGetInfo - Get info of an input vertex=(net,idx)
328: Input Parameters:
329: + Nsvtx - global num of shared vertices
330: . svtx - array of shared vertices (global)
331: - (net,idx) - subnet number and local index for a vertex
333: Output Parameters:
334: + gidx - global index of (net,idx)
335: . svtype - see petsc/private/dmnetworkimpl.h
336: - svtx_idx - ordering in the svtx array
337: */
338: static inline PetscErrorCode VtxGetInfo(PetscInt Nsvtx, SVtx *svtx, PetscInt net, PetscInt idx, PetscInt *gidx, SVtxType *svtype, PetscInt *svtx_idx)
339: {
340: PetscInt i, j, *svto, g_idx;
341: SVtxType vtype;
343: PetscFunctionBegin;
344: if (!Nsvtx) PetscFunctionReturn(PETSC_SUCCESS);
346: g_idx = -1;
347: vtype = SVNONE;
349: for (i = 0; i < Nsvtx; i++) {
350: if (net == svtx[i].sv[0] && idx == svtx[i].sv[1]) {
351: g_idx = svtx[i].gidx;
352: vtype = SVFROM;
353: } else { /* loop over svtx[i].n */
354: for (j = 1; j < svtx[i].n; j++) {
355: svto = svtx[i].sv + 2 * j;
356: if (net == svto[0] && idx == svto[1]) {
357: /* input vertex net.idx is a shared to_vertex, output its global index and its svtype */
358: g_idx = svtx[i].gidx; /* output gidx for to_vertex */
359: vtype = SVTO;
360: }
361: }
362: }
363: if (vtype != SVNONE) break;
364: }
365: if (gidx) *gidx = g_idx;
366: if (svtype) *svtype = vtype;
367: if (svtx_idx) *svtx_idx = i;
368: PetscFunctionReturn(PETSC_SUCCESS);
369: }
371: /*
372:   TableAddSVtx - Add a new shared vertex from sedgelist[k] to the hash table svta
374: Input: network, sedgelist, k, svta
375: Output: svta, tdata, ta2sv
376: */
377: static inline PetscErrorCode TableAddSVtx(DM_Network *network, PetscInt *sedgelist, PetscInt k, PetscHMapI svta, PetscInt *tdata, PetscInt *ta2sv)
378: {
379: PetscInt net, idx, gidx;
381: PetscFunctionBegin;
382: net = sedgelist[k];
383: idx = sedgelist[k + 1];
384: gidx = network->cloneshared->subnet[net].vStart + idx;
385: PetscCall(PetscHMapISet(svta, gidx + 1, *tdata + 1));
387: ta2sv[*tdata] = k; /* maps tdata to index of sedgelist */
388: (*tdata)++;
389: PetscFunctionReturn(PETSC_SUCCESS);
390: }
392: /*
393: SharedVtxCreate - Create an array of global shared vertices. See SVtx and SVtxType in dmnetworkimpl.h
395: Input: dm, Nsedgelist, sedgelist
397: Note: Output svtx is organized as
398: sv(net[0],idx[0]) --> sv(net[1],idx[1])
399:                     --> sv(net[2],idx[2])
400: ...
401: --> sv(net[n-1],idx[n-1])
402: and net[0] < net[1] < ... < net[n-1]
403: where sv[0] has SVFROM type, sv[i], i>0, has SVTO type.
404: */
405: static PetscErrorCode SharedVtxCreate(DM dm, PetscInt Nsedgelist, PetscInt *sedgelist)
406: {
407: SVtx *svtx = NULL;
408: PetscInt *sv, k, j, nsv, *tdata, **ta2sv;
409: PetscHMapI *svtas;
410: PetscInt gidx, net, idx, i, nta, ita, idx_from, idx_to, n, *net_tmp, *idx_tmp, *gidx_tmp;
411: DM_Network *network = (DM_Network *)dm->data;
412: PetscHashIter ppos;
414: PetscFunctionBegin;
415:   /* (1) Create an array of hash tables svtas to map (net,idx) -> gidx; one svtas[] for each group of shared/merged vertices */
416: PetscCall(PetscCalloc3(Nsedgelist, &svtas, Nsedgelist, &tdata, 2 * Nsedgelist, &ta2sv));
418:   k   = 0; /* sedgelist entry counter; advances by 4 per shared-vertex pair, i.e. k = 4*j */
419: nta = 0; /* num of svta tables created = num of groups of shared vertices */
421: /* for j=0 */
422: PetscCall(PetscHMapICreateWithSize(2 * Nsedgelist, svtas + nta));
423: PetscCall(PetscMalloc1(2 * Nsedgelist, &ta2sv[nta]));
425: PetscCall(TableAddSVtx(network, sedgelist, k, svtas[nta], &tdata[nta], ta2sv[nta]));
426: PetscCall(TableAddSVtx(network, sedgelist, k + 2, svtas[nta], &tdata[nta], ta2sv[nta]));
427: nta++;
428: k += 4;
430: for (j = 1; j < Nsedgelist; j++) { /* j: sedgelist counter */
431: for (ita = 0; ita < nta; ita++) {
432: /* vfrom */
433: net = sedgelist[k];
434: idx = sedgelist[k + 1];
435: gidx = network->cloneshared->subnet[net].vStart + idx; /* global index of the vertex net.idx before merging shared vertices */
436: PetscCall(PetscHMapIGetWithDefault(svtas[ita], gidx + 1, 0, &idx_from));
438: /* vto */
439: net = sedgelist[k + 2];
440: idx = sedgelist[k + 3];
441: gidx = network->cloneshared->subnet[net].vStart + idx;
442: PetscCall(PetscHMapIGetWithDefault(svtas[ita], gidx + 1, 0, &idx_to));
444: if (idx_from || idx_to) { /* vfrom or vto is on table svtas[ita] */
445: idx_from--;
446: idx_to--;
447: if (idx_from < 0) { /* vto is on svtas[ita] */
448: PetscCall(TableAddSVtx(network, sedgelist, k, svtas[ita], &tdata[ita], ta2sv[ita]));
449: break;
450: } else if (idx_to < 0) {
451: PetscCall(TableAddSVtx(network, sedgelist, k + 2, svtas[ita], &tdata[ita], ta2sv[ita]));
452: break;
453: }
454: }
455: }
457: if (ita == nta) {
458: PetscCall(PetscHMapICreateWithSize(2 * Nsedgelist, svtas + nta));
459: PetscCall(PetscMalloc1(2 * Nsedgelist, &ta2sv[nta]));
461: PetscCall(TableAddSVtx(network, sedgelist, k, svtas[nta], &tdata[nta], ta2sv[nta]));
462: PetscCall(TableAddSVtx(network, sedgelist, k + 2, svtas[nta], &tdata[nta], ta2sv[nta]));
463: nta++;
464: }
465: k += 4;
466: }
468:   /* (2) Create svtable for querying shared vertices by gidx */
469: PetscCall(PetscHMapICreateWithSize(nta, &network->cloneshared->svtable));
471: /* (3) Construct svtx from svtas
472:      svtx: array of SVtx; each entry maps sv[0]=(net[0],idx[0]) to the vertices sv[k], k=1,...,n-1. */
473: PetscCall(PetscMalloc1(nta, &svtx));
474: for (nsv = 0; nsv < nta; nsv++) {
475: /* for a single svtx, put shared vertices in ascending order of gidx */
476: PetscCall(PetscHMapIGetSize(svtas[nsv], &n));
477: PetscCall(PetscCalloc1(2 * n, &sv));
478: PetscCall(PetscMalloc3(n, &gidx_tmp, n, &net_tmp, n, &idx_tmp));
479: svtx[nsv].sv = sv;
480: svtx[nsv].n = n;
481: svtx[nsv].gidx = network->cloneshared->NVertices; /* initialization */
483: PetscHashIterBegin(svtas[nsv], ppos);
484:     for (k = 0; k < n; k++) { /* gather (gidx, net, idx) for each entry; sorted by gidx below */
485: PetscHashIterGetKey(svtas[nsv], ppos, gidx);
486: PetscHashIterGetVal(svtas[nsv], ppos, i);
487: PetscHashIterNext(svtas[nsv], ppos);
488: gidx--;
489: i--;
490: j = ta2sv[nsv][i]; /* maps i to index of sedgelist */
491: net_tmp[k] = sedgelist[j]; /* subnet number */
492: idx_tmp[k] = sedgelist[j + 1]; /* index on the subnet */
493: gidx_tmp[k] = gidx; /* gidx in un-merged dmnetwork */
494: }
496: /* current implementation requires sv[]=[net,idx] in ascending order of its gidx in un-merged dmnetwork */
497: PetscCall(PetscSortIntWithArrayPair(n, gidx_tmp, net_tmp, idx_tmp));
498: svtx[nsv].gidx = gidx_tmp[0]; /* = min(gidx) */
499: for (k = 0; k < n; k++) {
500: sv[2 * k] = net_tmp[k];
501: sv[2 * k + 1] = idx_tmp[k];
502: }
503: PetscCall(PetscFree3(gidx_tmp, net_tmp, idx_tmp));
505:     /* Set up svtable for querying shared vertices */
506: PetscCall(PetscHMapISet(network->cloneshared->svtable, svtx[nsv].gidx + 1, nsv + 1));
507: }
509: for (j = 0; j < nta; j++) {
510: PetscCall(PetscHMapIDestroy(svtas + j));
511: PetscCall(PetscFree(ta2sv[j]));
512: }
513: PetscCall(PetscFree3(svtas, tdata, ta2sv));
515: network->cloneshared->Nsvtx = nta;
516: network->cloneshared->svtx = svtx;
517: PetscFunctionReturn(PETSC_SUCCESS);
518: }
520: /*
521: GetEdgelist_Coupling - Get an integrated edgelist for dmplex from user-provided subnet[].edgelist when subnets are coupled by shared vertices
523: Input Parameters:
524: . dm - the dmnetwork object
526: Output Parameters:
527: + edges - the integrated edgelist for dmplex
528: - nmerged_ptr - num of vertices being merged
529: */
530: static PetscErrorCode GetEdgelist_Coupling(DM dm, PetscInt *edges, PetscInt *nmerged_ptr)
531: {
532: MPI_Comm comm;
533: PetscMPIInt size, rank;
534: DM_Network *network = (DM_Network *)dm->data;
535: PetscInt i, j, ctr, np;
536: PetscInt *vidxlTog, Nsv, Nsubnet = network->cloneshared->Nsubnet;
537: PetscInt *sedgelist = network->cloneshared->sedgelist, vrange;
538: PetscInt net, idx, gidx, nmerged, gidx_from, net_from, sv_idx;
539: SVtxType svtype = SVNONE;
540: SVtx *svtx;
542: PetscFunctionBegin;
543: PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
544: PetscCallMPI(MPI_Comm_rank(comm, &rank));
545: PetscCallMPI(MPI_Comm_size(comm, &size));
547: /* (1) Create global svtx[] from sedgelist */
548: /* --------------------------------------- */
549: PetscCall(SharedVtxCreate(dm, network->cloneshared->Nsvtx, sedgelist));
550: Nsv = network->cloneshared->Nsvtx;
551: svtx = network->cloneshared->svtx;
553: /* (2) Merge shared vto vertices to their vfrom vertex with same global vertex index (gidx) */
554: /* --------------------------------------------------------------------------------------- */
555:   /* (2.1) compute vrange[rank]: global index of the 1st local vertex on proc[rank] */
556: PetscCall(PetscMalloc1(network->cloneshared->nVertices, &vidxlTog));
558: PetscCallMPI(MPI_Scan(&network->cloneshared->nVertices, &vrange, 1, MPIU_INT, MPI_SUM, comm));
559: vrange -= network->cloneshared->nVertices;
561: /* (2.2) Create vidxlTog: maps UN-MERGED local vertex index i to global index gidx (plex, excluding ghost vertices) */
562: i = 0;
563: gidx = 0;
564: nmerged = 0; /* local num of merged vertices */
565: network->cloneshared->nsvtx = 0; /* local num of SVtx structs, including ghosts */
566: for (net = 0; net < Nsubnet; net++) {
567: for (idx = 0; idx < network->cloneshared->subnet[net].Nvtx; idx++) { /* Note: global subnet[net].Nvtx */
568: PetscCall(VtxGetInfo(Nsv, svtx, net, idx, &gidx_from, &svtype, &sv_idx));
569: if (svtype == SVTO) {
570: if (network->cloneshared->subnet[net].nvtx) { /* this proc owns sv_to */
571: net_from = svtx[sv_idx].sv[0]; /* subnet number of its shared vertex */
572: if (network->cloneshared->subnet[net_from].nvtx == 0) {
573: /* this proc does not own v_from, thus a ghost local vertex */
574: network->cloneshared->nsvtx++;
575: }
576: vidxlTog[i++] = gidx_from; /* gidx before merging! Bug??? */
577: nmerged++; /* a shared vertex -- merged */
578: }
579: } else {
580: if (svtype == SVFROM && network->cloneshared->subnet[net].nvtx) {
581: /* this proc owns this v_from, a new local shared vertex */
582: network->cloneshared->nsvtx++;
583: }
584: if (network->cloneshared->subnet[net].nvtx) vidxlTog[i++] = gidx;
585: gidx++;
586: }
587: }
588: }
589: #if defined(PETSC_USE_DEBUG)
590: PetscCheck(i == network->cloneshared->nVertices, PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "%" PetscInt_FMT " != %" PetscInt_FMT " nVertices", i, network->cloneshared->nVertices);
591: #endif
593: /* (2.3) Shared vertices in the subnetworks are merged, update global NVertices: np = sum(local nmerged) */
594: PetscCall(MPIU_Allreduce(&nmerged, &np, 1, MPIU_INT, MPI_SUM, comm));
595: network->cloneshared->NVertices -= np;
597: ctr = 0;
598: for (net = 0; net < Nsubnet; net++) {
599: for (j = 0; j < network->cloneshared->subnet[net].nedge; j++) {
600: /* vfrom: */
601: i = network->cloneshared->subnet[net].edgelist[2 * j] + (network->cloneshared->subnet[net].vStart - vrange);
602: edges[2 * ctr] = vidxlTog[i];
604: /* vto */
605: i = network->cloneshared->subnet[net].edgelist[2 * j + 1] + (network->cloneshared->subnet[net].vStart - vrange);
606: edges[2 * ctr + 1] = vidxlTog[i];
607: ctr++;
608: }
609: }
610: PetscCall(PetscFree(vidxlTog));
611: PetscCall(PetscFree(sedgelist)); /* created in DMNetworkAddSharedVertices() */
613: *nmerged_ptr = nmerged;
614: PetscFunctionReturn(PETSC_SUCCESS);
615: }
617: PetscErrorCode DMNetworkInitializeNonTopological(DM dm)
618: {
619: DM_Network *network = (DM_Network *)dm->data;
620: PetscInt p, pStart = network->cloneshared->pStart, pEnd = network->cloneshared->pEnd;
621: MPI_Comm comm;
623: PetscFunctionBegin;
624: PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
626: PetscCall(PetscSectionCreate(comm, &network->DataSection));
627: PetscCall(PetscSectionCreate(comm, &network->DofSection));
628: PetscCall(PetscSectionSetChart(network->DataSection, pStart, pEnd));
629: PetscCall(PetscSectionSetChart(network->DofSection, pStart, pEnd));
631: PetscCall(DMNetworkInitializeHeaderComponentData(dm));
633: for (p = 0; p < pEnd - pStart; p++) {
634: network->header[p].ndata = 0;
635: network->header[p].offset[0] = 0;
636: network->header[p].offsetvarrel[0] = 0;
637: PetscCall(PetscSectionAddDof(network->DataSection, p, network->header[p].hsize));
638: }
639: PetscFunctionReturn(PETSC_SUCCESS);
640: }
642: /*@
643: DMNetworkLayoutSetUp - Sets up the bare layout (graph) for the network
645:   Collective
647: Input Parameter:
648: . dm - the `DMNETWORK` object
650: Level: beginner
652: Notes:
653: This routine should be called after the network sizes and edgelists have been provided. It creates
654: the bare layout of the network and sets up the network to begin insertion of components.
656: All the components should be registered before calling this routine.
658: .seealso: `DM`, `DMNETWORK`, `DMNetworkSetNumSubNetworks()`, `DMNetworkAddSubnetwork()`
659: @*/
660: PetscErrorCode DMNetworkLayoutSetUp(DM dm)
661: {
662: DM_Network *network = (DM_Network *)dm->data;
663: PetscInt i, j, ctr, Nsubnet = network->cloneshared->Nsubnet, np, *edges, *subnetvtx, *subnetedge, e, v, vfrom, vto, net, globaledgeoff;
664: const PetscInt *cone;
665: MPI_Comm comm;
666: PetscMPIInt size;
667: PetscSection sectiong;
668: PetscInt nmerged = 0;
670: PetscFunctionBegin;
671: PetscCall(PetscLogEventBegin(DMNetwork_LayoutSetUp, dm, 0, 0, 0));
672: PetscCheck(network->cloneshared->nsubnet == Nsubnet, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Must call DMNetworkAddSubnetwork() %" PetscInt_FMT " times", Nsubnet);
674:   /* This implementation requires the user to input each subnet from a single process when Nsubnet>1, thus subnet[net].nvtx=subnet[net].Nvtx for net>0 */
675: for (net = 1; net < Nsubnet; net++) {
676: if (network->cloneshared->subnet[net].nvtx)
677: PetscCheck(network->cloneshared->subnet[net].nvtx == network->cloneshared->subnet[net].Nvtx, PETSC_COMM_SELF, PETSC_ERR_SUP, "subnetwork %" PetscInt_FMT " local num of vertices %" PetscInt_FMT " != %" PetscInt_FMT " global num", net,
678: network->cloneshared->subnet[net].nvtx, network->cloneshared->subnet[net].Nvtx);
679: }
681: PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
682: PetscCallMPI(MPI_Comm_size(comm, &size));
684: /* Create LOCAL edgelist in global vertex ordering for the network by concatenating local input edgelists of the subnetworks */
685: PetscCall(PetscCalloc1(2 * network->cloneshared->nEdges, &edges));
687: if (network->cloneshared->Nsvtx) { /* subnetworks are coupled via shared vertices */
688: PetscCall(GetEdgelist_Coupling(dm, edges, &nmerged));
689: } else { /* subnetworks are not coupled */
690: /* Create a 0-size svtable for query shared vertices */
691: PetscCall(PetscHMapICreate(&network->cloneshared->svtable));
692: ctr = 0;
693: for (i = 0; i < Nsubnet; i++) {
694: for (j = 0; j < network->cloneshared->subnet[i].nedge; j++) {
695: edges[2 * ctr] = network->cloneshared->subnet[i].vStart + network->cloneshared->subnet[i].edgelist[2 * j];
696: edges[2 * ctr + 1] = network->cloneshared->subnet[i].vStart + network->cloneshared->subnet[i].edgelist[2 * j + 1];
697: ctr++;
698: }
699: }
700: }
702:   /* Create network->plex; one-dimensional network, numCorners=2 */
703: PetscCall(DMCreate(comm, &network->plex));
704: PetscCall(DMSetType(network->plex, DMPLEX));
705: PetscCall(DMSetDimension(network->plex, 1));
707: if (size == 1) PetscCall(DMPlexBuildFromCellList(network->plex, network->cloneshared->nEdges, PETSC_DECIDE, 2, edges));
708: else PetscCall(DMPlexBuildFromCellListParallel(network->plex, network->cloneshared->nEdges, PETSC_DECIDE, PETSC_DECIDE, 2, edges, NULL, NULL));
710: PetscCall(DMPlexGetChart(network->plex, &network->cloneshared->pStart, &network->cloneshared->pEnd));
711: PetscCall(DMPlexGetHeightStratum(network->plex, 0, &network->cloneshared->eStart, &network->cloneshared->eEnd));
712: PetscCall(DMPlexGetHeightStratum(network->plex, 1, &network->cloneshared->vStart, &network->cloneshared->vEnd));
713: np = network->cloneshared->pEnd - network->cloneshared->pStart;
714: PetscCall(PetscCalloc2(np, &network->header, np, &network->cvalue));
716: /* Create edge and vertex arrays for the subnetworks
717: This implementation assumes that DMNetwork reads
718: (1) a single subnetwork in parallel; or
719: (2) n subnetworks using n processors, one subnetwork/processor.
720: */
721: PetscCall(PetscCalloc2(network->cloneshared->nEdges, &subnetedge, network->cloneshared->nVertices + network->cloneshared->nsvtx, &subnetvtx)); /* Maps local edge/vertex to local subnetwork's edge/vertex */
722: network->cloneshared->subnetedge = subnetedge;
723: network->cloneshared->subnetvtx = subnetvtx;
724: for (j = 0; j < Nsubnet; j++) {
725: network->cloneshared->subnet[j].edges = subnetedge;
726: subnetedge += network->cloneshared->subnet[j].nedge;
728: network->cloneshared->subnet[j].vertices = subnetvtx;
729: subnetvtx += network->cloneshared->subnet[j].nvtx;
730: }
731: network->cloneshared->svertices = subnetvtx;
733: /* Get edge ownership */
734: np = network->cloneshared->eEnd - network->cloneshared->eStart;
735: PetscCallMPI(MPI_Scan(&np, &globaledgeoff, 1, MPIU_INT, MPI_SUM, comm));
736: globaledgeoff -= np;
738: /* Setup local edge and vertex arrays for subnetworks */
739: e = 0;
740: for (i = 0; i < Nsubnet; i++) {
741: ctr = 0;
742: for (j = 0; j < network->cloneshared->subnet[i].nedge; j++) {
743: /* edge e */
744: network->header[e].index = e + globaledgeoff; /* Global edge index */
745: network->header[e].subnetid = i;
746: network->cloneshared->subnet[i].edges[j] = e;
748: /* connected vertices */
749: PetscCall(DMPlexGetCone(network->plex, e, &cone));
751: /* vertex cone[0] */
752: v = cone[0];
753: network->header[v].index = edges[2 * e]; /* Global vertex index */
754: network->header[v].subnetid = i; /* Subnetwork id */
755: if (Nsubnet == 1) {
756: network->cloneshared->subnet[i].vertices[v - network->cloneshared->vStart] = v; /* user's subnet[].idx = petsc's v */
757: } else {
758: vfrom = network->cloneshared->subnet[i].edgelist[2 * ctr]; /* =subnet[i].idx, Global index! */
759:         network->cloneshared->subnet[i].vertices[vfrom] = v; /* user's subnet[].idx = petsc's v */
760: }
762: /* vertex cone[1] */
763: v = cone[1];
764: network->header[v].index = edges[2 * e + 1]; /* Global vertex index */
765: network->header[v].subnetid = i; /* Subnetwork id */
766: if (Nsubnet == 1) {
767: network->cloneshared->subnet[i].vertices[v - network->cloneshared->vStart] = v; /* user's subnet[].idx = petsc's v */
768: } else {
769: vto = network->cloneshared->subnet[i].edgelist[2 * ctr + 1]; /* =subnet[i].idx, Global index! */
770:         network->cloneshared->subnet[i].vertices[vto] = v; /* user's subnet[].idx = petsc's v */
771: }
773: e++;
774: ctr++;
775: }
776: }
777: PetscCall(PetscFree(edges));
779: /* Set local vertex array for the subnetworks */
780: j = 0;
781: for (v = network->cloneshared->vStart; v < network->cloneshared->vEnd; v++) {
782: /* local shared vertex */
783: PetscCall(PetscHMapIGetWithDefault(network->cloneshared->svtable, network->header[v].index + 1, 0, &i));
784: if (i) network->cloneshared->svertices[j++] = v;
785: }
787: /* Create a global section to be used by DMNetworkIsGhostVertex() which is a non-collective routine */
788: /* see snes_tutorials_network-ex1_4 */
789:   PetscCall(DMGetGlobalSection(network->plex, &sectiong));
790: /* Initialize non-topological data structures */
791: PetscCall(DMNetworkInitializeNonTopological(dm));
792: PetscCall(PetscLogEventEnd(DMNetwork_LayoutSetUp, dm, 0, 0, 0));
793: PetscFunctionReturn(PETSC_SUCCESS);
794: }
796: /*@C
797: DMNetworkGetSubnetwork - Returns the information about a requested subnetwork
799: Not Collective
801: Input Parameters:
802: + dm - the `DMNETWORK` object
803: - netnum - the global index of the subnetwork
805: Output Parameters:
806: + nv - number of vertices (local)
807: . ne - number of edges (local)
808: . vtx - local vertices of the subnetwork
809: - edge - local edges of the subnetwork
811: Level: intermediate
813: Notes:
814:   Cannot call this routine before `DMNetworkLayoutSetUp()`
816: The local vertices returned on each rank are determined by `DMNETWORK`. The user does not have any control over what vertices are local.
818: .seealso: `DM`, `DMNETWORK`, `DMNetworkCreate()`, `DMNetworkAddSubnetwork()`, `DMNetworkLayoutSetUp()`
819: @*/
820: PetscErrorCode DMNetworkGetSubnetwork(DM dm, PetscInt netnum, PetscInt *nv, PetscInt *ne, const PetscInt **vtx, const PetscInt **edge)
821: {
822: DM_Network *network = (DM_Network *)dm->data;
824: PetscFunctionBegin;
825: PetscCheck(netnum < network->cloneshared->Nsubnet, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Subnet index %" PetscInt_FMT " exceeds the num of subnets %" PetscInt_FMT, netnum, network->cloneshared->Nsubnet);
826: if (nv) *nv = network->cloneshared->subnet[netnum].nvtx;
827: if (ne) *ne = network->cloneshared->subnet[netnum].nedge;
828: if (vtx) *vtx = network->cloneshared->subnet[netnum].vertices;
829: if (edge) *edge = network->cloneshared->subnet[netnum].edges;
830: PetscFunctionReturn(PETSC_SUCCESS);
831: }
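
/*
  An illustrative sketch (not part of this file) of traversing the local vertices of subnetwork 0
  after DMNetworkLayoutSetUp(); "dm" is assumed to be a set-up DMNETWORK:

    PetscInt        nv, ne, i, gidx;
    const PetscInt *vtx, *edges;

    PetscCall(DMNetworkGetSubnetwork(dm, 0, &nv, &ne, &vtx, &edges));
    for (i = 0; i < nv; i++) PetscCall(DMNetworkGetGlobalVertexIndex(dm, vtx[i], &gidx));
*/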
833: /*@
834: DMNetworkAddSharedVertices - Add shared vertices that connect two given subnetworks
836: Collective
838: Input Parameters:
839: + dm - the `DMNETWORK` object
840: . anetnum - first subnetwork global numbering returned by `DMNetworkAddSubnetwork()`
841: . bnetnum - second subnetwork global numbering returned by `DMNetworkAddSubnetwork()`
842: . nsvtx - number of vertices that are shared by the two subnetworks
843: . asvtx   - array of nsvtx vertex indices in the first subnetwork
844: - bsvtx   - array of nsvtx vertex indices in the second subnetwork
846: Level: beginner
848: .seealso: `DM`, `DMNETWORK`, `DMNetworkCreate()`, `DMNetworkAddSubnetwork()`, `DMNetworkGetSharedVertices()`
849: @*/
850: PetscErrorCode DMNetworkAddSharedVertices(DM dm, PetscInt anetnum, PetscInt bnetnum, PetscInt nsvtx, PetscInt asvtx[], PetscInt bsvtx[])
851: {
852: DM_Network *network = (DM_Network *)dm->data;
853: PetscInt i, nsubnet = network->cloneshared->Nsubnet, *sedgelist, Nsvtx = network->cloneshared->Nsvtx;
855: PetscFunctionBegin;
856: PetscCheck(anetnum != bnetnum, PetscObjectComm((PetscObject)dm), PETSC_ERR_USER, "Subnetworks must have different netnum");
857: PetscCheck(anetnum >= 0 && bnetnum >= 0, PetscObjectComm((PetscObject)dm), PETSC_ERR_USER, "netnum cannot be negative");
858: if (!Nsvtx) {
859: /* allocate network->sedgelist to hold at most 2*nsubnet pairs of shared vertices */
860: PetscCall(PetscMalloc1(2 * 4 * nsubnet, &network->cloneshared->sedgelist));
861: }
863: sedgelist = network->cloneshared->sedgelist;
864: for (i = 0; i < nsvtx; i++) {
865: sedgelist[4 * Nsvtx] = anetnum;
866: sedgelist[4 * Nsvtx + 1] = asvtx[i];
867: sedgelist[4 * Nsvtx + 2] = bnetnum;
868: sedgelist[4 * Nsvtx + 3] = bsvtx[i];
869: Nsvtx++;
870: }
871: PetscCheck(Nsvtx <= 2 * nsubnet, PETSC_COMM_SELF, PETSC_ERR_SUP, "allocate more space for coupling edgelist");
872: network->cloneshared->Nsvtx = Nsvtx;
873: PetscFunctionReturn(PETSC_SUCCESS);
874: }
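
/*
  A hedged sketch (not part of this file) of coupling two subnetworks through one shared vertex:
  vertex 1 of subnetwork 0 is merged with vertex 0 of subnetwork 1; the subnetwork numbers and
  vertex indices are assumptions for illustration:

    PetscInt asvtx[1] = {1}, bsvtx[1] = {0};

    PetscCall(DMNetworkAddSharedVertices(dm, 0, 1, 1, asvtx, bsvtx));
    PetscCall(DMNetworkLayoutSetUp(dm)); // shared vertices must be added before the layout is set up
*/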
876: /*@C
877: DMNetworkGetSharedVertices - Returns the info for the shared vertices
879: Not Collective
881: Input Parameter:
882: . dm - the `DMNETWORK` object
884: Output Parameters:
885: + nsv - number of local shared vertices
886: - svtx - local shared vertices
888: Level: intermediate
890: Notes:
891:   Cannot call this routine before `DMNetworkLayoutSetUp()`
893: .seealso: `DM`, `DMNETWORK`, `DMNetworkGetSubnetwork()`, `DMNetworkLayoutSetUp()`, `DMNetworkAddSharedVertices()`
894: @*/
895: PetscErrorCode DMNetworkGetSharedVertices(DM dm, PetscInt *nsv, const PetscInt **svtx)
896: {
897: DM_Network *net = (DM_Network *)dm->data;
899: PetscFunctionBegin;
901: if (nsv) *nsv = net->cloneshared->nsvtx;
902: if (svtx) *svtx = net->cloneshared->svertices;
903: PetscFunctionReturn(PETSC_SUCCESS);
904: }
906: /*@C
907: DMNetworkRegisterComponent - Registers the network component
909: Logically Collective
911: Input Parameters:
912: + dm - the `DMNETWORK` object
913: . name - the component name
914: - size - the storage size in bytes for this component data
916: Output Parameter:
917: . key - an integer key that defines the component
919: Level: beginner
921: Note:
922:   This routine should be called by all processors before calling `DMNetworkLayoutSetUp()`.
924: .seealso: `DM`, `DMNETWORK`, `DMNetworkCreate()`, `DMNetworkLayoutSetUp()`
925: @*/
926: PetscErrorCode DMNetworkRegisterComponent(DM dm, const char *name, size_t size, PetscInt *key)
927: {
928: DM_Network *network = (DM_Network *)dm->data;
929: DMNetworkComponent *component = NULL, *newcomponent = NULL;
930: PetscBool flg = PETSC_FALSE;
931: PetscInt i;
933: PetscFunctionBegin;
934: if (!network->component) PetscCall(PetscCalloc1(network->max_comps_registered, &network->component));
936: for (i = 0; i < network->ncomponent; i++) {
937: PetscCall(PetscStrcmp(network->component[i].name, name, &flg));
938: if (flg) {
939: *key = i;
940: PetscFunctionReturn(PETSC_SUCCESS);
941: }
942: }
944: if (network->ncomponent == network->max_comps_registered) {
945: /* Reached max allowed so resize component */
946: network->max_comps_registered += 2;
947: PetscCall(PetscCalloc1(network->max_comps_registered, &newcomponent));
948: /* Copy over the previous component info */
949: for (i = 0; i < network->ncomponent; i++) {
950: PetscCall(PetscStrncpy(newcomponent[i].name, network->component[i].name, sizeof(newcomponent[i].name)));
951: newcomponent[i].size = network->component[i].size;
952: }
953: /* Free old one */
954: PetscCall(PetscFree(network->component));
955: /* Update pointer */
956: network->component = newcomponent;
957: }
959: component = &network->component[network->ncomponent];
961: PetscCall(PetscStrncpy(component->name, name, sizeof(component->name)));
962: component->size = size / sizeof(DMNetworkComponentGenericDataType);
963: *key = network->ncomponent;
964: network->ncomponent++;
965: PetscFunctionReturn(PETSC_SUCCESS);
966: }
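
/*
  An illustrative sketch (not part of this file) of registering a user-defined component; the struct
  "UserBranch" and the key variable "branchkey" are hypothetical:

    typedef struct {
      PetscScalar resistance, reactance;
    } UserBranch;

    PetscInt branchkey;
    PetscCall(DMNetworkRegisterComponent(dm, "branch", sizeof(UserBranch), &branchkey));
    // branchkey is later passed to DMNetworkAddComponent() at the desired edges
*/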
968: /*@
969: DMNetworkGetNumVertices - Get the local and global number of vertices for the entire network.
971: Not Collective
973: Input Parameter:
974: . dm - the `DMNETWORK` object
976: Output Parameters:
977: + nVertices - the local number of vertices
978: - NVertices - the global number of vertices
980: Level: beginner
982: .seealso: `DM`, `DMNETWORK`, `DMNetworkGetNumEdges()`
983: @*/
984: PetscErrorCode DMNetworkGetNumVertices(DM dm, PetscInt *nVertices, PetscInt *NVertices)
985: {
986: DM_Network *network = (DM_Network *)dm->data;
988: PetscFunctionBegin;
990: if (nVertices) {
991: PetscAssertPointer(nVertices, 2);
992: *nVertices = network->cloneshared->nVertices;
993: }
994: if (NVertices) {
995: PetscAssertPointer(NVertices, 3);
996: *NVertices = network->cloneshared->NVertices;
997: }
998: PetscFunctionReturn(PETSC_SUCCESS);
999: }
1001: /*@
1002: DMNetworkGetNumEdges - Get the local and global number of edges for the entire network.
1004: Not Collective
1006: Input Parameter:
1007: . dm - the `DMNETWORK` object
1009: Output Parameters:
1010: + nEdges - the local number of edges
1011: - NEdges - the global number of edges
1013: Level: beginner
1015: .seealso: `DM`, `DMNETWORK`, `DMNetworkGetNumVertices()`
1016: @*/
1017: PetscErrorCode DMNetworkGetNumEdges(DM dm, PetscInt *nEdges, PetscInt *NEdges)
1018: {
1019: DM_Network *network = (DM_Network *)dm->data;
1021: PetscFunctionBegin;
1023: if (nEdges) {
1024: PetscAssertPointer(nEdges, 2);
1025: *nEdges = network->cloneshared->nEdges;
1026: }
1027: if (NEdges) {
1028: PetscAssertPointer(NEdges, 3);
1029: *NEdges = network->cloneshared->NEdges;
1030: }
1031: PetscFunctionReturn(PETSC_SUCCESS);
1032: }
1034: /*@
1035: DMNetworkGetVertexRange - Get the bounds [start, end) for the local vertices
1037: Not Collective
1039: Input Parameter:
1040: . dm - the `DMNETWORK` object
1042: Output Parameters:
1043: + vStart - the first vertex point
1044: - vEnd - one beyond the last vertex point
1046: Level: beginner
1048: .seealso: `DM`, `DMNETWORK`, `DMNetworkGetEdgeRange()`
1049: @*/
1050: PetscErrorCode DMNetworkGetVertexRange(DM dm, PetscInt *vStart, PetscInt *vEnd)
1051: {
1052: DM_Network *network = (DM_Network *)dm->data;
1054: PetscFunctionBegin;
1055: if (vStart) *vStart = network->cloneshared->vStart;
1056: if (vEnd) *vEnd = network->cloneshared->vEnd;
1057: PetscFunctionReturn(PETSC_SUCCESS);
1058: }
1060: /*@
1061: DMNetworkGetEdgeRange - Get the bounds [start, end) for the local edges
1063: Not Collective
1065: Input Parameter:
1066: . dm - the `DMNETWORK` object
1068: Output Parameters:
1069: + eStart - The first edge point
1070: - eEnd - One beyond the last edge point
1072: Level: beginner
1074: .seealso: `DM`, `DMNETWORK`, `DMNetworkGetVertexRange()`
1075: @*/
1076: PetscErrorCode DMNetworkGetEdgeRange(DM dm, PetscInt *eStart, PetscInt *eEnd)
1077: {
1078: DM_Network *network = (DM_Network *)dm->data;
1080: PetscFunctionBegin;
1082: if (eStart) *eStart = network->cloneshared->eStart;
1083: if (eEnd) *eEnd = network->cloneshared->eEnd;
1084: PetscFunctionReturn(PETSC_SUCCESS);
1085: }
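
/*
  A minimal sketch (not part of this file) of looping over the local edge and vertex points using
  the two range queries above; note that these are plex point numbers, not user subnetwork indices:

    PetscInt eStart, eEnd, vStart, vEnd, e, v, ncomp;

    PetscCall(DMNetworkGetEdgeRange(dm, &eStart, &eEnd));
    PetscCall(DMNetworkGetVertexRange(dm, &vStart, &vEnd));
    for (e = eStart; e < eEnd; e++) PetscCall(DMNetworkGetNumComponents(dm, e, &ncomp));
    for (v = vStart; v < vEnd; v++) PetscCall(DMNetworkGetNumComponents(dm, v, &ncomp));
*/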
1087: PetscErrorCode DMNetworkGetIndex(DM dm, PetscInt p, PetscInt *index)
1088: {
1089: DM_Network *network = (DM_Network *)dm->data;
1091: PetscFunctionBegin;
1092: if (network->header) {
1093: *index = network->header[p].index;
1094: } else {
1095: PetscInt offsetp;
1096: DMNetworkComponentHeader header;
1098: PetscCall(PetscSectionGetOffset(network->DataSection, p, &offsetp));
1099: header = (DMNetworkComponentHeader)(network->componentdataarray + offsetp);
1100: *index = header->index;
1101: }
1102: PetscFunctionReturn(PETSC_SUCCESS);
1103: }
1105: PetscErrorCode DMNetworkGetSubnetID(DM dm, PetscInt p, PetscInt *subnetid)
1106: {
1107: DM_Network *network = (DM_Network *)dm->data;
1109: PetscFunctionBegin;
1110: if (network->header) {
1111: *subnetid = network->header[p].subnetid;
1112: } else {
1113: PetscInt offsetp;
1114: DMNetworkComponentHeader header;
1116: PetscCall(PetscSectionGetOffset(network->DataSection, p, &offsetp));
1117: header = (DMNetworkComponentHeader)(network->componentdataarray + offsetp);
1118: *subnetid = header->subnetid;
1119: }
1120: PetscFunctionReturn(PETSC_SUCCESS);
1121: }
1123: /*@
1124: DMNetworkGetGlobalEdgeIndex - Get the global numbering for the edge on the network
1126: Not Collective
1128: Input Parameters:
1129: + dm - `DMNETWORK` object
1130: - p - edge point
1132: Output Parameter:
1133: . index - the global numbering for the edge
1135: Level: intermediate
1137: .seealso: `DM`, `DMNETWORK`, `DMNetworkGetGlobalVertexIndex()`
1138: @*/
1139: PetscErrorCode DMNetworkGetGlobalEdgeIndex(DM dm, PetscInt p, PetscInt *index)
1140: {
1141: PetscFunctionBegin;
1142: PetscCall(DMNetworkGetIndex(dm, p, index));
1143: PetscFunctionReturn(PETSC_SUCCESS);
1144: }
1146: /*@
1147: DMNetworkGetGlobalVertexIndex - Get the global numbering for the vertex on the network
1149: Not Collective
1151: Input Parameters:
1152: + dm - `DMNETWORK` object
1153: - p - vertex point
1155: Output Parameter:
1156: . index - the global numbering for the vertex
1158: Level: intermediate
1160: .seealso: `DM`, `DMNETWORK`, `DMNetworkGetGlobalEdgeIndex()`, `DMNetworkGetLocalVertexIndex()`
1161: @*/
1162: PetscErrorCode DMNetworkGetGlobalVertexIndex(DM dm, PetscInt p, PetscInt *index)
1163: {
1164: PetscFunctionBegin;
1165: PetscCall(DMNetworkGetIndex(dm, p, index));
1166: PetscFunctionReturn(PETSC_SUCCESS);
1167: }
1169: /*@
1170: DMNetworkGetNumComponents - Get the number of components at a vertex/edge
1172: Not Collective
1174: Input Parameters:
1175: + dm - the `DMNETWORK` object
1176: - p - vertex/edge point
1178: Output Parameter:
1179: . numcomponents - Number of components at the vertex/edge
1181: Level: beginner
1183: .seealso: `DM`, `DMNETWORK`, `DMNetworkRegisterComponent()`, `DMNetworkAddComponent()`
1184: @*/
1185: PetscErrorCode DMNetworkGetNumComponents(DM dm, PetscInt p, PetscInt *numcomponents)
1186: {
1187: PetscInt offset;
1188: DM_Network *network = (DM_Network *)dm->data;
1190: PetscFunctionBegin;
1191: PetscCall(PetscSectionGetOffset(network->DataSection, p, &offset));
1192: *numcomponents = ((DMNetworkComponentHeader)(network->componentdataarray + offset))->ndata;
1193: PetscFunctionReturn(PETSC_SUCCESS);
1194: }
1196: /*@
1197: DMNetworkGetLocalVecOffset - Get the offset for accessing the variables associated with a component at the given vertex/edge from the local vector
1199: Not Collective
1201: Input Parameters:
1202: + dm - the `DMNETWORK` object
1203: . p - the edge or vertex point
1204: - compnum - component number; use ALL_COMPONENTS if no specific component is requested
1206: Output Parameter:
1207: . offset - the local offset
1209: Level: intermediate
1211: Notes:
1212: These offsets can be passed to `MatSetValuesLocal()` for matrices obtained with `DMCreateMatrix()`.
1214: For vectors obtained with `DMCreateLocalVector()` the offsets can be used with `VecSetValues()`.
1216: For vectors obtained with `DMCreateLocalVector()` and the array obtained with `VecGetArray`(vec,&array) you can access or set
1217: the vector values with array[offset].
1219: For vectors obtained with `DMCreateGlobalVector()` the offsets can be used with `VecSetValuesLocal()`.
1221: .seealso: `DM`, `DMNETWORK`, `DMGetLocalVector()`, `DMNetworkGetComponent()`, `DMNetworkGetGlobalVecOffset()`, `DMCreateGlobalVector()`, `VecGetArray()`, `VecSetValuesLocal()`, `MatSetValuesLocal()`
1222: @*/
1223: PetscErrorCode DMNetworkGetLocalVecOffset(DM dm, PetscInt p, PetscInt compnum, PetscInt *offset)
1224: {
1225: DM_Network *network = (DM_Network *)dm->data;
1226: PetscInt offsetp, offsetd;
1227: DMNetworkComponentHeader header;
1229: PetscFunctionBegin;
1230: PetscCall(PetscSectionGetOffset(network->plex->localSection, p, &offsetp));
1231: if (compnum == ALL_COMPONENTS) {
1232: *offset = offsetp;
1233: PetscFunctionReturn(PETSC_SUCCESS);
1234: }
1236: PetscCall(PetscSectionGetOffset(network->DataSection, p, &offsetd));
1237: header = (DMNetworkComponentHeader)(network->componentdataarray + offsetd);
1238: *offset = offsetp + header->offsetvarrel[compnum];
1239: PetscFunctionReturn(PETSC_SUCCESS);
1240: }
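
/*
  A hedged sketch (not part of this file) of reading the variables of component 0 at a vertex point
  v from a local vector, following the Notes above; "X" (a global vector), "dm" and "v" are assumed:

    Vec                localX;
    const PetscScalar *xarr;
    PetscInt           offset, nvar;

    PetscCall(DMGetLocalVector(dm, &localX));
    PetscCall(DMGlobalToLocalBegin(dm, X, INSERT_VALUES, localX));
    PetscCall(DMGlobalToLocalEnd(dm, X, INSERT_VALUES, localX));
    PetscCall(VecGetArrayRead(localX, &xarr));
    PetscCall(DMNetworkGetLocalVecOffset(dm, v, 0, &offset));
    PetscCall(DMNetworkGetComponent(dm, v, 0, NULL, NULL, &nvar));
    // the component's variables are xarr[offset] .. xarr[offset + nvar - 1]
    PetscCall(VecRestoreArrayRead(localX, &xarr));
    PetscCall(DMRestoreLocalVector(dm, &localX));
*/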
1242: /*@
1243: DMNetworkGetGlobalVecOffset - Get the global offset for accessing the variables associated with a component for the given vertex/edge from the global vector
1245: Not Collective
1247: Input Parameters:
1248: + dm - the `DMNETWORK` object
1249: . p - the edge or vertex point
1250: - compnum - component number; use ALL_COMPONENTS if no specific component is requested
1252: Output Parameter:
1253: . offsetg - the global offset
1255: Level: intermediate
1257: Notes:
1258: These offsets can be passed to `MatSetValues()` for matrices obtained with `DMCreateMatrix()`.
1260: For vectors obtained with `DMCreateGlobalVector()` the offsets can be used with `VecSetValues()`.
1262: For vectors obtained with `DMCreateGlobalVector()` and the array obtained with `VecGetArray`(vec,&array) you can access or set
1263:   the vector values with array[offset - rstart] where rstart is obtained with `VecGetOwnershipRange`(v,&rstart,`NULL`);
1265: .seealso: `DM`, `DMNETWORK`, `DMNetworkGetLocalVecOffset()`, `DMGetGlobalVector()`, `DMNetworkGetComponent()`, `DMCreateGlobalVector()`, `VecGetArray()`, `VecSetValues()`, `MatSetValues()`
1266: @*/
1267: PetscErrorCode DMNetworkGetGlobalVecOffset(DM dm, PetscInt p, PetscInt compnum, PetscInt *offsetg)
1268: {
1269: DM_Network *network = (DM_Network *)dm->data;
1270: PetscInt offsetp, offsetd;
1271: DMNetworkComponentHeader header;
1273: PetscFunctionBegin;
1274: PetscCall(PetscSectionGetOffset(network->plex->globalSection, p, &offsetp));
1275: if (offsetp < 0) offsetp = -(offsetp + 1); /* Convert to actual global offset for ghost vertex */
1277: if (compnum == ALL_COMPONENTS) {
1278: *offsetg = offsetp;
1279: PetscFunctionReturn(PETSC_SUCCESS);
1280: }
1281: PetscCall(PetscSectionGetOffset(network->DataSection, p, &offsetd));
1282: header = (DMNetworkComponentHeader)(network->componentdataarray + offsetd);
1283: *offsetg = offsetp + header->offsetvarrel[compnum];
1284: PetscFunctionReturn(PETSC_SUCCESS);
1285: }
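
/*
  An illustrative sketch (not part of this file) of setting the first variable of component 0 at a
  point p in a global vector F using the global offset; "F", "dm" and "p" are assumptions:

    PetscInt offsetg;

    PetscCall(DMNetworkGetGlobalVecOffset(dm, p, 0, &offsetg));
    PetscCall(VecSetValue(F, offsetg, 1.0, INSERT_VALUES));
    PetscCall(VecAssemblyBegin(F));
    PetscCall(VecAssemblyEnd(F));
*/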
1287: /*@
1288: DMNetworkGetEdgeOffset - Get the offset for accessing the variables associated with the given edge from the local subvector
1290: Not Collective
1292: Input Parameters:
1293: + dm - the `DMNETWORK` object
1294: - p - the edge point
1296: Output Parameter:
1297: . offset - the offset
1299: Level: intermediate
1301: .seealso: `DM`, `DMNETWORK`, `DMNetworkGetLocalVecOffset()`, `DMGetLocalVector()`
1302: @*/
1303: PetscErrorCode DMNetworkGetEdgeOffset(DM dm, PetscInt p, PetscInt *offset)
1304: {
1305: DM_Network *network = (DM_Network *)dm->data;
1307: PetscFunctionBegin;
1308: PetscCall(PetscSectionGetOffset(network->edge.DofSection, p, offset));
1309: PetscFunctionReturn(PETSC_SUCCESS);
1310: }
1312: /*@
1313: DMNetworkGetVertexOffset - Get the offset for accessing the variables associated with the given vertex from the local subvector
1315: Not Collective
1317: Input Parameters:
1318: + dm - the `DMNETWORK` object
1319: - p - the vertex point
1321: Output Parameter:
1322: . offset - the offset
1324: Level: intermediate
1326: .seealso: `DM`, `DMNETWORK`, `DMNetworkGetEdgeOffset()`, `DMGetLocalVector()`
1327: @*/
1328: PetscErrorCode DMNetworkGetVertexOffset(DM dm, PetscInt p, PetscInt *offset)
1329: {
1330: DM_Network *network = (DM_Network *)dm->data;
1332: PetscFunctionBegin;
1333: p -= network->cloneshared->vStart;
1334: PetscCall(PetscSectionGetOffset(network->vertex.DofSection, p, offset));
1335: PetscFunctionReturn(PETSC_SUCCESS);
1336: }
1338: /*@
1339: DMNetworkAddComponent - Adds a network component and number of variables at the given point (vertex/edge)
1341: Collective
1343: Input Parameters:
1344: + dm - the DMNetwork
1345: . p - the vertex/edge point. These points are local indices provided by `DMNetworkGetSubnetwork()`
1346: . componentkey - component key returned while registering the component with `DMNetworkRegisterComponent()`
1347: . compvalue    - pointer to the data structure for the component, or `NULL` if the component does not require data; this data is not copied, so you cannot
1348:                  free this space until after `DMSetUp()` is called.
1349: - nvar - number of variables for the component at the vertex/edge point, zero if the component does not introduce any degrees of freedom at the point
1351: Level: beginner
1353: Notes:
1354: The owning rank and any other ranks that have this point as a ghost location must call this routine to add a component and number of variables in the same order at the given point.
1356: `DMNetworkLayoutSetUp()` must be called before this routine.
1358: Developer Notes:
1359: The requirement that all the ranks with access to a vertex (as owner or as ghost) add all the components comes from a limitation of the underlying implementation based on `DMPLEX`.
1361: .seealso: `DM`, `DMNETWORK`, `DMNetworkGetComponent()`, `DMNetworkGetSubnetwork()`, `DMNetworkIsGhostVertex()`, `DMNetworkLayoutSetUp()`
1362: @*/
1363: PetscErrorCode DMNetworkAddComponent(DM dm, PetscInt p, PetscInt componentkey, void *compvalue, PetscInt nvar)
1364: {
1365: DM_Network *network = (DM_Network *)dm->data;
1366: DMNetworkComponent *component = &network->component[componentkey];
1367: DMNetworkComponentHeader header;
1368: DMNetworkComponentValue cvalue;
1369: PetscInt compnum;
1370: PetscInt *compsize, *compkey, *compoffset, *compnvar, *compoffsetvarrel;
1371: void **compdata;
1373: PetscFunctionBegin;
1374: PetscCheck(componentkey >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "componentkey %" PetscInt_FMT " cannot be negative. Input a component key returned while registering the component with DMNetworkRegisterComponent()", componentkey);
1375: PetscCheck(network->componentsetup == PETSC_FALSE, PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "The network has already finalized the components. No new components can be added.");
1376: /* The owning rank and all ghost ranks add nvar */
1377: PetscCall(PetscSectionAddDof(network->DofSection, p, nvar));
1379: /* The owning rank and all ghost ranks add a component, including compvalue=NULL */
1380: header = &network->header[p];
1381: cvalue = &network->cvalue[p];
1382: if (header->ndata == header->maxcomps) {
1383: PetscInt additional_size;
1385: /* Reached limit so resize header component arrays */
1386: header->maxcomps += 2;
1388: /* Allocate arrays for component information and value */
1389: PetscCall(PetscCalloc5(header->maxcomps, &compsize, header->maxcomps, &compkey, header->maxcomps, &compoffset, header->maxcomps, &compnvar, header->maxcomps, &compoffsetvarrel));
1390: PetscCall(PetscMalloc1(header->maxcomps, &compdata));
1392: /* Recalculate header size */
1393: header->hsize = sizeof(struct _p_DMNetworkComponentHeader) + 5 * header->maxcomps * sizeof(PetscInt);
1394: header->hsize /= sizeof(DMNetworkComponentGenericDataType);
1395: #if defined(__NEC__)
1396: /* NEC/LG: quick hack to keep data aligned on 8 bytes. */
1397: header->hsize = (header->hsize + (8 - 1)) & ~(8 - 1);
1398: #endif
1400: /* Copy over component info */
1401: PetscCall(PetscMemcpy(compsize, header->size, header->ndata * sizeof(PetscInt)));
1402: PetscCall(PetscMemcpy(compkey, header->key, header->ndata * sizeof(PetscInt)));
1403: PetscCall(PetscMemcpy(compoffset, header->offset, header->ndata * sizeof(PetscInt)));
1404: PetscCall(PetscMemcpy(compnvar, header->nvar, header->ndata * sizeof(PetscInt)));
1405: PetscCall(PetscMemcpy(compoffsetvarrel, header->offsetvarrel, header->ndata * sizeof(PetscInt)));
1407: /* Copy over component data pointers */
1408: PetscCall(PetscMemcpy(compdata, cvalue->data, header->ndata * sizeof(void *)));
1410: /* Free old arrays */
1411: PetscCall(PetscFree5(header->size, header->key, header->offset, header->nvar, header->offsetvarrel));
1412: PetscCall(PetscFree(cvalue->data));
1414: /* Update pointers */
1415: header->size = compsize;
1416: header->key = compkey;
1417: header->offset = compoffset;
1418: header->nvar = compnvar;
1419: header->offsetvarrel = compoffsetvarrel;
1421: cvalue->data = compdata;
1423: /* Update DataSection Dofs */
1424:     /* The dofs for DataSection point p equal the size of the header (i.e. header->hsize) plus the sizes of the components added at point p. Since the header has been resized, the dofs for point p must be updated; hence we add the extra size taken by the enlarged header */
1425: additional_size = (5 * (header->maxcomps - header->ndata) * sizeof(PetscInt)) / sizeof(DMNetworkComponentGenericDataType);
1426: PetscCall(PetscSectionAddDof(network->DataSection, p, additional_size));
1427: }
1428: header = &network->header[p];
1429: cvalue = &network->cvalue[p];
1431: compnum = header->ndata;
1433: header->size[compnum] = component->size;
1434: PetscCall(PetscSectionAddDof(network->DataSection, p, component->size));
1435: header->key[compnum] = componentkey;
1436: if (compnum != 0) header->offset[compnum] = header->offset[compnum - 1] + header->size[compnum - 1];
1437: cvalue->data[compnum] = (void *)compvalue;
1439: /* variables */
1440: header->nvar[compnum] += nvar;
1441: if (compnum != 0) header->offsetvarrel[compnum] = header->offsetvarrel[compnum - 1] + header->nvar[compnum - 1];
1443: header->ndata++;
1444: PetscFunctionReturn(PETSC_SUCCESS);
1445: }
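
/*
  A hedged sketch (not part of this file) of attaching a registered component with one degree of
  freedom to every local vertex of subnetwork 0; "buskey" (returned by DMNetworkRegisterComponent())
  and the user-managed array "busdata" are hypothetical and must outlive DMSetUp():

    PetscInt        nv, i;
    const PetscInt *vtx;

    PetscCall(DMNetworkGetSubnetwork(dm, 0, &nv, NULL, &vtx, NULL));
    for (i = 0; i < nv; i++) PetscCall(DMNetworkAddComponent(dm, vtx[i], buskey, &busdata[i], 1));
    PetscCall(DMSetUp(dm)); // finalizes the components and the DoF layout
*/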
1447: /*@
1448: DMNetworkGetComponent - Gets the component key, the component data, and the number of variables at a given network point
1450: Not Collective
1452: Input Parameters:
1453: + dm - the `DMNETWORK` object
1454: . p - vertex/edge point
1455: - compnum   - component number; use ALL_COMPONENTS to sum up all the components
1457: Output Parameters:
1458: + compkey - the key obtained when registering the component (use `NULL` if not required)
1459: . component - the component data (use `NULL` if not required)
1460: - nvar - number of variables (use `NULL` if not required)
1462: Level: beginner
1464: .seealso: `DM`, `DMNETWORK`, `DMNetworkAddComponent()`, `DMNetworkGetNumComponents()`
1465: @*/
1466: PetscErrorCode DMNetworkGetComponent(DM dm, PetscInt p, PetscInt compnum, PetscInt *compkey, void **component, PetscInt *nvar)
1467: {
1468: DM_Network *network = (DM_Network *)dm->data;
1469: PetscInt offset = 0;
1470: DMNetworkComponentHeader header;
1472: PetscFunctionBegin;
1473: if (compnum == ALL_COMPONENTS) {
1474: PetscCall(PetscSectionGetDof(network->DofSection, p, nvar));
1475: PetscFunctionReturn(PETSC_SUCCESS);
1476: }
1478: PetscCall(PetscSectionGetOffset(network->DataSection, p, &offset));
1479: header = (DMNetworkComponentHeader)(network->componentdataarray + offset);
1481: if (compnum >= 0) {
1482: if (compkey) *compkey = header->key[compnum];
1483: if (component) {
1484: offset += header->hsize + header->offset[compnum];
1485: *component = network->componentdataarray + offset;
1486: }
1487: }
1489: if (nvar) *nvar = header->nvar[compnum];
1491: PetscFunctionReturn(PETSC_SUCCESS);
1492: }
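
/*
  An illustrative sketch (not part of this file) of retrieving every component attached to a point
  p; "UserBus" and "buskey" are hypothetical, matching whatever was registered and added earlier:

    PetscInt ncomp, j, key, nvar;
    void    *comp;

    PetscCall(DMNetworkGetNumComponents(dm, p, &ncomp));
    for (j = 0; j < ncomp; j++) {
      PetscCall(DMNetworkGetComponent(dm, p, j, &key, &comp, &nvar));
      if (key == buskey) {
        UserBus *bus = (UserBus *)comp; // component data lives in the network's contiguous data array
      }
    }
*/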
1494: /*
1495: Sets up the array that holds the data for all components and its associated section.
1496:   It copies the data for all components into a contiguous array called componentdataarray. The component data is stored pointwise, with an additional header (metadata) stored for each point. The header holds metadata such as the number of components at each point, the number of variables for each component, offsets into the component data, etc.
1497: */
1498: static PetscErrorCode DMNetworkComponentSetUp(DM dm)
1499: {
1500: DM_Network *network = (DM_Network *)dm->data;
1501: PetscInt arr_size, p, offset, offsetp, ncomp, i, *headerarr;
1502: DMNetworkComponentHeader header;
1503: DMNetworkComponentValue cvalue;
1504: DMNetworkComponentHeader headerinfo;
1505: DMNetworkComponentGenericDataType *componentdataarray;
1507: PetscFunctionBegin;
1508: PetscCall(PetscSectionSetUp(network->DataSection));
1509: PetscCall(PetscSectionGetStorageSize(network->DataSection, &arr_size));
1510: /* arr_size+1 fixes pipeline test of opensolaris-misc for src/dm/tests/ex10.c -- Do not know why */
1511: PetscCall(PetscCalloc1(arr_size + 1, &network->componentdataarray));
1512: componentdataarray = network->componentdataarray;
1513: for (p = network->cloneshared->pStart; p < network->cloneshared->pEnd; p++) {
1514: PetscCall(PetscSectionGetOffset(network->DataSection, p, &offsetp));
1515: /* Copy header */
1516: header = &network->header[p];
1517: headerinfo = (DMNetworkComponentHeader)(componentdataarray + offsetp);
1518: PetscCall(PetscMemcpy(headerinfo, header, sizeof(struct _p_DMNetworkComponentHeader)));
1519: headerarr = (PetscInt *)(headerinfo + 1);
1520: PetscCall(PetscMemcpy(headerarr, header->size, header->maxcomps * sizeof(PetscInt)));
1521: headerinfo->size = headerarr;
1522: headerarr += header->maxcomps;
1523: PetscCall(PetscMemcpy(headerarr, header->key, header->maxcomps * sizeof(PetscInt)));
1524: headerinfo->key = headerarr;
1525: headerarr += header->maxcomps;
1526: PetscCall(PetscMemcpy(headerarr, header->offset, header->maxcomps * sizeof(PetscInt)));
1527: headerinfo->offset = headerarr;
1528: headerarr += header->maxcomps;
1529: PetscCall(PetscMemcpy(headerarr, header->nvar, header->maxcomps * sizeof(PetscInt)));
1530: headerinfo->nvar = headerarr;
1531: headerarr += header->maxcomps;
1532: PetscCall(PetscMemcpy(headerarr, header->offsetvarrel, header->maxcomps * sizeof(PetscInt)));
1533: headerinfo->offsetvarrel = headerarr;
1535: /* Copy data */
1536: cvalue = &network->cvalue[p];
1537: ncomp = header->ndata;
1539: for (i = 0; i < ncomp; i++) {
1540: offset = offsetp + header->hsize + header->offset[i];
1541: PetscCall(PetscMemcpy(componentdataarray + offset, cvalue->data[i], header->size[i] * sizeof(DMNetworkComponentGenericDataType)));
1542: }
1543: }
1545: for (i = network->cloneshared->pStart; i < network->cloneshared->pEnd; i++) {
1546: PetscCall(PetscFree5(network->header[i].size, network->header[i].key, network->header[i].offset, network->header[i].nvar, network->header[i].offsetvarrel));
1547: PetscCall(PetscFree(network->cvalue[i].data));
1548: }
1549: PetscCall(PetscFree2(network->header, network->cvalue));
1550: PetscFunctionReturn(PETSC_SUCCESS);
1551: }
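/* Layout sketch (informational, inferred from the copy loop above): for each point p,
   componentdataarray holds, starting at the DataSection offset offsetp of p,

     [ _p_DMNetworkComponentHeader | size[maxcomps] key[maxcomps] offset[maxcomps] nvar[maxcomps] offsetvarrel[maxcomps] | component 0 data | component 1 data | ... ]

   i.e. header->hsize generic-data units of header/metadata, followed by the raw component data,
   with component i located at offsetp + header->hsize + header->offset[i]. */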
1553: /* Sets up the section for dofs. This routine is called during DMSetUp() */
1554: static PetscErrorCode DMNetworkVariablesSetUp(DM dm)
1555: {
1556: DM_Network *network = (DM_Network *)dm->data;
1558: PetscFunctionBegin;
1559: PetscCall(PetscSectionSetUp(network->DofSection));
1560: PetscFunctionReturn(PETSC_SUCCESS);
1561: }
1563: /* Get a subsection from a range of points */
1564: static PetscErrorCode DMNetworkGetSubSection_private(PetscSection main, PetscInt pstart, PetscInt pend, PetscSection *subsection)
1565: {
1566: PetscInt i, nvar;
1568: PetscFunctionBegin;
1569: PetscCall(PetscSectionCreate(PetscObjectComm((PetscObject)main), subsection));
1570: PetscCall(PetscSectionSetChart(*subsection, 0, pend - pstart));
1571: for (i = pstart; i < pend; i++) {
1572: PetscCall(PetscSectionGetDof(main, i, &nvar));
1573: PetscCall(PetscSectionSetDof(*subsection, i - pstart, nvar));
1574: }
1576: PetscCall(PetscSectionSetUp(*subsection));
1577: PetscFunctionReturn(PETSC_SUCCESS);
1578: }
1580: /* Create a submap of points with a GlobalToLocal structure */
1581: static PetscErrorCode DMNetworkSetSubMap_private(DM dm, PetscInt pstart, PetscInt pend, ISLocalToGlobalMapping *map)
1582: {
1583: PetscInt i, *subpoints;
1585: PetscFunctionBegin;
1586: /* Create index sets to map from "points" to "subpoints" */
1587: PetscCall(PetscMalloc1(pend - pstart, &subpoints));
1588: for (i = pstart; i < pend; i++) subpoints[i - pstart] = i;
1589: PetscCall(ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)dm), 1, pend - pstart, subpoints, PETSC_COPY_VALUES, map));
1590: PetscCall(PetscFree(subpoints));
1591: PetscFunctionReturn(PETSC_SUCCESS);
1592: }
1594: /*@
1595: DMNetworkAssembleGraphStructures - Assembles vertex and edge data structures. Must be called after `DMNetworkDistribute()`
1597: Collective
1599: Input Parameter:
1600: . dm - the `DMNETWORK` Object
1602: Level: intermediate
1604: Note:
1605: The routine will create alternative orderings for the vertices and edges. Assume global
1606: network points are\:
1608: points = [0 1 2 3 4 5 6]
1610: where edges = [0,1,2,3] and vertices = [4,5,6]. The new orderings will be specific to the subset (i.e. vertices = [0,1,2] <- [4,5,6]).
1612: With this new ordering a local `PetscSection`, global `PetscSection` and `PetscSF` will be created specific to the subset.
1614: .seealso: `DMNetworkDistribute()`
1615: @*/
1616: PetscErrorCode DMNetworkAssembleGraphStructures(DM dm)
1617: {
1618: MPI_Comm comm;
1619: PetscMPIInt size;
1620: DM_Network *network = (DM_Network *)dm->data;
1622: PetscFunctionBegin;
1623: PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
1624: PetscCallMPI(MPI_Comm_size(comm, &size));
1626: /* Create maps for vertices and edges */
1627: PetscCall(DMNetworkSetSubMap_private(dm, network->cloneshared->vStart, network->cloneshared->vEnd, &network->vertex.mapping));
1628: PetscCall(DMNetworkSetSubMap_private(dm, network->cloneshared->eStart, network->cloneshared->eEnd, &network->edge.mapping));
1630: /* Create local sub-sections */
1631: PetscCall(DMNetworkGetSubSection_private(network->DofSection, network->cloneshared->vStart, network->cloneshared->vEnd, &network->vertex.DofSection));
1632: PetscCall(DMNetworkGetSubSection_private(network->DofSection, network->cloneshared->eStart, network->cloneshared->eEnd, &network->edge.DofSection));
1634: if (size > 1) {
1635: PetscCall(PetscSFGetSubSF(network->plex->sf, network->vertex.mapping, &network->vertex.sf));
1637: PetscCall(PetscSectionCreateGlobalSection(network->vertex.DofSection, network->vertex.sf, PETSC_FALSE, PETSC_FALSE, &network->vertex.GlobalDofSection));
1638: PetscCall(PetscSFGetSubSF(network->plex->sf, network->edge.mapping, &network->edge.sf));
1639: PetscCall(PetscSectionCreateGlobalSection(network->edge.DofSection, network->edge.sf, PETSC_FALSE, PETSC_FALSE, &network->edge.GlobalDofSection));
1640: } else {
1641: /* create structures for vertex */
1642: PetscCall(PetscSectionClone(network->vertex.DofSection, &network->vertex.GlobalDofSection));
1643: /* create structures for edge */
1644: PetscCall(PetscSectionClone(network->edge.DofSection, &network->edge.GlobalDofSection));
1645: }
1647: /* Add viewers */
1648: PetscCall(PetscObjectSetName((PetscObject)network->edge.GlobalDofSection, "Global edge dof section"));
1649: PetscCall(PetscObjectSetName((PetscObject)network->vertex.GlobalDofSection, "Global vertex dof section"));
1650: PetscCall(PetscSectionViewFromOptions(network->edge.GlobalDofSection, NULL, "-edge_global_section_view"));
1651: PetscCall(PetscSectionViewFromOptions(network->vertex.GlobalDofSection, NULL, "-vertex_global_section_view"));
1652: PetscFunctionReturn(PETSC_SUCCESS);
1653: }
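/* A minimal calling-sequence sketch (not part of the library source), assuming a DMNETWORK `dm`
   that has already been set up with DMSetUp():

     PetscCall(DMNetworkDistribute(&dm, 0));          // distribute the network (no overlap)
     PetscCall(DMNetworkAssembleGraphStructures(dm)); // build vertex/edge sub-sections, maps, and SFs
*/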
1655: /*
1656: Set up a lookup btable for the input vertex v's owning subnetworks
1657: - add all owning subnetworks that connect to this v to the btable
1658: vertex_subnetid = supportingedge_subnetid
1659: */
1660: static inline PetscErrorCode SetSubnetIdLookupBT(DM dm, PetscInt v, PetscInt Nsubnet, PetscBT btable)
1661: {
1662: PetscInt e, nedges, offset;
1663: const PetscInt *edges;
1664: DM_Network *newDMnetwork = (DM_Network *)dm->data;
1665: DMNetworkComponentHeader header;
1667: PetscFunctionBegin;
1668: PetscCall(PetscBTMemzero(Nsubnet, btable));
1669: PetscCall(DMNetworkGetSupportingEdges(dm, v, &nedges, &edges));
1670: for (e = 0; e < nedges; e++) {
1671: PetscCall(PetscSectionGetOffset(newDMnetwork->DataSection, edges[e], &offset));
1672: header = (DMNetworkComponentHeader)(newDMnetwork->componentdataarray + offset);
1673: PetscCall(PetscBTSet(btable, header->subnetid));
1674: }
1675: PetscFunctionReturn(PETSC_SUCCESS);
1676: }
1678: /*
1679: DMNetworkDistributeCoordinates - Internal function to distribute the coordinate network and coordinates.
1681: Collective
1683: Input Parameters:
1684: + dm - the original `DMNETWORK` object
1685: . migrationSF - the `PetscSF` describing the migration from dm to newDM
1686: - newDM - the new distributed `DMNETWORK` object
1687: */
1689: static PetscErrorCode DMNetworkDistributeCoordinates(DM dm, PetscSF migrationSF, DM newDM)
1690: {
1691: DM_Network *newDMnetwork = (DM_Network *)((newDM)->data), *newCoordnetwork, *oldCoordnetwork;
1692: DM cdm, newcdm;
1693: PetscInt cdim, bs, p, pStart, pEnd, offset;
1694: Vec oldCoord, newCoord;
1695: DMNetworkComponentHeader header;
1696: const char *name;
1698: PetscFunctionBegin;
1699: /* Distribute the coordinate network and coordinates */
1700: PetscCall(DMGetCoordinateDim(dm, &cdim));
1701: PetscCall(DMSetCoordinateDim(newDM, cdim));
1703: /* Migrate only if original network had coordinates */
1704: PetscCall(DMGetCoordinatesLocal(dm, &oldCoord));
1705: if (oldCoord) {
1706: PetscCall(DMGetCoordinateDM(dm, &cdm));
1707: PetscCall(DMGetCoordinateDM(newDM, &newcdm));
1708: newCoordnetwork = (DM_Network *)newcdm->data;
1709: oldCoordnetwork = (DM_Network *)cdm->data;
1711: PetscCall(VecCreate(PETSC_COMM_SELF, &newCoord));
1712: PetscCall(PetscObjectGetName((PetscObject)oldCoord, &name));
1713: PetscCall(PetscObjectSetName((PetscObject)newCoord, name));
1714: PetscCall(VecGetBlockSize(oldCoord, &bs));
1715: PetscCall(VecSetBlockSize(newCoord, bs));
1717: PetscCall(DMPlexDistributeField(newDMnetwork->plex, migrationSF, oldCoordnetwork->DofSection, oldCoord, newCoordnetwork->DofSection, newCoord));
1718: PetscCall(DMSetCoordinatesLocal(newDM, newCoord));
1720: PetscCall(VecDestroy(&newCoord));
1721: /* Migrate the components from the original coordinate network to the new coordinate network */
1722: PetscCall(DMPlexDistributeData(newDMnetwork->plex, migrationSF, oldCoordnetwork->DataSection, MPIU_INT, (void *)oldCoordnetwork->componentdataarray, newCoordnetwork->DataSection, (void **)&newCoordnetwork->componentdataarray));
1723: /* update the header pointers in the new coordinate network components */
1724: PetscCall(PetscSectionGetChart(newCoordnetwork->DataSection, &pStart, &pEnd));
1725: for (p = pStart; p < pEnd; p++) {
1726: PetscCall(PetscSectionGetOffset(newCoordnetwork->DataSection, p, &offset));
1727: header = (DMNetworkComponentHeader)(newCoordnetwork->componentdataarray + offset);
1728: /* Update pointers */
1729: header->size = (PetscInt *)(header + 1);
1730: header->key = header->size + header->maxcomps;
1731: header->offset = header->key + header->maxcomps;
1732: header->nvar = header->offset + header->maxcomps;
1733: header->offsetvarrel = header->nvar + header->maxcomps;
1734: }
1736: PetscCall(DMSetLocalSection(newCoordnetwork->plex, newCoordnetwork->DofSection));
1737: PetscCall(DMGetGlobalSection(newCoordnetwork->plex, &newCoordnetwork->GlobalDofSection));
1738: newCoordnetwork->componentsetup = PETSC_TRUE;
1739: }
1740: PetscFunctionReturn(PETSC_SUCCESS);
1741: }
1743: /*@
1744: DMNetworkDistribute - Distributes the network and moves associated component data
1746: Collective
1748: Input Parameters:
1749: + dm - the `DMNETWORK` object
1750: - overlap - the overlap of partitions, 0 is the default
1752: Options Database Keys:
1753: + -dmnetwork_view - Calls `DMView()` at the conclusion of `DMSetUp()`
1754: . -dmnetwork_view_distributed - Calls `DMView()` at the conclusion of `DMNetworkDistribute()`
1755: . -dmnetwork_view_tmpdir - Sets the temporary directory to use when viewing with the `draw` option
1756: . -dmnetwork_view_all_ranks - Displays all of the subnetworks for each MPI rank
1757: . -dmnetwork_view_rank_range - Displays the subnetworks for the ranks in a comma-separated list
1758: . -dmnetwork_view_no_vertices - Disables displaying the vertices in the network visualization
1759: - -dmnetwork_view_no_numbering - Disables displaying the numbering of edges and vertices in the network visualization
1761: Level: intermediate
1763: Note:
1764: Distributes the network with <overlap>-overlapping partitioning of the edges.
1766: .seealso: `DM`, `DMNETWORK`, `DMNetworkCreate()`
1767: @*/
1768: PetscErrorCode DMNetworkDistribute(DM *dm, PetscInt overlap)
1769: {
1770: MPI_Comm comm;
1771: PetscMPIInt size;
1772: DM_Network *oldDMnetwork = (DM_Network *)((*dm)->data), *newDMnetwork;
1773: PetscSF pointsf = NULL;
1774: DM newDM;
1775: PetscInt j, e, v, offset, *subnetvtx, *subnetedge, Nsubnet, gidx, svtx_idx, nv, net;
1776: PetscInt *sv;
1777: PetscBT btable;
1778: PetscPartitioner part;
1779: DMNetworkComponentHeader header;
1781: PetscFunctionBegin;
1782: PetscAssertPointer(dm, 1);
1784: PetscCall(PetscObjectGetComm((PetscObject)*dm, &comm));
1785: PetscCallMPI(MPI_Comm_size(comm, &size));
1786: if (size == 1) {
1787: oldDMnetwork->cloneshared->distributecalled = PETSC_TRUE;
1788: PetscFunctionReturn(PETSC_SUCCESS);
1789: }
1791: PetscCheck(!overlap, comm, PETSC_ERR_SUP, "overlap %" PetscInt_FMT " != 0 is not supported yet", overlap);
1793: /* This routine moves the component data to the appropriate processes. It uses the DataSection and the componentdataarray to migrate the data and returns a new DataSection and a new componentdataarray for the distributed DM. */
1794: PetscCall(DMNetworkCreate(PetscObjectComm((PetscObject)*dm), &newDM));
1795: PetscCall(PetscLogEventBegin(DMNetwork_Distribute, newDM, 0, 0, 0));
1796: newDMnetwork = (DM_Network *)newDM->data;
1797: newDMnetwork->max_comps_registered = oldDMnetwork->max_comps_registered;
1798: PetscCall(PetscMalloc1(newDMnetwork->max_comps_registered, &newDMnetwork->component));
1800: /* Enable runtime options for petscpartitioner */
1801: PetscCall(DMPlexGetPartitioner(oldDMnetwork->plex, &part));
1802: PetscCall(PetscPartitionerSetFromOptions(part));
1804: /* Distribute plex dm */
1805: PetscCall(DMPlexDistribute(oldDMnetwork->plex, overlap, &pointsf, &newDMnetwork->plex));
1807: /* Distribute dof section */
1808: PetscCall(PetscSectionCreate(comm, &newDMnetwork->DofSection));
1809: PetscCall(PetscSFDistributeSection(pointsf, oldDMnetwork->DofSection, NULL, newDMnetwork->DofSection));
1811: /* Distribute data and associated section */
1812: PetscCall(PetscSectionCreate(comm, &newDMnetwork->DataSection));
1813: PetscCall(DMPlexDistributeData(newDMnetwork->plex, pointsf, oldDMnetwork->DataSection, MPIU_INT, (void *)oldDMnetwork->componentdataarray, newDMnetwork->DataSection, (void **)&newDMnetwork->componentdataarray));
1815: PetscCall(PetscSectionGetChart(newDMnetwork->DataSection, &newDMnetwork->cloneshared->pStart, &newDMnetwork->cloneshared->pEnd));
1816: PetscCall(DMPlexGetHeightStratum(newDMnetwork->plex, 0, &newDMnetwork->cloneshared->eStart, &newDMnetwork->cloneshared->eEnd));
1817: PetscCall(DMPlexGetHeightStratum(newDMnetwork->plex, 1, &newDMnetwork->cloneshared->vStart, &newDMnetwork->cloneshared->vEnd));
1818: newDMnetwork->cloneshared->nEdges = newDMnetwork->cloneshared->eEnd - newDMnetwork->cloneshared->eStart;
1819: newDMnetwork->cloneshared->nVertices = newDMnetwork->cloneshared->vEnd - newDMnetwork->cloneshared->vStart;
1820: newDMnetwork->cloneshared->NVertices = oldDMnetwork->cloneshared->NVertices;
1821: newDMnetwork->cloneshared->NEdges = oldDMnetwork->cloneshared->NEdges;
1822: newDMnetwork->cloneshared->svtable = oldDMnetwork->cloneshared->svtable; /* global table! */
1823: oldDMnetwork->cloneshared->svtable = NULL;
1825: /* Set Dof section as the section for dm */
1826: PetscCall(DMSetLocalSection(newDMnetwork->plex, newDMnetwork->DofSection));
1827: PetscCall(DMGetGlobalSection(newDMnetwork->plex, &newDMnetwork->GlobalDofSection));
1829: /* Setup subnetwork info in the newDM */
1830: newDMnetwork->cloneshared->Nsubnet = oldDMnetwork->cloneshared->Nsubnet;
1831: newDMnetwork->cloneshared->Nsvtx = oldDMnetwork->cloneshared->Nsvtx;
1832: oldDMnetwork->cloneshared->Nsvtx = 0;
1833: newDMnetwork->cloneshared->svtx = oldDMnetwork->cloneshared->svtx; /* global vertices! */
1834: oldDMnetwork->cloneshared->svtx = NULL;
1835: PetscCall(PetscCalloc1(newDMnetwork->cloneshared->Nsubnet, &newDMnetwork->cloneshared->subnet));
1837: /* Copy over the global number of vertices and edges in each subnetwork.
1838: Note: these are calculated in DMNetworkLayoutSetUp()
1839: */
1840: Nsubnet = newDMnetwork->cloneshared->Nsubnet;
1841: for (j = 0; j < Nsubnet; j++) {
1842: newDMnetwork->cloneshared->subnet[j].Nvtx = oldDMnetwork->cloneshared->subnet[j].Nvtx;
1843: newDMnetwork->cloneshared->subnet[j].Nedge = oldDMnetwork->cloneshared->subnet[j].Nedge;
1844: }
1846: /* Count local nedges for subnetworks */
1847: for (e = newDMnetwork->cloneshared->eStart; e < newDMnetwork->cloneshared->eEnd; e++) {
1848: PetscCall(PetscSectionGetOffset(newDMnetwork->DataSection, e, &offset));
1849: header = (DMNetworkComponentHeader)(newDMnetwork->componentdataarray + offset);
1851: /* Update pointers */
1852: header->size = (PetscInt *)(header + 1);
1853: header->key = header->size + header->maxcomps;
1854: header->offset = header->key + header->maxcomps;
1855: header->nvar = header->offset + header->maxcomps;
1856: header->offsetvarrel = header->nvar + header->maxcomps;
1858: newDMnetwork->cloneshared->subnet[header->subnetid].nedge++;
1859: }
1861: /* Set up a btable to keep track of the subnetworks owned by this process at a shared vertex */
1862: if (newDMnetwork->cloneshared->Nsvtx) PetscCall(PetscBTCreate(Nsubnet, &btable));
1864: /* Count local nvtx for subnetworks */
1865: for (v = newDMnetwork->cloneshared->vStart; v < newDMnetwork->cloneshared->vEnd; v++) {
1866: PetscCall(PetscSectionGetOffset(newDMnetwork->DataSection, v, &offset));
1867: header = (DMNetworkComponentHeader)(newDMnetwork->componentdataarray + offset);
1869: /* Update pointers */
1870: header->size = (PetscInt *)(header + 1);
1871: header->key = header->size + header->maxcomps;
1872: header->offset = header->key + header->maxcomps;
1873: header->nvar = header->offset + header->maxcomps;
1874: header->offsetvarrel = header->nvar + header->maxcomps;
1876: /* shared vertices: use gidx=header->index to check if v is a shared vertex */
1877: gidx = header->index;
1878: PetscCall(PetscHMapIGetWithDefault(newDMnetwork->cloneshared->svtable, gidx + 1, 0, &svtx_idx));
1879: svtx_idx--;
1881: if (svtx_idx < 0) { /* not a shared vertex */
1882: newDMnetwork->cloneshared->subnet[header->subnetid].nvtx++;
1883: } else { /* a shared vertex belongs to more than one subnetwork, so it is counted by multiple subnets */
1884: /* Setup a lookup btable for this v's owning subnetworks */
1885: PetscCall(SetSubnetIdLookupBT(newDM, v, Nsubnet, btable));
1887: for (j = 0; j < newDMnetwork->cloneshared->svtx[svtx_idx].n; j++) {
1888: sv = newDMnetwork->cloneshared->svtx[svtx_idx].sv + 2 * j;
1889: net = sv[0];
1890: if (PetscBTLookup(btable, net)) newDMnetwork->cloneshared->subnet[net].nvtx++; /* sv is on net owned by this process */
1891: }
1892: }
1893: }
1895: /* Get total local nvtx for subnetworks */
1896: nv = 0;
1897: for (j = 0; j < Nsubnet; j++) nv += newDMnetwork->cloneshared->subnet[j].nvtx;
1898: nv += newDMnetwork->cloneshared->Nsvtx;
1900: /* Now create the vertices and edge arrays for the subnetworks */
1901: PetscCall(PetscCalloc2(newDMnetwork->cloneshared->nEdges, &subnetedge, nv, &subnetvtx)); /* Maps local vertex to local subnetwork's vertex */
1902: newDMnetwork->cloneshared->subnetedge = subnetedge;
1903: newDMnetwork->cloneshared->subnetvtx = subnetvtx;
1904: for (j = 0; j < newDMnetwork->cloneshared->Nsubnet; j++) {
1905: newDMnetwork->cloneshared->subnet[j].edges = subnetedge;
1906: subnetedge += newDMnetwork->cloneshared->subnet[j].nedge;
1908: newDMnetwork->cloneshared->subnet[j].vertices = subnetvtx;
1909: subnetvtx += newDMnetwork->cloneshared->subnet[j].nvtx;
1911: /* Temporarily set nvtx and nedge to 0 so they can be used as counters in the for loop below. They get updated as the vertices and edges are added. */
1912: newDMnetwork->cloneshared->subnet[j].nvtx = newDMnetwork->cloneshared->subnet[j].nedge = 0;
1913: }
1914: newDMnetwork->cloneshared->svertices = subnetvtx;
1916: /* Set the edges and vertices in each subnetwork */
1917: for (e = newDMnetwork->cloneshared->eStart; e < newDMnetwork->cloneshared->eEnd; e++) {
1918: PetscCall(PetscSectionGetOffset(newDMnetwork->DataSection, e, &offset));
1919: header = (DMNetworkComponentHeader)(newDMnetwork->componentdataarray + offset);
1920: newDMnetwork->cloneshared->subnet[header->subnetid].edges[newDMnetwork->cloneshared->subnet[header->subnetid].nedge++] = e;
1921: }
1923: nv = 0;
1924: for (v = newDMnetwork->cloneshared->vStart; v < newDMnetwork->cloneshared->vEnd; v++) {
1925: PetscCall(PetscSectionGetOffset(newDMnetwork->DataSection, v, &offset));
1926: header = (DMNetworkComponentHeader)(newDMnetwork->componentdataarray + offset);
1928: /* coupling vertices: use gidx = header->index to check if v is a coupling vertex */
1929: PetscCall(PetscHMapIGetWithDefault(newDMnetwork->cloneshared->svtable, header->index + 1, 0, &svtx_idx));
1930: svtx_idx--;
1931: if (svtx_idx < 0) {
1932: newDMnetwork->cloneshared->subnet[header->subnetid].vertices[newDMnetwork->cloneshared->subnet[header->subnetid].nvtx++] = v;
1933: } else { /* a shared vertex */
1934: newDMnetwork->cloneshared->svertices[nv++] = v;
1936: /* Setup a lookup btable for this v's owning subnetworks */
1937: PetscCall(SetSubnetIdLookupBT(newDM, v, Nsubnet, btable));
1939: for (j = 0; j < newDMnetwork->cloneshared->svtx[svtx_idx].n; j++) {
1940: sv = newDMnetwork->cloneshared->svtx[svtx_idx].sv + 2 * j;
1941: net = sv[0];
1942: if (PetscBTLookup(btable, net)) newDMnetwork->cloneshared->subnet[net].vertices[newDMnetwork->cloneshared->subnet[net].nvtx++] = v;
1943: }
1944: }
1945: }
1946: newDMnetwork->cloneshared->nsvtx = nv; /* num of local shared vertices */
1948: PetscCall(DMNetworkDistributeCoordinates(*dm, pointsf, newDM));
1949: newDM->setupcalled = (*dm)->setupcalled;
1950: newDMnetwork->cloneshared->distributecalled = PETSC_TRUE;
1952: /* Free spaces */
1953: PetscCall(PetscSFDestroy(&pointsf));
1954: PetscCall(DMDestroy(dm));
1955: if (newDMnetwork->cloneshared->Nsvtx) PetscCall(PetscBTDestroy(&btable));
1956: PetscCall(PetscLogEventEnd(DMNetwork_Distribute, newDM, 0, 0, 0));
1958: /* View distributed dmnetwork */
1959: PetscCall(DMViewFromOptions(newDM, NULL, "-dmnetwork_view_distributed"));
1961: *dm = newDM;
1962: PetscFunctionReturn(PETSC_SUCCESS);
1963: }
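/* A usage sketch (not part of the library source): DMNetworkDistribute() takes the address of the DM,
   destroys the sequential DM, and replaces it with the distributed one, so the caller keeps using the
   same variable afterwards.

     DM dm;
     // ... create subnetworks, add components, call DMSetUp(dm) ...
     PetscCall(DMNetworkDistribute(&dm, 0)); // dm now points to the distributed network
*/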
1965: /*@C
1966: PetscSFGetSubSF - Returns a `PetscSF` for a specific subset of points. Leaves are renumbered to reflect the new ordering
1968: Collective
1970: Input Parameters:
1971: + mainsf - `PetscSF` structure
1972: - map - an `ISLocalToGlobalMapping` that contains the subset of points
1974: Output Parameter:
1975: . subSF - the `PetscSF` restricted to the desired subset of points of `mainsf`
1977: Level: intermediate
1979: .seealso: `PetscSF`
1980: @*/
1981: PetscErrorCode PetscSFGetSubSF(PetscSF mainsf, ISLocalToGlobalMapping map, PetscSF *subSF)
1982: {
1983: PetscInt nroots, nleaves, *ilocal_sub;
1984: PetscInt i, *ilocal_map, nroots_sub, nleaves_sub = 0;
1985: PetscInt *local_points, *remote_points;
1986: PetscSFNode *iremote_sub;
1987: const PetscInt *ilocal;
1988: const PetscSFNode *iremote;
1990: PetscFunctionBegin;
1991: PetscCall(PetscSFGetGraph(mainsf, &nroots, &nleaves, &ilocal, &iremote));
1993: /* Look for leaves that pertain to the subset of points. Get the local ordering */
1994: PetscCall(PetscMalloc1(nleaves, &ilocal_map));
1995: PetscCall(ISGlobalToLocalMappingApply(map, IS_GTOLM_MASK, nleaves, ilocal, NULL, ilocal_map));
1996: for (i = 0; i < nleaves; i++) {
1997: if (ilocal_map[i] != -1) nleaves_sub += 1;
1998: }
1999: /* Re-number ilocal with subset numbering. Need information from roots */
2000: PetscCall(PetscMalloc2(nroots, &local_points, nroots, &remote_points));
2001: for (i = 0; i < nroots; i++) local_points[i] = i;
2002: PetscCall(ISGlobalToLocalMappingApply(map, IS_GTOLM_MASK, nroots, local_points, NULL, local_points));
2003: PetscCall(PetscSFBcastBegin(mainsf, MPIU_INT, local_points, remote_points, MPI_REPLACE));
2004: PetscCall(PetscSFBcastEnd(mainsf, MPIU_INT, local_points, remote_points, MPI_REPLACE));
2005: /* Fill up graph using local (that is, local to the subset) numbering. */
2006: PetscCall(PetscMalloc1(nleaves_sub, &ilocal_sub));
2007: PetscCall(PetscMalloc1(nleaves_sub, &iremote_sub));
2008: nleaves_sub = 0;
2009: for (i = 0; i < nleaves; i++) {
2010: if (ilocal_map[i] != -1) {
2011: ilocal_sub[nleaves_sub] = ilocal_map[i];
2012: iremote_sub[nleaves_sub].rank = iremote[i].rank;
2013: iremote_sub[nleaves_sub].index = remote_points[ilocal[i]];
2014: nleaves_sub += 1;
2015: }
2016: }
2017: PetscCall(PetscFree2(local_points, remote_points));
2018: PetscCall(ISLocalToGlobalMappingGetSize(map, &nroots_sub));
2020: /* Create new subSF */
2021: PetscCall(PetscSFCreate(PetscObjectComm((PetscObject)mainsf), subSF));
2022: PetscCall(PetscSFSetFromOptions(*subSF));
2023: PetscCall(PetscSFSetGraph(*subSF, nroots_sub, nleaves_sub, ilocal_sub, PETSC_OWN_POINTER, iremote_sub, PETSC_COPY_VALUES));
2024: PetscCall(PetscFree(ilocal_map));
2025: PetscCall(PetscFree(iremote_sub));
2026: PetscFunctionReturn(PETSC_SUCCESS);
2027: }
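/* A minimal sketch (not part of the library source) of building a sub-SF for a contiguous point
   range [pstart, pend) of an existing PetscSF `sf`; `points`, `map`, and `subsf` are illustrative names:

     PetscInt              *points;
     ISLocalToGlobalMapping map;
     PetscSF                subsf;

     PetscCall(PetscMalloc1(pend - pstart, &points));
     for (PetscInt i = pstart; i < pend; i++) points[i - pstart] = i;
     PetscCall(ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)sf), 1, pend - pstart, points, PETSC_COPY_VALUES, &map));
     PetscCall(PetscFree(points));
     PetscCall(PetscSFGetSubSF(sf, map, &subsf));
     // ... use subsf ...
     PetscCall(PetscSFDestroy(&subsf));
     PetscCall(ISLocalToGlobalMappingDestroy(&map));
*/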
2029: /*@C
2030: DMNetworkGetSupportingEdges - Return the supporting edges for this vertex point
2032: Not Collective
2034: Input Parameters:
2035: + dm - the `DMNETWORK` object
2036: - vertex - the vertex point
2038: Output Parameters:
2039: + nedges - number of edges connected to this vertex point
2040: - edges - list of edge points
2042: Level: beginner
2044: .seealso: `DM`, `DMNETWORK`, `DMNetworkCreate()`, `DMNetworkGetConnectedVertices()`
2045: @*/
2046: PetscErrorCode DMNetworkGetSupportingEdges(DM dm, PetscInt vertex, PetscInt *nedges, const PetscInt *edges[])
2047: {
2048: DM_Network *network = (DM_Network *)dm->data;
2050: PetscFunctionBegin;
2051: PetscCall(DMPlexGetSupportSize(network->plex, vertex, nedges));
2052: if (edges) PetscCall(DMPlexGetSupport(network->plex, vertex, edges));
2053: PetscFunctionReturn(PETSC_SUCCESS);
2054: }
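/* A minimal usage sketch (not part of the library source): loop over the local vertices and visit
   the supporting edges of each one.

     PetscInt        vStart, vEnd, nedges;
     const PetscInt *edges;

     PetscCall(DMNetworkGetVertexRange(dm, &vStart, &vEnd));
     for (PetscInt v = vStart; v < vEnd; v++) {
       PetscCall(DMNetworkGetSupportingEdges(dm, v, &nedges, &edges));
       // edges[0..nedges-1] are the edge points incident to v
     }
*/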
2056: /*@C
2057: DMNetworkGetConnectedVertices - Return the connected vertices for this edge point
2059: Not Collective
2061: Input Parameters:
2062: + dm - the `DMNETWORK` object
2063: - edge - the edge point
2065: Output Parameter:
2066: . vertices - vertices connected to this edge
2068: Level: beginner
2070: .seealso: `DM`, `DMNETWORK`, `DMNetworkCreate()`, `DMNetworkGetSupportingEdges()`
2071: @*/
2072: PetscErrorCode DMNetworkGetConnectedVertices(DM dm, PetscInt edge, const PetscInt *vertices[])
2073: {
2074: DM_Network *network = (DM_Network *)dm->data;
2076: PetscFunctionBegin;
2077: PetscCall(DMPlexGetCone(network->plex, edge, vertices));
2078: PetscFunctionReturn(PETSC_SUCCESS);
2079: }
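/* A minimal usage sketch (not part of the library source): the two endpoint vertices of each edge.

     PetscInt        eStart, eEnd;
     const PetscInt *cone;

     PetscCall(DMNetworkGetEdgeRange(dm, &eStart, &eEnd));
     for (PetscInt e = eStart; e < eEnd; e++) {
       PetscCall(DMNetworkGetConnectedVertices(dm, e, &cone));
       // cone[0] and cone[1] are the vertex points connected by edge e
     }
*/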
2081: /*@
2082: DMNetworkIsSharedVertex - Returns `PETSC_TRUE` if the vertex is shared by subnetworks
2084: Not Collective
2086: Input Parameters:
2087: + dm - the `DMNETWORK` object
2088: - p - the vertex point
2090: Output Parameter:
2091: . flag - `PETSC_TRUE` if the vertex is shared by subnetworks
2093: Level: beginner
2095: .seealso: `DM`, `DMNETWORK`, `DMNetworkAddSharedVertices()`, `DMNetworkIsGhostVertex()`
2096: @*/
2097: PetscErrorCode DMNetworkIsSharedVertex(DM dm, PetscInt p, PetscBool *flag)
2098: {
2099: PetscFunctionBegin;
2101: PetscAssertPointer(flag, 3);
2102: if (dm->setupcalled) { /* DMNetworkGetGlobalVertexIndex() requires DMSetUp() be called */
2103: DM_Network *network = (DM_Network *)dm->data;
2104: PetscInt gidx;
2106: PetscCall(DMNetworkGetGlobalVertexIndex(dm, p, &gidx));
2107: PetscCall(PetscHMapIHas(network->cloneshared->svtable, gidx + 1, flag));
2108: } else { /* before DMSetUp() is called: search the list of shared vertices directly (this branch may be removed in the future) */
2109: PetscInt nv;
2110: const PetscInt *vtx;
2112: PetscCall(DMNetworkGetSharedVertices(dm, &nv, &vtx));
2113: for (PetscInt i = 0; i < nv; i++) {
2114: if (p == vtx[i]) {
2115: *flag = PETSC_TRUE;
2116: PetscFunctionReturn(PETSC_SUCCESS);
2117: }
2118: }
2119: *flag = PETSC_FALSE;
2120: }
2121: PetscFunctionReturn(PETSC_SUCCESS);
2122: }
2124: /*@
2125: DMNetworkIsGhostVertex - Returns `PETSC_TRUE` if the vertex is a ghost vertex
2127: Not Collective
2129: Input Parameters:
2130: + dm - the `DMNETWORK` object
2131: - p - the vertex point
2133: Output Parameter:
2134: . isghost - `PETSC_TRUE` if the vertex is a ghost point
2136: Level: beginner
2138: .seealso: `DM`, `DMNETWORK`, `DMNetworkGetConnectedVertices()`, `DMNetworkGetVertexRange()`, `DMNetworkIsSharedVertex()`
2139: @*/
2140: PetscErrorCode DMNetworkIsGhostVertex(DM dm, PetscInt p, PetscBool *isghost)
2141: {
2142: DM_Network *network = (DM_Network *)dm->data;
2143: PetscInt offsetg;
2144: PetscSection sectiong;
2146: PetscFunctionBegin;
2147: *isghost = PETSC_FALSE;
2148: PetscCall(DMGetGlobalSection(network->plex, &sectiong));
2149: PetscCall(PetscSectionGetOffset(sectiong, p, &offsetg));
2150: if (offsetg < 0) *isghost = PETSC_TRUE;
2151: PetscFunctionReturn(PETSC_SUCCESS);
2152: }
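/* A minimal usage sketch (not part of the library source), inside a loop over the local vertex
   range: skip ghost vertices (owned by another rank) and detect vertices shared between subnetworks.

     PetscBool isghost, isshared;

     PetscCall(DMNetworkIsGhostVertex(dm, v, &isghost));
     if (isghost) continue; // the owning rank assembles this vertex's contribution
     PetscCall(DMNetworkIsSharedVertex(dm, v, &isshared));
     // isshared == PETSC_TRUE means v couples two or more subnetworks
*/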
2154: PetscErrorCode DMSetUp_Network(DM dm)
2155: {
2156: PetscFunctionBegin;
2157: PetscCall(PetscLogEventBegin(DMNetwork_SetUpNetwork, dm, 0, 0, 0));
2158: PetscCall(DMNetworkFinalizeComponents(dm));
2159: /* View dmnetwork */
2160: PetscCall(DMViewFromOptions(dm, NULL, "-dmnetwork_view"));
2161: PetscCall(PetscLogEventEnd(DMNetwork_SetUpNetwork, dm, 0, 0, 0));
2162: PetscFunctionReturn(PETSC_SUCCESS);
2163: }
2165: /*@
2166: DMNetworkHasJacobian - Sets a global flag indicating that the user will provide sub-Jacobian matrices
2167: (developer note: this may eventually be replaced by DMNetworkSetOption(network,userjacobian,PETSC_TRUE))
2169: Collective
2171: Input Parameters:
2172: + dm - the `DMNETWORK` object
2173: . eflg - turn the option on (`PETSC_TRUE`) or off (`PETSC_FALSE`) if user provides Jacobian for edges
2174: - vflg - turn the option on (`PETSC_TRUE`) or off (`PETSC_FALSE`) if user provides Jacobian for vertices
2176: Level: intermediate
2178: .seealso: `DMNetworkSetOption()`
2179: @*/
2180: PetscErrorCode DMNetworkHasJacobian(DM dm, PetscBool eflg, PetscBool vflg)
2181: {
2182: DM_Network *network = (DM_Network *)dm->data;
2183: PetscInt nVertices = network->cloneshared->nVertices;
2185: PetscFunctionBegin;
2186: network->userEdgeJacobian = eflg;
2187: network->userVertexJacobian = vflg;
2189: if (eflg && !network->Je) PetscCall(PetscCalloc1(3 * network->cloneshared->nEdges, &network->Je));
2191: if (vflg && !network->Jv && nVertices) {
2192: PetscInt i, *vptr, nedges, vStart = network->cloneshared->vStart;
2193: PetscInt nedges_total;
2194: const PetscInt *edges;
2196: /* count nedges_total and set up the vptr offsets */
2197: nedges_total = 0;
2198: PetscCall(PetscMalloc1(nVertices + 1, &vptr));
2200: vptr[0] = 0;
2201: for (i = 0; i < nVertices; i++) {
2202: PetscCall(DMNetworkGetSupportingEdges(dm, i + vStart, &nedges, &edges));
2203: nedges_total += nedges;
2204: vptr[i + 1] = vptr[i] + 2 * nedges + 1;
2205: }
2207: PetscCall(PetscCalloc1(2 * nedges_total + nVertices, &network->Jv));
2208: network->Jvptr = vptr;
2209: }
2210: PetscFunctionReturn(PETSC_SUCCESS);
2211: }
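/* A minimal calling-sequence sketch (not part of the library source), assuming the network has been
   set up and the user has created the edge Jacobian blocks J[0..2] with the block sizes implied by
   the dofs at the edge and its two vertices; J, eStart, and eEnd are illustrative names:

     PetscCall(DMNetworkHasJacobian(dm, PETSC_TRUE, PETSC_TRUE)); // collective
     PetscCall(DMNetworkGetEdgeRange(dm, &eStart, &eEnd));
     for (PetscInt e = eStart; e < eEnd; e++) PetscCall(DMNetworkEdgeSetMatrix(dm, e, J));
*/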
2213: /*@
2214: DMNetworkEdgeSetMatrix - Sets user-provided Jacobian matrices for this edge to the network
2216: Not Collective
2218: Input Parameters:
2219: + dm - the `DMNETWORK` object
2220: . p - the edge point
2221: - J - array (size = 3) of Jacobian submatrices for this edge point:
2222: J[0]: this edge
2223: J[1] and J[2]: connected vertices, obtained by calling `DMNetworkGetConnectedVertices()`
2225: Level: advanced
2227: .seealso: `DM`, `DMNETWORK`, `DMNetworkVertexSetMatrix()`
2228: @*/
2229: PetscErrorCode DMNetworkEdgeSetMatrix(DM dm, PetscInt p, Mat J[])
2230: {
2231: DM_Network *network = (DM_Network *)dm->data;
2233: PetscFunctionBegin;
2234: PetscCheck(network->Je, PetscObjectComm((PetscObject)dm), PETSC_ERR_ORDER, "Must call DMNetworkHasJacobian() collectively before calling DMNetworkEdgeSetMatrix");
2236: if (J) {
2237: network->Je[3 * p] = J[0];
2238: network->Je[3 * p + 1] = J[1];
2239: network->Je[3 * p + 2] = J[2];
2240: }
2241: PetscFunctionReturn(PETSC_SUCCESS);
2242: }
2244: /*@
2245: DMNetworkVertexSetMatrix - Sets user-provided Jacobian matrix for this vertex to the network
2247: Not Collective
2249: Input Parameters:
2250: + dm - The `DMNETWORK` object
2251: . p - the vertex point
2252: - J - array (size = 2*(number of supporting edges) + 1) of Jacobian submatrices for this vertex point:
2253: J[0]: this vertex
2254: J[1+2*i]: i-th supporting edge
2255: J[1+2*i+1]: i-th connected vertex
2257: Level: advanced
2259: .seealso: `DM`, `DMNETWORK`, `DMNetworkEdgeSetMatrix()`
2260: @*/
2261: PetscErrorCode DMNetworkVertexSetMatrix(DM dm, PetscInt p, Mat J[])
2262: {
2263: DM_Network *network = (DM_Network *)dm->data;
2264: PetscInt i, *vptr, nedges, vStart = network->cloneshared->vStart;
2265: const PetscInt *edges;
2267: PetscFunctionBegin;
2268: PetscCheck(network->Jv, PetscObjectComm((PetscObject)dm), PETSC_ERR_ORDER, "Must call DMNetworkHasJacobian() collectively before calling DMNetworkVertexSetMatrix");
2270: if (J) {
2271: vptr = network->Jvptr;
2272: network->Jv[vptr[p - vStart]] = J[0]; /* Set Jacobian for this vertex */
2274: /* Set Jacobian for each supporting edge and connected vertex */
2275: PetscCall(DMNetworkGetSupportingEdges(dm, p, &nedges, &edges));
2276: for (i = 1; i <= 2 * nedges; i++) network->Jv[vptr[p - vStart] + i] = J[i];
2277: }
2278: PetscFunctionReturn(PETSC_SUCCESS);
2279: }
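/* A minimal usage sketch (not part of the library source) for one vertex v: Jv is a user-created
   array of 2*nedges+1 matrices laid out as described above; the names are illustrative.

     PetscInt        nedges;
     const PetscInt *edges;

     PetscCall(DMNetworkGetSupportingEdges(dm, v, &nedges, &edges));
     // Jv[0] = J(v,v); Jv[1+2*i] = J(v, edges[i]); Jv[2+2*i] = J(v, vertex connected to v via edges[i])
     PetscCall(DMNetworkVertexSetMatrix(dm, v, Jv));
*/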
2281: static inline PetscErrorCode MatSetPreallocationDenseblock_private(PetscInt nrows, PetscInt *rows, PetscInt ncols, PetscBool ghost, Vec vdnz, Vec vonz)
2282: {
2283: PetscInt j;
2284: PetscScalar val = (PetscScalar)ncols;
2286: PetscFunctionBegin;
2287: if (!ghost) {
2288: for (j = 0; j < nrows; j++) PetscCall(VecSetValues(vdnz, 1, &rows[j], &val, ADD_VALUES));
2289: } else {
2290: for (j = 0; j < nrows; j++) PetscCall(VecSetValues(vonz, 1, &rows[j], &val, ADD_VALUES));
2291: }
2292: PetscFunctionReturn(PETSC_SUCCESS);
2293: }
2295: static inline PetscErrorCode MatSetPreallocationUserblock_private(Mat Ju, PetscInt nrows, PetscInt *rows, PetscInt ncols, PetscBool ghost, Vec vdnz, Vec vonz)
2296: {
2297: PetscInt j, ncols_u;
2298: PetscScalar val;
2300: PetscFunctionBegin;
2301: if (!ghost) {
2302: for (j = 0; j < nrows; j++) {
2303: PetscCall(MatGetRow(Ju, j, &ncols_u, NULL, NULL));
2304: val = (PetscScalar)ncols_u;
2305: PetscCall(VecSetValues(vdnz, 1, &rows[j], &val, ADD_VALUES));
2306: PetscCall(MatRestoreRow(Ju, j, &ncols_u, NULL, NULL));
2307: }
2308: } else {
2309: for (j = 0; j < nrows; j++) {
2310: PetscCall(MatGetRow(Ju, j, &ncols_u, NULL, NULL));
2311: val = (PetscScalar)ncols_u;
2312: PetscCall(VecSetValues(vonz, 1, &rows[j], &val, ADD_VALUES));
2313: PetscCall(MatRestoreRow(Ju, j, &ncols_u, NULL, NULL));
2314: }
2315: }
2316: PetscFunctionReturn(PETSC_SUCCESS);
2317: }
2319: static inline PetscErrorCode MatSetPreallocationblock_private(Mat Ju, PetscInt nrows, PetscInt *rows, PetscInt ncols, PetscBool ghost, Vec vdnz, Vec vonz)
2320: {
2321: PetscFunctionBegin;
2322: if (Ju) {
2323: PetscCall(MatSetPreallocationUserblock_private(Ju, nrows, rows, ncols, ghost, vdnz, vonz));
2324: } else {
2325: PetscCall(MatSetPreallocationDenseblock_private(nrows, rows, ncols, ghost, vdnz, vonz));
2326: }
2327: PetscFunctionReturn(PETSC_SUCCESS);
2328: }
2330: static inline PetscErrorCode MatSetDenseblock_private(PetscInt nrows, PetscInt *rows, PetscInt ncols, PetscInt cstart, Mat *J)
2331: {
2332: PetscInt j, *cols;
2333: PetscScalar *zeros;
2335: PetscFunctionBegin;
2336: PetscCall(PetscCalloc2(ncols, &cols, nrows * ncols, &zeros));
2337: for (j = 0; j < ncols; j++) cols[j] = j + cstart;
2338: PetscCall(MatSetValues(*J, nrows, rows, ncols, cols, zeros, INSERT_VALUES));
2339: PetscCall(PetscFree2(cols, zeros));
2340: PetscFunctionReturn(PETSC_SUCCESS);
2341: }
2343: static inline PetscErrorCode MatSetUserblock_private(Mat Ju, PetscInt nrows, PetscInt *rows, PetscInt ncols, PetscInt cstart, Mat *J)
2344: {
2345: PetscInt j, M, N, row, col, ncols_u;
2346: const PetscInt *cols;
2347: PetscScalar zero = 0.0;
2349: PetscFunctionBegin;
2350: PetscCall(MatGetSize(Ju, &M, &N));
2351: PetscCheck(nrows == M && ncols == N, PetscObjectComm((PetscObject)Ju), PETSC_ERR_USER, "%" PetscInt_FMT " by %" PetscInt_FMT " must equal %" PetscInt_FMT " by %" PetscInt_FMT, nrows, ncols, M, N);
2353: for (row = 0; row < nrows; row++) {
2354: PetscCall(MatGetRow(Ju, row, &ncols_u, &cols, NULL));
2355: for (j = 0; j < ncols_u; j++) {
2356: col = cols[j] + cstart;
2357: PetscCall(MatSetValues(*J, 1, &rows[row], 1, &col, &zero, INSERT_VALUES));
2358: }
2359: PetscCall(MatRestoreRow(Ju, row, &ncols_u, &cols, NULL));
2360: }
2361: PetscFunctionReturn(PETSC_SUCCESS);
2362: }
2364: static inline PetscErrorCode MatSetblock_private(Mat Ju, PetscInt nrows, PetscInt *rows, PetscInt ncols, PetscInt cstart, Mat *J)
2365: {
2366: PetscFunctionBegin;
2367: if (Ju) {
2368: PetscCall(MatSetUserblock_private(Ju, nrows, rows, ncols, cstart, J));
2369: } else {
2370: PetscCall(MatSetDenseblock_private(nrows, rows, ncols, cstart, J));
2371: }
2372: PetscFunctionReturn(PETSC_SUCCESS);
2373: }
2375: /* Creates a global-to-local mapping from a local and a global section. This is akin to DMGetLocalToGlobalMapping() but without the need to provide a DM.
2376: */
2377: static PetscErrorCode CreateSubGlobalToLocalMapping_private(PetscSection globalsec, PetscSection localsec, ISLocalToGlobalMapping *ltog)
2378: {
2379: PetscInt i, size, dof;
2380: PetscInt *glob2loc;
2382: PetscFunctionBegin;
2383: PetscCall(PetscSectionGetStorageSize(localsec, &size));
2384: PetscCall(PetscMalloc1(size, &glob2loc));
2386: for (i = 0; i < size; i++) {
2387: PetscCall(PetscSectionGetOffset(globalsec, i, &dof));
2388: dof = (dof >= 0) ? dof : -(dof + 1);
2389: glob2loc[i] = dof;
2390: }
2392: PetscCall(ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)globalsec), 1, size, glob2loc, PETSC_OWN_POINTER, ltog));
2393: #if 0
2394: PetscCall(PetscIntView(size,glob2loc,PETSC_VIEWER_STDOUT_WORLD));
2395: #endif
2396: PetscFunctionReturn(PETSC_SUCCESS);
2397: }
2399: #include <petsc/private/matimpl.h>
2401: static PetscErrorCode DMCreateMatrix_Network_Nest(DM dm, Mat *J)
2402: {
2403: DM_Network *network = (DM_Network *)dm->data;
2404: PetscInt eDof, vDof;
2405: Mat j11, j12, j21, j22, bA[2][2];
2406: MPI_Comm comm;
2407: ISLocalToGlobalMapping eISMap, vISMap;
2409: PetscFunctionBegin;
2410: PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
2412: PetscCall(PetscSectionGetConstrainedStorageSize(network->edge.GlobalDofSection, &eDof));
2413: PetscCall(PetscSectionGetConstrainedStorageSize(network->vertex.GlobalDofSection, &vDof));
2415: PetscCall(MatCreate(comm, &j11));
2416: PetscCall(MatSetSizes(j11, eDof, eDof, PETSC_DETERMINE, PETSC_DETERMINE));
2417: PetscCall(MatSetType(j11, MATMPIAIJ));
2419: PetscCall(MatCreate(comm, &j12));
2420: PetscCall(MatSetSizes(j12, eDof, vDof, PETSC_DETERMINE, PETSC_DETERMINE));
2421: PetscCall(MatSetType(j12, MATMPIAIJ));
2423: PetscCall(MatCreate(comm, &j21));
2424: PetscCall(MatSetSizes(j21, vDof, eDof, PETSC_DETERMINE, PETSC_DETERMINE));
2425: PetscCall(MatSetType(j21, MATMPIAIJ));
2427: PetscCall(MatCreate(comm, &j22));
2428: PetscCall(MatSetSizes(j22, vDof, vDof, PETSC_DETERMINE, PETSC_DETERMINE));
2429: PetscCall(MatSetType(j22, MATMPIAIJ));
2431: bA[0][0] = j11;
2432: bA[0][1] = j12;
2433: bA[1][0] = j21;
2434: bA[1][1] = j22;
2436: PetscCall(CreateSubGlobalToLocalMapping_private(network->edge.GlobalDofSection, network->edge.DofSection, &eISMap));
2437: PetscCall(CreateSubGlobalToLocalMapping_private(network->vertex.GlobalDofSection, network->vertex.DofSection, &vISMap));
2439: PetscCall(MatSetLocalToGlobalMapping(j11, eISMap, eISMap));
2440: PetscCall(MatSetLocalToGlobalMapping(j12, eISMap, vISMap));
2441: PetscCall(MatSetLocalToGlobalMapping(j21, vISMap, eISMap));
2442: PetscCall(MatSetLocalToGlobalMapping(j22, vISMap, vISMap));
2444: PetscCall(MatSetUp(j11));
2445: PetscCall(MatSetUp(j12));
2446: PetscCall(MatSetUp(j21));
2447: PetscCall(MatSetUp(j22));
2449: PetscCall(MatCreateNest(comm, 2, NULL, 2, NULL, &bA[0][0], J));
2450: PetscCall(MatSetUp(*J));
2451: PetscCall(MatNestSetVecType(*J, VECNEST));
2452: PetscCall(MatDestroy(&j11));
2453: PetscCall(MatDestroy(&j12));
2454: PetscCall(MatDestroy(&j21));
2455: PetscCall(MatDestroy(&j22));
2457: PetscCall(MatAssemblyBegin(*J, MAT_FINAL_ASSEMBLY));
2458: PetscCall(MatAssemblyEnd(*J, MAT_FINAL_ASSEMBLY));
2459: PetscCall(MatSetOption(*J, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_TRUE));
2461: /* Free structures */
2462: PetscCall(ISLocalToGlobalMappingDestroy(&eISMap));
2463: PetscCall(ISLocalToGlobalMappingDestroy(&vISMap));
2464: PetscFunctionReturn(PETSC_SUCCESS);
2465: }
2467: PetscErrorCode DMCreateMatrix_Network(DM dm, Mat *J)
2468: {
2469: DM_Network *network = (DM_Network *)dm->data;
2470: PetscInt eStart, eEnd, vStart, vEnd, rstart, nrows, *rows, localSize;
2471: PetscInt cstart, ncols, j, e, v;
2472: PetscBool ghost, ghost_vc, ghost2, isNest;
2473: Mat Juser;
2474: PetscSection sectionGlobal;
2475: PetscInt nedges, *vptr = NULL, vc, *rows_v; /* suppress maybe-uninitialized warning */
2476: const PetscInt *edges, *cone;
2477: MPI_Comm comm;
2478: MatType mtype;
2479: Vec vd_nz, vo_nz;
2480: PetscInt *dnnz, *onnz;
2481: PetscScalar *vdnz, *vonz;
2483: PetscFunctionBegin;
2484: mtype = dm->mattype;
2485: PetscCall(PetscStrcmp(mtype, MATNEST, &isNest));
2486: if (isNest) {
2487: PetscCall(DMCreateMatrix_Network_Nest(dm, J));
2488: PetscCall(MatSetDM(*J, dm));
2489: PetscFunctionReturn(PETSC_SUCCESS);
2490: }
2492: if (!network->userEdgeJacobian && !network->userVertexJacobian) {
2493: /* user does not provide Jacobian blocks */
2494: PetscCall(DMCreateMatrix_Plex(network->plex, J));
2495: PetscCall(MatSetDM(*J, dm));
2496: PetscFunctionReturn(PETSC_SUCCESS);
2497: }
2499: PetscCall(MatCreate(PetscObjectComm((PetscObject)dm), J));
2500: PetscCall(DMGetGlobalSection(network->plex, &sectionGlobal));
2501: PetscCall(PetscSectionGetConstrainedStorageSize(sectionGlobal, &localSize));
2502: PetscCall(MatSetSizes(*J, localSize, localSize, PETSC_DETERMINE, PETSC_DETERMINE));
2504: PetscCall(MatSetType(*J, MATAIJ));
2505: PetscCall(MatSetFromOptions(*J));
2507: /* (1) Set matrix preallocation */
2508: /*------------------------------*/
2509: PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
2510: PetscCall(VecCreate(comm, &vd_nz));
2511: PetscCall(VecSetSizes(vd_nz, localSize, PETSC_DECIDE));
2512: PetscCall(VecSetFromOptions(vd_nz));
2513: PetscCall(VecSet(vd_nz, 0.0));
2514: PetscCall(VecDuplicate(vd_nz, &vo_nz));
2516: /* Set preallocation for edges */
2517: /*-----------------------------*/
2518: PetscCall(DMNetworkGetEdgeRange(dm, &eStart, &eEnd));
2520: PetscCall(PetscMalloc1(localSize, &rows));
2521: for (e = eStart; e < eEnd; e++) {
2522: /* Get row indices */
2523: PetscCall(DMNetworkGetGlobalVecOffset(dm, e, ALL_COMPONENTS, &rstart));
2524: PetscCall(PetscSectionGetDof(network->DofSection, e, &nrows));
2525: if (nrows) {
2526: for (j = 0; j < nrows; j++) rows[j] = j + rstart;
2528: /* Set preallocation for connected vertices */
2529: PetscCall(DMNetworkGetConnectedVertices(dm, e, &cone));
2530: for (v = 0; v < 2; v++) {
2531: PetscCall(PetscSectionGetDof(network->DofSection, cone[v], &ncols));
2533: if (network->Je) {
2534: Juser = network->Je[3 * e + 1 + v]; /* Jacobian(e,v) */
2535: } else Juser = NULL;
2536: PetscCall(DMNetworkIsGhostVertex(dm, cone[v], &ghost));
2537: PetscCall(MatSetPreallocationblock_private(Juser, nrows, rows, ncols, ghost, vd_nz, vo_nz));
2538: }
2540: /* Set preallocation for edge self */
2541: cstart = rstart;
2542: if (network->Je) {
2543: Juser = network->Je[3 * e]; /* Jacobian(e,e) */
2544: } else Juser = NULL;
2545: PetscCall(MatSetPreallocationblock_private(Juser, nrows, rows, nrows, PETSC_FALSE, vd_nz, vo_nz));
2546: }
2547: }
2549: /* Set preallocation for vertices */
2550: /*--------------------------------*/
2551: PetscCall(DMNetworkGetVertexRange(dm, &vStart, &vEnd));
2552: if (vEnd - vStart) vptr = network->Jvptr;
2554: for (v = vStart; v < vEnd; v++) {
2555: /* Get row indices */
2556: PetscCall(DMNetworkGetGlobalVecOffset(dm, v, ALL_COMPONENTS, &rstart));
2557: PetscCall(PetscSectionGetDof(network->DofSection, v, &nrows));
2558: if (!nrows) continue;
2560: PetscCall(DMNetworkIsGhostVertex(dm, v, &ghost));
2561: if (ghost) {
2562: PetscCall(PetscMalloc1(nrows, &rows_v));
2563: } else {
2564: rows_v = rows;
2565: }
2567: for (j = 0; j < nrows; j++) rows_v[j] = j + rstart;
2569: /* Get supporting edges and connected vertices */
2570: PetscCall(DMNetworkGetSupportingEdges(dm, v, &nedges, &edges));
2572: for (e = 0; e < nedges; e++) {
2573: /* Supporting edges */
2574: PetscCall(DMNetworkGetGlobalVecOffset(dm, edges[e], ALL_COMPONENTS, &cstart));
2575: PetscCall(PetscSectionGetDof(network->DofSection, edges[e], &ncols));
2577: if (network->Jv) {
2578: Juser = network->Jv[vptr[v - vStart] + 2 * e + 1]; /* Jacobian(v,e) */
2579: } else Juser = NULL;
2580: PetscCall(MatSetPreallocationblock_private(Juser, nrows, rows_v, ncols, ghost, vd_nz, vo_nz));
2582: /* Connected vertices */
2583: PetscCall(DMNetworkGetConnectedVertices(dm, edges[e], &cone));
2584: vc = (v == cone[0]) ? cone[1] : cone[0];
2585: PetscCall(DMNetworkIsGhostVertex(dm, vc, &ghost_vc));
2587: PetscCall(PetscSectionGetDof(network->DofSection, vc, &ncols));
2589: if (network->Jv) {
2590: Juser = network->Jv[vptr[v - vStart] + 2 * e + 2]; /* Jacobian(v,vc) */
2591: } else Juser = NULL;
2592: if (ghost_vc || ghost) {
2593: ghost2 = PETSC_TRUE;
2594: } else {
2595: ghost2 = PETSC_FALSE;
2596: }
2597: PetscCall(MatSetPreallocationblock_private(Juser, nrows, rows_v, ncols, ghost2, vd_nz, vo_nz));
2598: }
2600: /* Set preallocation for vertex self */
2601: PetscCall(DMNetworkIsGhostVertex(dm, v, &ghost));
2602: if (!ghost) {
2603: PetscCall(DMNetworkGetGlobalVecOffset(dm, v, ALL_COMPONENTS, &cstart));
2604: if (network->Jv) {
2605: Juser = network->Jv[vptr[v - vStart]]; /* Jacobian(v,v) */
2606: } else Juser = NULL;
2607: PetscCall(MatSetPreallocationblock_private(Juser, nrows, rows_v, nrows, PETSC_FALSE, vd_nz, vo_nz));
2608: }
2609: if (ghost) PetscCall(PetscFree(rows_v));
2610: }
2612: PetscCall(VecAssemblyBegin(vd_nz));
2613: PetscCall(VecAssemblyBegin(vo_nz));
2615: PetscCall(PetscMalloc2(localSize, &dnnz, localSize, &onnz));
2617: PetscCall(VecAssemblyEnd(vd_nz));
2618: PetscCall(VecAssemblyEnd(vo_nz));
2620: PetscCall(VecGetArray(vd_nz, &vdnz));
2621: PetscCall(VecGetArray(vo_nz, &vonz));
2622: for (j = 0; j < localSize; j++) {
2623: dnnz[j] = (PetscInt)PetscRealPart(vdnz[j]);
2624: onnz[j] = (PetscInt)PetscRealPart(vonz[j]);
2625: }
2626: PetscCall(VecRestoreArray(vd_nz, &vdnz));
2627: PetscCall(VecRestoreArray(vo_nz, &vonz));
2628: PetscCall(VecDestroy(&vd_nz));
2629: PetscCall(VecDestroy(&vo_nz));
2631: PetscCall(MatSeqAIJSetPreallocation(*J, 0, dnnz));
2632: PetscCall(MatMPIAIJSetPreallocation(*J, 0, dnnz, 0, onnz));
2633: PetscCall(MatSetOption(*J, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_TRUE));
2635: PetscCall(PetscFree2(dnnz, onnz));
2637: /* (2) Set matrix entries for edges */
2638: /*----------------------------------*/
2639: for (e = eStart; e < eEnd; e++) {
2640: /* Get row indices */
2641: PetscCall(DMNetworkGetGlobalVecOffset(dm, e, ALL_COMPONENTS, &rstart));
2642: PetscCall(PetscSectionGetDof(network->DofSection, e, &nrows));
2643: if (nrows) {
2644: for (j = 0; j < nrows; j++) rows[j] = j + rstart;
2646: /* Set matrix entries for connected vertices */
2647: PetscCall(DMNetworkGetConnectedVertices(dm, e, &cone));
2648: for (v = 0; v < 2; v++) {
2649: PetscCall(DMNetworkGetGlobalVecOffset(dm, cone[v], ALL_COMPONENTS, &cstart));
2650: PetscCall(PetscSectionGetDof(network->DofSection, cone[v], &ncols));
2652: if (network->Je) {
2653: Juser = network->Je[3 * e + 1 + v]; /* Jacobian(e,v) */
2654: } else Juser = NULL;
2655: PetscCall(MatSetblock_private(Juser, nrows, rows, ncols, cstart, J));
2656: }
2658: /* Set matrix entries for edge self */
2659: cstart = rstart;
2660: if (network->Je) {
2661: Juser = network->Je[3 * e]; /* Jacobian(e,e) */
2662: } else Juser = NULL;
2663: PetscCall(MatSetblock_private(Juser, nrows, rows, nrows, cstart, J));
2664: }
2665: }
2667: /* Set matrix entries for vertices */
2668: /*---------------------------------*/
2669: for (v = vStart; v < vEnd; v++) {
2670: /* Get row indices */
2671: PetscCall(DMNetworkGetGlobalVecOffset(dm, v, ALL_COMPONENTS, &rstart));
2672: PetscCall(PetscSectionGetDof(network->DofSection, v, &nrows));
2673: if (!nrows) continue;
2675: PetscCall(DMNetworkIsGhostVertex(dm, v, &ghost));
2676: if (ghost) {
2677: PetscCall(PetscMalloc1(nrows, &rows_v));
2678: } else {
2679: rows_v = rows;
2680: }
2681: for (j = 0; j < nrows; j++) rows_v[j] = j + rstart;
2683: /* Get supporting edges and connected vertices */
2684: PetscCall(DMNetworkGetSupportingEdges(dm, v, &nedges, &edges));
2686: for (e = 0; e < nedges; e++) {
2687: /* Supporting edges */
2688: PetscCall(DMNetworkGetGlobalVecOffset(dm, edges[e], ALL_COMPONENTS, &cstart));
2689: PetscCall(PetscSectionGetDof(network->DofSection, edges[e], &ncols));
2691: if (network->Jv) {
2692: Juser = network->Jv[vptr[v - vStart] + 2 * e + 1]; /* Jacobian(v,e) */
2693: } else Juser = NULL;
2694: PetscCall(MatSetblock_private(Juser, nrows, rows_v, ncols, cstart, J));
2696: /* Connected vertices */
2697: PetscCall(DMNetworkGetConnectedVertices(dm, edges[e], &cone));
2698: vc = (v == cone[0]) ? cone[1] : cone[0];
2700: PetscCall(DMNetworkGetGlobalVecOffset(dm, vc, ALL_COMPONENTS, &cstart));
2701: PetscCall(PetscSectionGetDof(network->DofSection, vc, &ncols));
2703: if (network->Jv) {
2704: Juser = network->Jv[vptr[v - vStart] + 2 * e + 2]; /* Jacobian(v,vc) */
2705: } else Juser = NULL;
2706: PetscCall(MatSetblock_private(Juser, nrows, rows_v, ncols, cstart, J));
2707: }
2709: /* Set matrix entries for vertex self */
2710: if (!ghost) {
2711: PetscCall(DMNetworkGetGlobalVecOffset(dm, v, ALL_COMPONENTS, &cstart));
2712: if (network->Jv) {
2713: Juser = network->Jv[vptr[v - vStart]]; /* Jacobian(v,v) */
2714: } else Juser = NULL;
2715: PetscCall(MatSetblock_private(Juser, nrows, rows_v, nrows, cstart, J));
2716: }
2717: if (ghost) PetscCall(PetscFree(rows_v));
2718: }
2719: PetscCall(PetscFree(rows));
2721: PetscCall(MatAssemblyBegin(*J, MAT_FINAL_ASSEMBLY));
2722: PetscCall(MatAssemblyEnd(*J, MAT_FINAL_ASSEMBLY));
2724: PetscCall(MatSetDM(*J, dm));
2725: PetscFunctionReturn(PETSC_SUCCESS);
2726: }
2728: static PetscErrorCode DMNetworkDestroyComponentData(DM dm)
2729: {
2730: DM_Network *network = (DM_Network *)dm->data;
2731: PetscInt j, np;
2733: PetscFunctionBegin;
2734: if (network->header) {
2735: np = network->cloneshared->pEnd - network->cloneshared->pStart;
2736: for (j = 0; j < np; j++) {
2737: PetscCall(PetscFree5(network->header[j].size, network->header[j].key, network->header[j].offset, network->header[j].nvar, network->header[j].offsetvarrel));
2738: PetscCall(PetscFree(network->cvalue[j].data));
2739: }
2740: PetscCall(PetscFree2(network->header, network->cvalue));
2741: }
2742: PetscFunctionReturn(PETSC_SUCCESS);
2743: }
2745: PetscErrorCode DMDestroy_Network(DM dm)
2746: {
2747: DM_Network *network = (DM_Network *)dm->data;
2748: PetscInt j;
2750: PetscFunctionBegin;
2751: /*
2752: Developer Note: Due to the mixed topological definition of DMNetwork and data defined ON the
2753: network like DofSection, DataSection, *componentdataarray, and friends, when cloning, we share
2754: only the true topological data, and make our own data ON the network. Thus refct only refers
2755: to the number of references to topological data, and data ON the network is always destroyed.
2756: It is understood this is atypical for a DM, but is very intentional.
2757: */
2759: /* Always destroy data ON the network */
2760: PetscCall(PetscFree(network->Je));
2761: if (network->Jv) {
2762: PetscCall(PetscFree(network->Jvptr));
2763: PetscCall(PetscFree(network->Jv));
2764: }
2765: PetscCall(PetscSectionDestroy(&network->DataSection));
2766: PetscCall(PetscSectionDestroy(&network->DofSection));
2767: PetscCall(PetscFree(network->component));
2768: PetscCall(PetscFree(network->componentdataarray));
2769: PetscCall(DMNetworkDestroyComponentData(dm));
2771: PetscCall(DMDestroy(&network->plex)); /* this is cloned in DMClone_Network, so safe to destroy */
2773: /*
2774: Developer Note: The DMNetworkVertexInfo and DMNetworkEdgeInfo data structures are completely
2775: destroyed as they are again a mix of topological data:
2776: ISLocalToGlobalMapping mapping;
2777: PetscSF sf;
2778: and data ON the network
2779: PetscSection DofSection;
2780: PetscSection GlobalDofSection;
2781: And the only way to access them currently is through DMNetworkAssembleGraphStructures which assembles
2782: everything. So we must destroy everything and require DMNetworkAssembleGraphStructures is called again
2783: for any clone.
2784: */
2785: PetscCall(ISLocalToGlobalMappingDestroy(&network->vertex.mapping));
2786: PetscCall(PetscSectionDestroy(&network->vertex.DofSection));
2787: PetscCall(PetscSectionDestroy(&network->vertex.GlobalDofSection));
2788: PetscCall(PetscSFDestroy(&network->vertex.sf));
2789: /* edge */
2790: PetscCall(ISLocalToGlobalMappingDestroy(&network->edge.mapping));
2791: PetscCall(PetscSectionDestroy(&network->edge.DofSection));
2792: PetscCall(PetscSectionDestroy(&network->edge.GlobalDofSection));
2793: PetscCall(PetscSFDestroy(&network->edge.sf));
2794: /* viewer options */
2795: PetscCall(ISDestroy(&network->vieweroptions.viewranks));
2796: /* Destroy the potentially cloneshared data */
2797: if (--network->cloneshared->refct <= 0) {
2798: /* Developer Note: I'm not sure if vltog can be reused or not, as I'm not sure what its purpose is. I
2799: naively think it can be reused. */
2800: PetscCall(PetscFree(network->cloneshared->vltog));
2801: for (j = 0; j < network->cloneshared->Nsvtx; j++) PetscCall(PetscFree(network->cloneshared->svtx[j].sv));
2802: PetscCall(PetscFree(network->cloneshared->svtx));
2803: PetscCall(PetscFree2(network->cloneshared->subnetedge, network->cloneshared->subnetvtx));
2804: PetscCall(PetscHMapIDestroy(&network->cloneshared->svtable));
2805: PetscCall(PetscFree(network->cloneshared->subnet));
2806: PetscCall(PetscFree(network->cloneshared));
2807: }
2808: PetscCall(PetscFree(network)); /* Always freed as this structure is copied in a clone, not cloneshared */
2809: PetscFunctionReturn(PETSC_SUCCESS);
2810: }
2812: PetscErrorCode DMGlobalToLocalBegin_Network(DM dm, Vec g, InsertMode mode, Vec l)
2813: {
2814: DM_Network *network = (DM_Network *)dm->data;
2816: PetscFunctionBegin;
2817: PetscCall(DMGlobalToLocalBegin(network->plex, g, mode, l));
2818: PetscFunctionReturn(PETSC_SUCCESS);
2819: }
2821: PetscErrorCode DMGlobalToLocalEnd_Network(DM dm, Vec g, InsertMode mode, Vec l)
2822: {
2823: DM_Network *network = (DM_Network *)dm->data;
2825: PetscFunctionBegin;
2826: PetscCall(DMGlobalToLocalEnd(network->plex, g, mode, l));
2827: PetscFunctionReturn(PETSC_SUCCESS);
2828: }
2830: PetscErrorCode DMLocalToGlobalBegin_Network(DM dm, Vec l, InsertMode mode, Vec g)
2831: {
2832: DM_Network *network = (DM_Network *)dm->data;
2834: PetscFunctionBegin;
2835: PetscCall(DMLocalToGlobalBegin(network->plex, l, mode, g));
2836: PetscFunctionReturn(PETSC_SUCCESS);
2837: }
2839: PetscErrorCode DMLocalToGlobalEnd_Network(DM dm, Vec l, InsertMode mode, Vec g)
2840: {
2841: DM_Network *network = (DM_Network *)dm->data;
2843: PetscFunctionBegin;
2844: PetscCall(DMLocalToGlobalEnd(network->plex, l, mode, g));
2845: PetscFunctionReturn(PETSC_SUCCESS);
2846: }
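/* A minimal usage sketch (not part of the library source): the hooks above simply forward to the
   internal plex, so the standard DM vector workflow applies.

     Vec X, Xlocal;

     PetscCall(DMCreateGlobalVector(dm, &X));
     PetscCall(DMCreateLocalVector(dm, &Xlocal));
     PetscCall(DMGlobalToLocalBegin(dm, X, INSERT_VALUES, Xlocal));
     PetscCall(DMGlobalToLocalEnd(dm, X, INSERT_VALUES, Xlocal));
     // ... work with the ghosted values in Xlocal ...
     PetscCall(VecDestroy(&Xlocal));
     PetscCall(VecDestroy(&X));
*/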
2848: /*@
2849: DMNetworkGetVertexLocalToGlobalOrdering - Get vertex global index
2851: Not Collective
2853: Input Parameters:
2854: + dm - the `DMNETWORK` object
2855: - vloc - local vertex ordering, starting from 0
2857: Output Parameter:
2858: . vg - global vertex ordering, starting from 0
2860: Level: advanced
2862: .seealso: `DM`, `DMNETWORK`, `DMNetworkSetVertexLocalToGlobalOrdering()`
2863: @*/
2864: PetscErrorCode DMNetworkGetVertexLocalToGlobalOrdering(DM dm, PetscInt vloc, PetscInt *vg)
2865: {
2866: DM_Network *network = (DM_Network *)dm->data;
2867: PetscInt *vltog = network->cloneshared->vltog;
2869: PetscFunctionBegin;
2870: PetscCheck(vltog, PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "Must call DMNetworkSetVertexLocalToGlobalOrdering() first");
2871: *vg = vltog[vloc];
2872: PetscFunctionReturn(PETSC_SUCCESS);
2873: }
2875: /*@
2876: DMNetworkSetVertexLocalToGlobalOrdering - Create and set up the vertex local-to-global map
2878: Collective
2880: Input Parameter:
2881: . dm - the `DMNETWORK` object
2883: Level: advanced
2885: .seealso: `DM`, `DMNETWORK`, `DMNetworkGetGlobalVertexIndex()`
2886: @*/
2887: PetscErrorCode DMNetworkSetVertexLocalToGlobalOrdering(DM dm)
2888: {
2889: DM_Network *network = (DM_Network *)dm->data;
2890: MPI_Comm comm;
2891: PetscMPIInt rank, size, *displs = NULL, *recvcounts = NULL, remoterank;
2892: PetscBool ghost;
2893: PetscInt *vltog, nroots, nleaves, i, *vrange, k, N, lidx;
2894: const PetscSFNode *iremote;
2895: PetscSF vsf;
2896: Vec Vleaves, Vleaves_seq;
2897: VecScatter ctx;
2898: PetscScalar *varr, val;
2899: const PetscScalar *varr_read;
2901: PetscFunctionBegin;
2902: PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
2903: PetscCallMPI(MPI_Comm_size(comm, &size));
2904: PetscCallMPI(MPI_Comm_rank(comm, &rank));
2906: if (size == 1) {
2907: nroots = network->cloneshared->vEnd - network->cloneshared->vStart;
2908: PetscCall(PetscMalloc1(nroots, &vltog));
2909: for (i = 0; i < nroots; i++) vltog[i] = i;
2910: network->cloneshared->vltog = vltog;
2911: PetscFunctionReturn(PETSC_SUCCESS);
2912: }
2914: PetscCheck(network->cloneshared->distributecalled, comm, PETSC_ERR_ARG_WRONGSTATE, "Must call DMNetworkDistribute() first");
2915: if (network->cloneshared->vltog) PetscCall(PetscFree(network->cloneshared->vltog));
2917: PetscCall(DMNetworkSetSubMap_private(dm, network->cloneshared->vStart, network->cloneshared->vEnd, &network->vertex.mapping));
2918: PetscCall(PetscSFGetSubSF(network->plex->sf, network->vertex.mapping, &network->vertex.sf));
2919: vsf = network->vertex.sf;
2921: PetscCall(PetscMalloc3(size + 1, &vrange, size, &displs, size, &recvcounts));
2922: PetscCall(PetscSFGetGraph(vsf, &nroots, &nleaves, NULL, &iremote));
2924: for (i = 0; i < size; i++) {
2925: displs[i] = i;
2926: recvcounts[i] = 1;
2927: }
2929: i = nroots - nleaves; /* local number of vertices, excluding ghosts */
2930: vrange[0] = 0;
2931: PetscCallMPI(MPI_Allgatherv(&i, 1, MPIU_INT, vrange + 1, recvcounts, displs, MPIU_INT, comm));
2932: for (i = 2; i < size + 1; i++) vrange[i] += vrange[i - 1];
2934: PetscCall(PetscMalloc1(nroots, &vltog));
2935: network->cloneshared->vltog = vltog;
2937: /* Set vltog for non-ghost vertices */
2938: k = 0;
2939: for (i = 0; i < nroots; i++) {
2940: PetscCall(DMNetworkIsGhostVertex(dm, i + network->cloneshared->vStart, &ghost));
2941: if (ghost) continue;
2942: vltog[i] = vrange[rank] + k++;
2943: }
2944: PetscCall(PetscFree3(vrange, displs, recvcounts));
2946: /* Set vltog for ghost vertices */
2947: /* (a) create parallel Vleaves and sequential Vleaves_seq to convert local iremote[*].index to global index */
2948: PetscCall(VecCreate(comm, &Vleaves));
2949: PetscCall(VecSetSizes(Vleaves, 2 * nleaves, PETSC_DETERMINE));
2950: PetscCall(VecSetFromOptions(Vleaves));
2951: PetscCall(VecGetArray(Vleaves, &varr));
2952: for (i = 0; i < nleaves; i++) {
2953: varr[2 * i] = (PetscScalar)(iremote[i].rank); /* rank of remote process */
2954: varr[2 * i + 1] = (PetscScalar)(iremote[i].index); /* local index in remote process */
2955: }
2956: PetscCall(VecRestoreArray(Vleaves, &varr));
2958: /* (b) scatter local info to remote processes via VecScatter() */
2959: PetscCall(VecScatterCreateToAll(Vleaves, &ctx, &Vleaves_seq));
2960: PetscCall(VecScatterBegin(ctx, Vleaves, Vleaves_seq, INSERT_VALUES, SCATTER_FORWARD));
2961: PetscCall(VecScatterEnd(ctx, Vleaves, Vleaves_seq, INSERT_VALUES, SCATTER_FORWARD));
2963: /* (c) convert local indices to global indices in parallel vector Vleaves */
2964: PetscCall(VecGetSize(Vleaves_seq, &N));
2965: PetscCall(VecGetArrayRead(Vleaves_seq, &varr_read));
2966: for (i = 0; i < N; i += 2) {
2967: remoterank = (PetscMPIInt)PetscRealPart(varr_read[i]);
2968: if (remoterank == rank) {
2969: k = i + 1; /* row number */
2970: lidx = (PetscInt)PetscRealPart(varr_read[i + 1]);
2971: val = (PetscScalar)vltog[lidx]; /* global index for non-ghost vertex computed above */
2972: PetscCall(VecSetValues(Vleaves, 1, &k, &val, INSERT_VALUES));
2973: }
2974: }
2975: PetscCall(VecRestoreArrayRead(Vleaves_seq, &varr_read));
2976: PetscCall(VecAssemblyBegin(Vleaves));
2977: PetscCall(VecAssemblyEnd(Vleaves));
2979: /* (d) Set vltog for ghost vertices by copying local values of Vleaves */
2980: PetscCall(VecGetArrayRead(Vleaves, &varr_read));
2981: k = 0;
2982: for (i = 0; i < nroots; i++) {
2983: PetscCall(DMNetworkIsGhostVertex(dm, i + network->cloneshared->vStart, &ghost));
2984: if (!ghost) continue;
2985: vltog[i] = (PetscInt)PetscRealPart(varr_read[2 * k + 1]);
2986: k++;
2987: }
2988: PetscCall(VecRestoreArrayRead(Vleaves, &varr_read));
2990: PetscCall(VecDestroy(&Vleaves));
2991: PetscCall(VecDestroy(&Vleaves_seq));
2992: PetscCall(VecScatterDestroy(&ctx));
2993: PetscFunctionReturn(PETSC_SUCCESS);
2994: }
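/*
   A minimal usage sketch of the vertex local-to-global ordering (the variable names below are
   illustrative, not part of this file): build the map collectively once, then query it for each
   local vertex.

     PetscInt v, vStart, vEnd, vg;

     PetscCall(DMNetworkSetVertexLocalToGlobalOrdering(dm));
     PetscCall(DMNetworkGetVertexRange(dm, &vStart, &vEnd));
     for (v = vStart; v < vEnd; v++) {
       PetscCall(DMNetworkGetVertexLocalToGlobalOrdering(dm, v - vStart, &vg));
     }

   Here vg is the global index of the vertex with local index v - vStart; for ghost vertices the
   map returns the global index assigned by the owning process.
*/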
2996: static inline PetscErrorCode DMISAddSize_private(DM_Network *network, PetscInt p, PetscInt numkeys, PetscInt keys[], PetscInt blocksize[], PetscInt nselectedvar[], PetscInt *nidx)
2997: {
2998: PetscInt i, j, ncomps, nvar, key, offset = 0;
2999: DMNetworkComponentHeader header;
3001: PetscFunctionBegin;
3002: PetscCall(PetscSectionGetOffset(network->DataSection, p, &offset));
3003: ncomps = ((DMNetworkComponentHeader)(network->componentdataarray + offset))->ndata;
3004: header = (DMNetworkComponentHeader)(network->componentdataarray + offset);
3006: for (i = 0; i < ncomps; i++) {
3007: key = header->key[i];
3008: nvar = header->nvar[i];
3009: for (j = 0; j < numkeys; j++) {
3010: if (key == keys[j]) {
3011: if (!blocksize || blocksize[j] == -1) {
3012: *nidx += nvar;
3013: } else {
3014: *nidx += nselectedvar[j] * nvar / blocksize[j];
3015: }
3016: }
3017: }
3018: }
3019: PetscFunctionReturn(PETSC_SUCCESS);
3020: }
3022: static inline PetscErrorCode DMISComputeIdx_private(DM dm, PetscInt p, PetscInt numkeys, PetscInt keys[], PetscInt blocksize[], PetscInt nselectedvar[], PetscInt *selectedvar[], PetscInt *ii, PetscInt *idx)
3023: {
3024: PetscInt i, j, ncomps, nvar, key, offsetg, k, k1, offset = 0;
3025: DM_Network *network = (DM_Network *)dm->data;
3026: DMNetworkComponentHeader header;
3028: PetscFunctionBegin;
3029: PetscCall(PetscSectionGetOffset(network->DataSection, p, &offset));
3030: ncomps = ((DMNetworkComponentHeader)(network->componentdataarray + offset))->ndata;
3031: header = (DMNetworkComponentHeader)(network->componentdataarray + offset);
3033: for (i = 0; i < ncomps; i++) {
3034: key = header->key[i];
3035: nvar = header->nvar[i];
3036: for (j = 0; j < numkeys; j++) {
3037: if (key != keys[j]) continue;
3039: PetscCall(DMNetworkGetGlobalVecOffset(dm, p, i, &offsetg));
3040: if (!blocksize || blocksize[j] == -1) {
3041: for (k = 0; k < nvar; k++) idx[(*ii)++] = offsetg + k;
3042: } else {
3043: for (k = 0; k < nvar; k += blocksize[j]) {
3044: for (k1 = 0; k1 < nselectedvar[j]; k1++) idx[(*ii)++] = offsetg + k + selectedvar[j][k1];
3045: }
3046: }
3047: }
3048: }
3049: PetscFunctionReturn(PETSC_SUCCESS);
3050: }
3052: /*@
3053: DMNetworkCreateIS - Create an index set object from the global vector of the network
3055: Collective
3057: Input Parameters:
3058: + dm - `DMNETWORK` object
3059: . numkeys - number of keys
3060: . keys - array of keys that define the components of the variables you wish to extract
3061: . blocksize - block size of the variables associated with the component
3062: . nselectedvar - number of variables in each block to select
3063: - selectedvar - the offsets within each block of the variables to select
3065: Output Parameter:
3066: . is - the index set
3068: Level: advanced
3070: Notes:
3071: Use a blocksize[i] of -1 to select all the variables of the i-th component, in which case nselectedvar[i] and selectedvar[i] are ignored. Pass `NULL`, `NULL`, `NULL` for blocksize, nselectedvar, and selectedvar to obtain all the values of every selected component. For example, `DMNetworkCreateIS`(dm,1,&key,`NULL`,`NULL`,`NULL`,&is) returns an index set that extracts all the variables of the component with the given key.
3073: .seealso: `DM`, `DMNETWORK`, `DMNetworkCreate()`, `ISCreateGeneral()`, `DMNetworkCreateLocalIS()`
3074: @*/
3075: PetscErrorCode DMNetworkCreateIS(DM dm, PetscInt numkeys, PetscInt keys[], PetscInt blocksize[], PetscInt nselectedvar[], PetscInt *selectedvar[], IS *is)
3076: {
3077: MPI_Comm comm;
3078: DM_Network *network = (DM_Network *)dm->data;
3079: PetscInt i, p, estart, eend, vstart, vend, nidx, *idx;
3080: PetscBool ghost;
3082: PetscFunctionBegin;
3083: PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
3085: /* Check input parameters */
3086: for (i = 0; i < numkeys; i++) {
3087: if (!blocksize || blocksize[i] == -1) continue;
3088: PetscCheck(nselectedvar[i] <= blocksize[i], PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "number of selected variables %" PetscInt_FMT " cannot be larger than blocksize %" PetscInt_FMT, nselectedvar[i], blocksize[i]);
3089: }
3091: PetscCall(DMNetworkGetEdgeRange(dm, &estart, &eend));
3092: PetscCall(DMNetworkGetVertexRange(dm, &vstart, &vend));
3094: /* Get local number of idx */
3095: nidx = 0;
3096: for (p = estart; p < eend; p++) PetscCall(DMISAddSize_private(network, p, numkeys, keys, blocksize, nselectedvar, &nidx));
3097: for (p = vstart; p < vend; p++) {
3098: PetscCall(DMNetworkIsGhostVertex(dm, p, &ghost));
3099: if (ghost) continue;
3100: PetscCall(DMISAddSize_private(network, p, numkeys, keys, blocksize, nselectedvar, &nidx));
3101: }
3103: /* Compute idx */
3104: PetscCall(PetscMalloc1(nidx, &idx));
3105: i = 0;
3106: for (p = estart; p < eend; p++) PetscCall(DMISComputeIdx_private(dm, p, numkeys, keys, blocksize, nselectedvar, selectedvar, &i, idx));
3107: for (p = vstart; p < vend; p++) {
3108: PetscCall(DMNetworkIsGhostVertex(dm, p, &ghost));
3109: if (ghost) continue;
3110: PetscCall(DMISComputeIdx_private(dm, p, numkeys, keys, blocksize, nselectedvar, selectedvar, &i, idx));
3111: }
3113: /* Create is */
3114: PetscCall(ISCreateGeneral(comm, nidx, idx, PETSC_COPY_VALUES, is));
3115: PetscCall(PetscFree(idx));
3116: PetscFunctionReturn(PETSC_SUCCESS);
3117: }
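/*
   A minimal usage sketch, assuming `dm` is the DMNETWORK, `compkey` came from
   DMNetworkRegisterComponent(), and the component stores its variables in blocks of size 2 of
   which only the first entry per block is wanted (the block size and selection here are
   illustrative assumptions):

     IS        is;
     PetscInt  bs[1]    = {2};
     PetscInt  nsel[1]  = {1};
     PetscInt  first[1] = {0};
     PetscInt *sel[1]   = {first};

     PetscCall(DMNetworkCreateIS(dm, 1, &compkey, bs, nsel, sel, &is));
     PetscCall(ISDestroy(&is));

   The resulting is indexes the global network vector and can be used, for example, with
   VecGetSubVector(). Ghost vertices are skipped, so across all ranks each selected entry
   appears exactly once.
*/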
3119: static inline PetscErrorCode DMISComputeLocalIdx_private(DM dm, PetscInt p, PetscInt numkeys, PetscInt keys[], PetscInt blocksize[], PetscInt nselectedvar[], PetscInt *selectedvar[], PetscInt *ii, PetscInt *idx)
3120: {
3121: PetscInt i, j, ncomps, nvar, key, offsetl, k, k1, offset = 0;
3122: DM_Network *network = (DM_Network *)dm->data;
3123: DMNetworkComponentHeader header;
3125: PetscFunctionBegin;
3126: PetscCall(PetscSectionGetOffset(network->DataSection, p, &offset));
3127: ncomps = ((DMNetworkComponentHeader)(network->componentdataarray + offset))->ndata;
3128: header = (DMNetworkComponentHeader)(network->componentdataarray + offset);
3130: for (i = 0; i < ncomps; i++) {
3131: key = header->key[i];
3132: nvar = header->nvar[i];
3133: for (j = 0; j < numkeys; j++) {
3134: if (key != keys[j]) continue;
3136: PetscCall(DMNetworkGetLocalVecOffset(dm, p, i, &offsetl));
3137: if (!blocksize || blocksize[j] == -1) {
3138: for (k = 0; k < nvar; k++) idx[(*ii)++] = offsetl + k;
3139: } else {
3140: for (k = 0; k < nvar; k += blocksize[j]) {
3141: for (k1 = 0; k1 < nselectedvar[j]; k1++) idx[(*ii)++] = offsetl + k + selectedvar[j][k1];
3142: }
3143: }
3144: }
3145: }
3146: PetscFunctionReturn(PETSC_SUCCESS);
3147: }
3149: /*@
3150: DMNetworkCreateLocalIS - Create an index set object from the local vector of the network
3152: Not Collective
3154: Input Parameters:
3155: + dm - `DMNETWORK` object
3156: . numkeys - number of keys
3157: . keys - array of keys that define the components of the variables you wish to extract
3158: . blocksize - block size of the variables associated with the component
3159: . nselectedvar - number of variables in each block to select
3160: - selectedvar - the offsets within each block of the variables to select
3162: Output Parameter:
3163: . is - the index set
3165: Level: advanced
3167: Notes:
3168: Use a blocksize[i] of -1 to select all the variables of the i-th component, in which case nselectedvar[i] and selectedvar[i] are ignored. Pass `NULL`, `NULL`, `NULL` for blocksize, nselectedvar, and selectedvar to obtain all the values of every selected component. For example, `DMNetworkCreateLocalIS`(dm,1,&key,`NULL`,`NULL`,`NULL`,&is) returns an index set that extracts all the variables of the component with the given key.
3170: .seealso: `DM`, `DMNETWORK`, `DMNetworkCreate()`, `DMNetworkCreateIS()`, `ISCreateGeneral()`
3171: @*/
3172: PetscErrorCode DMNetworkCreateLocalIS(DM dm, PetscInt numkeys, PetscInt keys[], PetscInt blocksize[], PetscInt nselectedvar[], PetscInt *selectedvar[], IS *is)
3173: {
3174: DM_Network *network = (DM_Network *)dm->data;
3175: PetscInt i, p, pstart, pend, nidx, *idx;
3177: PetscFunctionBegin;
3178: /* Check input parameters */
3179: for (i = 0; i < numkeys; i++) {
3180: if (!blocksize || blocksize[i] == -1) continue;
3181: PetscCheck(nselectedvar[i] <= blocksize[i], PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "number of selected variables %" PetscInt_FMT " cannot be larger than blocksize %" PetscInt_FMT, nselectedvar[i], blocksize[i]);
3182: }
3184: pstart = network->cloneshared->pStart;
3185: pend = network->cloneshared->pEnd;
3187: /* Get local number of idx */
3188: nidx = 0;
3189: for (p = pstart; p < pend; p++) PetscCall(DMISAddSize_private(network, p, numkeys, keys, blocksize, nselectedvar, &nidx));
3191: /* Compute local idx */
3192: PetscCall(PetscMalloc1(nidx, &idx));
3193: i = 0;
3194: for (p = pstart; p < pend; p++) PetscCall(DMISComputeLocalIdx_private(dm, p, numkeys, keys, blocksize, nselectedvar, selectedvar, &i, idx));
3196: /* Create is */
3197: PetscCall(ISCreateGeneral(PETSC_COMM_SELF, nidx, idx, PETSC_COPY_VALUES, is));
3198: PetscCall(PetscFree(idx));
3199: PetscFunctionReturn(PETSC_SUCCESS);
3200: }
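/*
   The local variant follows the same calling sequence but produces indices into the local
   vector and includes ghost points. A minimal sketch selecting every variable of one component
   (`compkey` is an assumed, previously registered key):

     IS islocal;

     PetscCall(DMNetworkCreateLocalIS(dm, 1, &compkey, NULL, NULL, NULL, &islocal));
     PetscCall(ISDestroy(&islocal));
*/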
3201: /*@
3202: DMNetworkFinalizeComponents - Sets up the internal data structures for the sections and components. It should be called after registering new components and adding all components
3203: to the cloned network; afterwards, no new components can be added to the network.
3205: Collective
3207: Input Parameter:
3208: . dm - the `DMNETWORK` object
3210: Level: beginner
3212: .seealso: `DM`, `DMNETWORK`, `DMNetworkAddComponent()`, `DMNetworkRegisterComponent()`, `DMSetUp()`
3213: @*/
3214: PetscErrorCode DMNetworkFinalizeComponents(DM dm)
3215: {
3216: DM_Network *network = (DM_Network *)dm->data;
3218: PetscFunctionBegin;
3219: if (network->componentsetup) PetscFunctionReturn(PETSC_SUCCESS);
3220: PetscCall(DMNetworkComponentSetUp(dm));
3221: PetscCall(DMNetworkVariablesSetUp(dm));
3222: PetscCall(DMSetLocalSection(network->plex, network->DofSection));
3223: PetscCall(DMGetGlobalSection(network->plex, &network->GlobalDofSection));
3224: network->componentsetup = PETSC_TRUE;
3225: PetscFunctionReturn(PETSC_SUCCESS);
3226: }
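/*
   A sketch of the intended calling sequence on a cloned network (the component name, type, and
   key below are illustrative assumptions, not part of this file):

     DM       newdm;
     PetscInt compkey;

     PetscCall(DMClone(dm, &newdm));
     PetscCall(DMNetworkRegisterComponent(newdm, "newcomp", sizeof(NewComp), &compkey));
     ... add the component to the relevant points with DMNetworkAddComponent() ...
     PetscCall(DMNetworkFinalizeComponents(newdm));

   After DMNetworkFinalizeComponents() the DOF and global sections of newdm are set up and no
   further components can be added to it.
*/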