Actual source code: networkcreate.c
petsc-3.9.4 2018-09-11
#define PETSCDM_DLL
#include <petsc/private/dmnetworkimpl.h>
#include <petscdmda.h>
#include <petsc/private/vecimpl.h>
PetscErrorCode DMSetFromOptions_Network(PetscOptionItems *PetscOptionsObject,DM dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscOptionsHead(PetscOptionsObject,"DMNetwork Options");CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
/* External function declarations here */
extern PetscErrorCode DMCreateMatrix_Network(DM, Mat*);
extern PetscErrorCode DMDestroy_Network(DM);
extern PetscErrorCode DMView_Network(DM, PetscViewer);
extern PetscErrorCode DMGlobalToLocalBegin_Network(DM, Vec, InsertMode, Vec);
extern PetscErrorCode DMGlobalToLocalEnd_Network(DM, Vec, InsertMode, Vec);
extern PetscErrorCode DMLocalToGlobalBegin_Network(DM, Vec, InsertMode, Vec);
extern PetscErrorCode DMLocalToGlobalEnd_Network(DM, Vec, InsertMode, Vec);
extern PetscErrorCode DMSetUp_Network(DM);
extern PetscErrorCode DMClone_Network(DM, DM*);
static PetscErrorCode VecArrayPrint_private(PetscViewer viewer,PetscInt n,const PetscScalar *xv)
{
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  for (i=0; i<n; i++) {
#if defined(PETSC_USE_COMPLEX)
    if (PetscImaginaryPart(xv[i]) > 0.0) {
      ierr = PetscViewerASCIIPrintf(viewer," %g + %g i\n",(double)PetscRealPart(xv[i]),(double)PetscImaginaryPart(xv[i]));CHKERRQ(ierr);
    } else if (PetscImaginaryPart(xv[i]) < 0.0) {
      ierr = PetscViewerASCIIPrintf(viewer," %g - %g i\n",(double)PetscRealPart(xv[i]),-(double)PetscImaginaryPart(xv[i]));CHKERRQ(ierr);
    } else {
      ierr = PetscViewerASCIIPrintf(viewer," %g\n",(double)PetscRealPart(xv[i]));CHKERRQ(ierr);
    }
#else
    ierr = PetscViewerASCIIPrintf(viewer," %g\n",(double)xv[i]);CHKERRQ(ierr);
#endif
  }
  PetscFunctionReturn(0);
}
static PetscErrorCode VecView_Network_Seq(DM networkdm,Vec X,PetscViewer viewer)
{
  PetscErrorCode    ierr;
  PetscInt          e,v,Start,End,offset,nvar,id;
  const PetscScalar *xv;

  PetscFunctionBegin;
  ierr = VecGetArrayRead(X,&xv);CHKERRQ(ierr);

  /* iterate over edges */
  ierr = DMNetworkGetEdgeRange(networkdm,&Start,&End);CHKERRQ(ierr);
  for (e=Start; e<End; e++) {
    ierr = DMNetworkGetNumVariables(networkdm,e,&nvar);CHKERRQ(ierr);
    if (!nvar) continue;

    ierr = DMNetworkGetVariableOffset(networkdm,e,&offset);CHKERRQ(ierr);
    ierr = DMNetworkGetGlobalEdgeIndex(networkdm,e,&id);CHKERRQ(ierr);

    ierr = PetscViewerASCIIPrintf(viewer," Edge %D:\n",id);CHKERRQ(ierr);
    ierr = VecArrayPrint_private(viewer,nvar,xv+offset);CHKERRQ(ierr);
  }

  /* iterate over vertices */
  ierr = DMNetworkGetVertexRange(networkdm,&Start,&End);CHKERRQ(ierr);
  for (v=Start; v<End; v++) {
    ierr = DMNetworkGetNumVariables(networkdm,v,&nvar);CHKERRQ(ierr);
    if (!nvar) continue;

    ierr = DMNetworkGetVariableOffset(networkdm,v,&offset);CHKERRQ(ierr);
    ierr = DMNetworkGetGlobalVertexIndex(networkdm,v,&id);CHKERRQ(ierr);

    ierr = PetscViewerASCIIPrintf(viewer," Vertex %D:\n",id);CHKERRQ(ierr);
    ierr = VecArrayPrint_private(viewer,nvar,xv+offset);CHKERRQ(ierr);
  }
  ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
  ierr = VecRestoreArrayRead(X,&xv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
static PetscErrorCode VecView_Network_MPI(DM networkdm,Vec X,PetscViewer viewer)
{
  PetscErrorCode    ierr;
  PetscInt          i,e,v,eStart,eEnd,vStart,vEnd,offset,nvar,len_loc,len,k;
  const PetscScalar *xv;
  MPI_Comm          comm;
  PetscMPIInt       size,rank,tag = ((PetscObject)viewer)->tag;
  Vec               localX;
  PetscBool         ghostvtex;
  PetscScalar       *values;
  PetscInt          j,ne,nv,id;
  MPI_Status        status;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)networkdm,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);

  ierr = DMGetLocalVector(networkdm,&localX);CHKERRQ(ierr);
  ierr = DMGlobalToLocalBegin(networkdm,X,INSERT_VALUES,localX);CHKERRQ(ierr);
  ierr = DMGlobalToLocalEnd(networkdm,X,INSERT_VALUES,localX);CHKERRQ(ierr);
  ierr = VecGetArrayRead(localX,&xv);CHKERRQ(ierr);

  ierr = VecGetLocalSize(localX,&len_loc);CHKERRQ(ierr);

  ierr = DMNetworkGetEdgeRange(networkdm,&eStart,&eEnd);CHKERRQ(ierr);
  ierr = DMNetworkGetVertexRange(networkdm,&vStart,&vEnd);CHKERRQ(ierr);
  len_loc += 2*(1 + eEnd-eStart + vEnd-vStart);

  /* values = [nedges, nvertices; id, nvar, xedge; ...; id, nvar, xvertex; ...], to be sent to proc[0] */
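  /* Illustration with hypothetical numbers: a rank owning one edge (global id 3, two dofs
     x0,x1) and one vertex (global id 7, one dof x2) packs
         values = [1, 1, 3, 2, x0, x1, 7, 1, x2].
     The counts, ids, and nvar are stored as PetscScalar so that a single MPIU_SCALAR
     message suffices; rank 0 recovers the integers below via PetscAbsScalar() casts. */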
  ierr = MPI_Allreduce(&len_loc,&len,1,MPIU_INT,MPI_MAX,comm);CHKERRQ(ierr);
  ierr = PetscCalloc1(len,&values);CHKERRQ(ierr);

  if (!rank) {
    ierr = PetscViewerASCIIPrintf(viewer,"Process [%d]\n",rank);CHKERRQ(ierr);
  }

  /* iterate over edges */
  k = 2;
  for (e=eStart; e<eEnd; e++) {
    ierr = DMNetworkGetNumVariables(networkdm,e,&nvar);CHKERRQ(ierr);
    if (!nvar) continue;

    ierr = DMNetworkGetVariableOffset(networkdm,e,&offset);CHKERRQ(ierr);
    ierr = DMNetworkGetGlobalEdgeIndex(networkdm,e,&id);CHKERRQ(ierr);

    if (!rank) { /* print its own entries */
      ierr = PetscViewerASCIIPrintf(viewer," Edge %D:\n",id);CHKERRQ(ierr);
      ierr = VecArrayPrint_private(viewer,nvar,xv+offset);CHKERRQ(ierr);
    } else {
      values[0]  += 1; /* number of edges */
      values[k++] = id;
      values[k++] = nvar;
      for (i=offset; i<offset+nvar; i++) values[k++] = xv[i];
    }
  }

  /* iterate over vertices */
  for (v=vStart; v<vEnd; v++) {
    ierr = DMNetworkIsGhostVertex(networkdm,v,&ghostvtex);CHKERRQ(ierr);
    if (ghostvtex) continue;
    ierr = DMNetworkGetNumVariables(networkdm,v,&nvar);CHKERRQ(ierr);
    if (!nvar) continue;

    ierr = DMNetworkGetVariableOffset(networkdm,v,&offset);CHKERRQ(ierr);
    ierr = DMNetworkGetGlobalVertexIndex(networkdm,v,&id);CHKERRQ(ierr);

    if (!rank) {
      ierr = PetscViewerASCIIPrintf(viewer," Vertex %D:\n",id);CHKERRQ(ierr);
      ierr = VecArrayPrint_private(viewer,nvar,xv+offset);CHKERRQ(ierr);
    } else {
      values[1]  += 1; /* number of vertices */
      values[k++] = id;
      values[k++] = nvar;
      for (i=offset; i<offset+nvar; i++) values[k++] = xv[i];
    }
  }

  if (!rank) {
    /* proc[0] receives and prints messages */
    for (j=1; j<size; j++) {
      ierr = PetscViewerASCIIPrintf(viewer,"Process [%d]\n",j);CHKERRQ(ierr);

      ierr = MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,comm,&status);CHKERRQ(ierr);

      ne = (PetscInt)PetscAbsScalar(values[0]);
      nv = (PetscInt)PetscAbsScalar(values[1]);

      /* print received edges */
      k = 2;
      for (i=0; i<ne; i++) {
        id   = (PetscInt)PetscAbsScalar(values[k++]);
        nvar = (PetscInt)PetscAbsScalar(values[k++]);
        ierr = PetscViewerASCIIPrintf(viewer," Edge %D:\n",id);CHKERRQ(ierr);
        ierr = VecArrayPrint_private(viewer,nvar,values+k);CHKERRQ(ierr);
        k   += nvar;
      }

      /* print received vertices */
      for (i=0; i<nv; i++) {
        id   = (PetscInt)PetscAbsScalar(values[k++]);
        nvar = (PetscInt)PetscAbsScalar(values[k++]);
        ierr = PetscViewerASCIIPrintf(viewer," Vertex %D:\n",id);CHKERRQ(ierr);
        ierr = VecArrayPrint_private(viewer,nvar,values+k);CHKERRQ(ierr);
        k   += nvar;
      }
    }
  } else {
    /* send values to proc[0] */
    ierr = MPI_Send((void*)values,(PetscMPIInt)k,MPIU_SCALAR,0,tag,comm);CHKERRQ(ierr);
  }

  ierr = PetscFree(values);CHKERRQ(ierr);
  ierr = VecRestoreArrayRead(localX,&xv);CHKERRQ(ierr);
  ierr = DMRestoreLocalVector(networkdm,&localX);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
PETSC_EXTERN PetscErrorCode VecView_MPI(Vec,PetscViewer);

PetscErrorCode VecView_Network(Vec v,PetscViewer viewer)
{
  DM             dm;
  PetscErrorCode ierr;
  PetscBool      isseq;
  PetscBool      iascii;

  PetscFunctionBegin;
  ierr = VecGetDM(v,&dm);CHKERRQ(ierr);
  if (!dm) SETERRQ(PetscObjectComm((PetscObject)v),PETSC_ERR_ARG_WRONG,"Vector not generated from a DM");
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)v,VECSEQ,&isseq);CHKERRQ(ierr);

  /* Use VecView_Network_Seq/MPI if the viewer is ASCII; use VecView_Seq/MPI for other viewer formats */
  if (iascii) {
    if (isseq) {
      ierr = VecView_Network_Seq(dm,v,viewer);CHKERRQ(ierr);
    } else {
      ierr = VecView_Network_MPI(dm,v,viewer);CHKERRQ(ierr);
    }
  } else {
    if (isseq) {
      ierr = VecView_Seq(v,viewer);CHKERRQ(ierr);
    } else {
      ierr = VecView_MPI(v,viewer);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}
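/* Usage sketch (illustrative, not from this file): DMCreateGlobalVector_Network below
   installs this routine on global vectors via VecSetOperation(), so an ordinary VecView()
   call on such a vector dispatches here for ASCII viewers and prints the values grouped
   by edge and vertex; any other viewer type falls back to the standard VecView_Seq/MPI.

     Vec X;
     ierr = DMCreateGlobalVector(networkdm,&X);CHKERRQ(ierr);
     ierr = VecView(X,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
     ierr = VecDestroy(&X);CHKERRQ(ierr);
*/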
static PetscErrorCode DMCreateGlobalVector_Network(DM dm,Vec *vec)
{
  PetscErrorCode ierr;
  DM_Network     *network = (DM_Network*) dm->data;

  PetscFunctionBegin;
  ierr = DMCreateGlobalVector(network->plex,vec);CHKERRQ(ierr);
  ierr = VecSetOperation(*vec, VECOP_VIEW, (void (*)(void)) VecView_Network);CHKERRQ(ierr);
  ierr = VecSetDM(*vec,dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
static PetscErrorCode DMCreateLocalVector_Network(DM dm,Vec *vec)
{
  PetscErrorCode ierr;
  DM_Network     *network = (DM_Network*) dm->data;

  PetscFunctionBegin;
  ierr = DMCreateLocalVector(network->plex,vec);CHKERRQ(ierr);
  ierr = VecSetDM(*vec,dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
PetscErrorCode DMInitialize_Network(DM dm)
{
  PetscFunctionBegin;
  dm->ops->view                    = NULL;
  dm->ops->setfromoptions          = DMSetFromOptions_Network;
  dm->ops->clone                   = DMClone_Network;
  dm->ops->setup                   = DMSetUp_Network;
  dm->ops->createglobalvector      = DMCreateGlobalVector_Network;
  dm->ops->createlocalvector       = DMCreateLocalVector_Network;
  dm->ops->getlocaltoglobalmapping = NULL;
  dm->ops->createfieldis           = NULL;
  dm->ops->createcoordinatedm      = NULL;
  dm->ops->getcoloring             = NULL;
  dm->ops->creatematrix            = DMCreateMatrix_Network;
  dm->ops->createinterpolation     = NULL;
  dm->ops->getaggregates           = NULL;
  dm->ops->getinjection            = NULL;
  dm->ops->refine                  = NULL;
  dm->ops->coarsen                 = NULL;
  dm->ops->refinehierarchy         = NULL;
  dm->ops->coarsenhierarchy        = NULL;
  dm->ops->globaltolocalbegin      = DMGlobalToLocalBegin_Network;
  dm->ops->globaltolocalend        = DMGlobalToLocalEnd_Network;
  dm->ops->localtoglobalbegin      = DMLocalToGlobalBegin_Network;
  dm->ops->localtoglobalend        = DMLocalToGlobalEnd_Network;
  dm->ops->destroy                 = DMDestroy_Network;
  dm->ops->createsubdm             = NULL;
  dm->ops->locatepoints            = NULL;
  PetscFunctionReturn(0);
}
PetscErrorCode DMClone_Network(DM dm, DM *newdm)
{
  DM_Network     *network = (DM_Network *) dm->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  network->refct++;
  (*newdm)->data = network;
  ierr = PetscObjectChangeTypeName((PetscObject) *newdm, DMNETWORK);CHKERRQ(ierr);
  ierr = DMInitialize_Network(*newdm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
/*MC
  DMNETWORK = "network" - A DM object that encapsulates an unstructured network. The implementation is based on the DM object
                          DMPlex that manages unstructured grids. Distributed networks use a non-overlapping partitioning of
                          the edges. In the local representation, Vecs contain all unknowns in the interior and shared boundary.
                          This is specified by a PetscSection object. Ownership in the global representation is determined by
                          ownership of the underlying DMPlex points. This is specified by another PetscSection object.

  Level: intermediate

.seealso: DMType, DMNetworkCreate(), DMCreate(), DMSetType()
M*/
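/* Because DMCreate_Network below is the registered constructor for this type, the two
   creation paths are equivalent (a sketch; error checking via ierr/CHKERRQ as usual):

     DM dm;
     ierr = DMCreate(comm,&dm);CHKERRQ(ierr);
     ierr = DMSetType(dm,DMNETWORK);CHKERRQ(ierr);

   or simply

     ierr = DMNetworkCreate(comm,&dm);CHKERRQ(ierr);
*/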
PETSC_EXTERN PetscErrorCode DMCreate_Network(DM dm)
{
  DM_Network     *network;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscNewLog(dm,&network);CHKERRQ(ierr);
  dm->data = network;

  network->refct     = 1;
  network->NVertices = 0;
  network->NEdges    = 0;
  network->nVertices = 0;
  network->nEdges    = 0;
  network->nsubnet   = 0;

  ierr = DMInitialize_Network(dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
/*@
  DMNetworkCreate - Creates a DMNetwork object, which encapsulates an unstructured network.

  Collective on MPI_Comm

  Input Parameter:
. comm - The communicator for the DMNetwork object

  Output Parameter:
. network - The DMNetwork object

  Level: beginner

.keywords: DMNetwork, create
@*/
PetscErrorCode DMNetworkCreate(MPI_Comm comm, DM *network)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidPointer(network,2);
  ierr = DMCreate(comm, network);CHKERRQ(ierr);
  ierr = DMSetType(*network, DMNETWORK);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
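/* Minimal end-to-end sketch (illustrative; the topology-setup calls between creation and
   DMSetUp() are application specific and omitted here):

     DM  dm;
     Vec X;
     ierr = DMNetworkCreate(PETSC_COMM_WORLD,&dm);CHKERRQ(ierr);
     ...   define vertices, edges, and components   ...
     ierr = DMSetUp(dm);CHKERRQ(ierr);
     ierr = DMCreateGlobalVector(dm,&X);CHKERRQ(ierr);
     ierr = VecView(X,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
     ierr = VecDestroy(&X);CHKERRQ(ierr);
     ierr = DMDestroy(&dm);CHKERRQ(ierr);
*/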