Actual source code: ex1.c
petsc-3.7.7 2017-09-25
static const char help[] = "Test star forest communication (PetscSF)\n\n";

/*T
   Description: A star forest is a simple tree with one root and zero or more leaves.
   Many common communication patterns can be expressed as updates of rootdata using leafdata and vice versa.
   This example creates a star forest, communicates values using the graph (see options for types of communication), views the graph, then destroys it.
T*/
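
/*
  Example invocations (a sketch; assumes a standard MPI launcher such as mpiexec
  and the usual PETSc example build, e.g. "make ex1"):
    mpiexec -n 3 ./ex1 -test_bcast
    mpiexec -n 3 ./ex1 -test_reduce -test_op max
    mpiexec -n 3 ./ex1 -test_bcast -stride 2
  All -test_* options below default to PETSC_FALSE, so at least one must be
  given to exercise communication.
*/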

/*
  Include petscsf.h so we can use PetscSF objects. Note that this automatically
  includes petscsys.h.
*/
#include <petscsf.h>
#include <petscviewer.h>

int main(int argc,char **argv)
{
  PetscInt    i,nroots,nrootsalloc,nleaves,nleavesalloc,*mine,stride;
  PetscSFNode *remote;
  PetscMPIInt rank,size;
  PetscSF     sf;
  PetscBool   test_bcast,test_reduce,test_degree,test_fetchandop,test_gather,test_scatter,test_embed,test_invert;
  MPI_Op      mop;
  char        opstring[256];
  PetscBool   strflg;

  PetscInitialize(&argc,&argv,(char*)0,help);
  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
  MPI_Comm_size(PETSC_COMM_WORLD,&size);

  PetscOptionsBegin(PETSC_COMM_WORLD,"","PetscSF Test Options","none");
  test_bcast = PETSC_FALSE;
  PetscOptionsBool("-test_bcast","Test broadcast","",test_bcast,&test_bcast,NULL);
  test_reduce = PETSC_FALSE;
  PetscOptionsBool("-test_reduce","Test reduction","",test_reduce,&test_reduce,NULL);
  mop = MPI_SUM;
  PetscStrcpy(opstring,"sum");
  PetscOptionsString("-test_op","Designate which MPI_Op to use","",opstring,opstring,256,NULL);
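  /* Map the option string to an MPI_Op. MPIU_SUM is PETSc's portable
     counterpart of MPI_SUM (a custom op only on configurations where MPI_SUM
     cannot reduce PETSc's numeric types directly). */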
  PetscStrcmp("sum",opstring,&strflg);
  if (strflg) {
    mop = MPIU_SUM;
  }
  PetscStrcmp("prod",opstring,&strflg);
  if (strflg) {
    mop = MPI_PROD;
  }
  PetscStrcmp("max",opstring,&strflg);
  if (strflg) {
    mop = MPI_MAX;
  }
  PetscStrcmp("min",opstring,&strflg);
  if (strflg) {
    mop = MPI_MIN;
  }
  PetscStrcmp("land",opstring,&strflg);
  if (strflg) {
    mop = MPI_LAND;
  }
  PetscStrcmp("band",opstring,&strflg);
  if (strflg) {
    mop = MPI_BAND;
  }
  PetscStrcmp("lor",opstring,&strflg);
  if (strflg) {
    mop = MPI_LOR;
  }
  PetscStrcmp("bor",opstring,&strflg);
  if (strflg) {
    mop = MPI_BOR;
  }
  PetscStrcmp("lxor",opstring,&strflg);
  if (strflg) {
    mop = MPI_LXOR;
  }
  PetscStrcmp("bxor",opstring,&strflg);
  if (strflg) {
    mop = MPI_BXOR;
  }
  test_degree = PETSC_FALSE;
  PetscOptionsBool("-test_degree","Test computation of vertex degree","",test_degree,&test_degree,NULL);
  test_fetchandop = PETSC_FALSE;
  PetscOptionsBool("-test_fetchandop","Test atomic Fetch-And-Op","",test_fetchandop,&test_fetchandop,NULL);
  test_gather = PETSC_FALSE;
  PetscOptionsBool("-test_gather","Test point gather","",test_gather,&test_gather,NULL);
  test_scatter = PETSC_FALSE;
  PetscOptionsBool("-test_scatter","Test point scatter","",test_scatter,&test_scatter,NULL);
  test_embed = PETSC_FALSE;
  PetscOptionsBool("-test_embed","Test point embed","",test_embed,&test_embed,NULL);
  test_invert = PETSC_FALSE;
  PetscOptionsBool("-test_invert","Test point invert","",test_invert,&test_invert,NULL);
  stride = 1;
  PetscOptionsInt("-stride","Stride for leaf and root data","",stride,&stride,NULL);
  PetscOptionsEnd();
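
  /* Build the communication graph. Each process owns nroots roots and has
     nleaves leaves: leaf 0 references root 1 on the left periodic neighbor,
     leaf 1 references root 0 on the right periodic neighbor, and every rank
     except 0 has a third leaf referencing root 2 on rank 0. With -stride > 1
     the data arrays are padded and only every stride-th entry is used. */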
  nroots       = 2 + (PetscInt)(rank == 0);
  nrootsalloc  = nroots * stride;
  nleaves      = 2 + (PetscInt)(rank > 0);
  nleavesalloc = nleaves * stride;
  mine         = NULL;
  if (stride > 1) {
    PetscInt i;

    PetscMalloc1(nleaves,&mine);
    for (i = 0; i < nleaves; i++) {
      mine[i] = stride * i;
    }
  }
  PetscMalloc1(nleaves,&remote);
  /* Left periodic neighbor */
  remote[0].rank  = (rank+size-1)%size;
  remote[0].index = 1 * stride;
  /* Right periodic neighbor */
  remote[1].rank  = (rank+1)%size;
  remote[1].index = 0 * stride;
  if (rank > 0) { /* All processes other than rank 0 reference rank 0, index 2 */
    remote[2].rank  = 0;
    remote[2].index = 2 * stride;
  }
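
  /* For example, with three ranks and -stride 1 the graph is (derived from the
     assignments above):
       rank 0: leaf 0 -> (rank 2, root 1), leaf 1 -> (rank 1, root 0)
       rank 1: leaf 0 -> (rank 0, root 1), leaf 1 -> (rank 2, root 0), leaf 2 -> (rank 0, root 2)
       rank 2: leaf 0 -> (rank 1, root 1), leaf 1 -> (rank 0, root 0), leaf 2 -> (rank 0, root 2)
     so root 2 on rank 0 has degree 2, while roots 0 and 1 have degree 1 on
     every rank. */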

  /* Create a star forest for communication. With the default stride of 1 the
     leaf space is dense, so mine is NULL; otherwise mine holds explicit leaf
     indices. */
  PetscSFCreate(PETSC_COMM_WORLD,&sf);
  PetscSFSetFromOptions(sf);
  PetscSFSetGraph(sf,nrootsalloc,nleaves,mine,PETSC_OWN_POINTER,remote,PETSC_OWN_POINTER);

  /* View graph, mostly useful for debugging purposes. */
  PetscViewerPushFormat(PETSC_VIEWER_STDOUT_WORLD,PETSC_VIEWER_ASCII_INFO_DETAIL);
  PetscSFView(sf,PETSC_VIEWER_STDOUT_WORLD);
  PetscViewerPopFormat(PETSC_VIEWER_STDOUT_WORLD);

  if (test_bcast) { /* broadcast rootdata into leafdata */
    PetscInt *rootdata,*leafdata;
    /* Allocate space for send and receive buffers. This example communicates PetscInt, but other types, including
     * user-defined structures, could also be used. */
    PetscMalloc2(nrootsalloc,&rootdata,nleavesalloc,&leafdata);
    /* Set rootdata buffer to be broadcast */
    for (i=0; i<nrootsalloc; i++) rootdata[i] = -1;
    for (i=0; i<nroots; i++) rootdata[i*stride] = 100*(rank+1) + i;
    /* Initialize the local buffer; these values are never used. */
    for (i=0; i<nleavesalloc; i++) leafdata[i] = -1;
    /* Broadcast entries from rootdata to leafdata. Computation or other communication can be performed between the begin and end calls. */
    PetscSFBcastBegin(sf,MPIU_INT,rootdata,leafdata);
    PetscSFBcastEnd(sf,MPIU_INT,rootdata,leafdata);
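    /* Each leaf now holds a copy of the root it references; e.g., with stride 1,
       leafdata[1] on every rank equals rootdata[0] on its right neighbor,
       i.e. 100*(right_rank+1). */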
    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Bcast Rootdata\n");
    PetscIntView(nrootsalloc,rootdata,PETSC_VIEWER_STDOUT_WORLD);
    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Bcast Leafdata\n");
    PetscIntView(nleavesalloc,leafdata,PETSC_VIEWER_STDOUT_WORLD);
    PetscFree2(rootdata,leafdata);
  }

  if (test_reduce) { /* Reduce leafdata into rootdata */
    PetscInt *rootdata,*leafdata;
    PetscMalloc2(nrootsalloc,&rootdata,nleavesalloc,&leafdata);
    /* Initialize rootdata buffer in which the result of the reduction will appear. */
    for (i=0; i<nrootsalloc; i++) rootdata[i] = -1;
    for (i=0; i<nroots; i++) rootdata[i*stride] = 100*(rank+1) + i;
    /* Set leaf values to reduce. */
    for (i=0; i<nleavesalloc; i++) leafdata[i] = -1;
    for (i=0; i<nleaves; i++) leafdata[i*stride] = 1000*(rank+1) + 10*i;
    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Pre-Reduce Rootdata\n");
    PetscIntView(nrootsalloc,rootdata,PETSC_VIEWER_STDOUT_WORLD);
    /* Perform reduction. Computation or other communication can be performed between the begin and end calls.
     * This example sums the values, but other MPI_Ops can be used (e.g., MPI_MAX, MPI_PROD). */
    PetscSFReduceBegin(sf,MPIU_INT,leafdata,rootdata,mop);
    PetscSFReduceEnd(sf,MPIU_INT,leafdata,rootdata,mop);
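    /* Each root combines its existing value with the contributions of every
       leaf referencing it, using mop; the pre-reduce rootdata is printed above
       so the two can be compared. */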
    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Reduce Leafdata\n");
    PetscIntView(nleavesalloc,leafdata,PETSC_VIEWER_STDOUT_WORLD);
    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Reduce Rootdata\n");
    PetscIntView(nrootsalloc,rootdata,PETSC_VIEWER_STDOUT_WORLD);
    PetscFree2(rootdata,leafdata);
  }

  if (test_degree) {
    const PetscInt *degree;
    PetscSFComputeDegreeBegin(sf,&degree);
    PetscSFComputeDegreeEnd(sf,&degree);
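    /* degree[i] is the number of leaves (across all processes) referencing
       root i; the array belongs to the SF and is not freed here. */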
    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Root degrees\n");
    PetscIntView(nrootsalloc,degree,PETSC_VIEWER_STDOUT_WORLD);
  }

  if (test_fetchandop) {
    /* Cannot use text compare here because token ordering is not deterministic */
    PetscInt *leafdata,*leafupdate,*rootdata;
    PetscMalloc3(nleavesalloc,&leafdata,nleavesalloc,&leafupdate,nrootsalloc,&rootdata);
    for (i=0; i<nleavesalloc; i++) leafdata[i] = -1;
    for (i=0; i<nleaves; i++) leafdata[i*stride] = 1;
    for (i=0; i<nrootsalloc; i++) rootdata[i] = -1;
    for (i=0; i<nroots; i++) rootdata[i*stride] = 0;
    PetscSFFetchAndOpBegin(sf,MPIU_INT,rootdata,leafdata,leafupdate,mop);
    PetscSFFetchAndOpEnd(sf,MPIU_INT,rootdata,leafdata,leafupdate,mop);
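    /* For each leaf, the root value just before that leaf's update is fetched
       into leafupdate, and leafdata is combined into the root with mop. With
       the default MPI_SUM this counts the leaves at each root, and leafupdate
       records the order in which the atomic updates landed. */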
    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Rootdata (sum of 1 from each leaf)\n");
    PetscIntView(nrootsalloc,rootdata,PETSC_VIEWER_STDOUT_WORLD);
    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Leafupdate (value at roots prior to my atomic update)\n");
    PetscIntView(nleavesalloc,leafupdate,PETSC_VIEWER_STDOUT_WORLD);
    PetscFree3(leafdata,leafupdate,rootdata);
  }

  if (test_gather) {
    const PetscInt *degree;
    PetscInt       inedges,*indata,*outdata;
    PetscSFComputeDegreeBegin(sf,&degree);
    PetscSFComputeDegreeEnd(sf,&degree);
    for (i=0,inedges=0; i<nrootsalloc; i++) inedges += degree[i];
    PetscMalloc2(inedges,&indata,nleavesalloc,&outdata);
    for (i=0; i<nleavesalloc; i++) outdata[i] = -1;
    for (i=0; i<nleaves; i++) outdata[i*stride] = 1000*(rank+1) + i;
    PetscSFGatherBegin(sf,MPIU_INT,outdata,indata);
    PetscSFGatherEnd(sf,MPIU_INT,outdata,indata);
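    /* Unlike a reduction, gather keeps every incoming value: indata holds one
       entry per (root,leaf) edge, grouped by root, so a root of degree d
       receives d consecutive entries. */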
    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Gathered data at multi-roots from leaves\n");
    PetscIntView(inedges,indata,PETSC_VIEWER_STDOUT_WORLD);
    PetscFree2(indata,outdata);
  }

  if (test_scatter) {
    const PetscInt *degree;
    PetscInt       j,count,inedges,*indata,*outdata;
    PetscSFComputeDegreeBegin(sf,&degree);
    PetscSFComputeDegreeEnd(sf,&degree);
    for (i=0,inedges=0; i<nrootsalloc; i++) inedges += degree[i];
    PetscMalloc2(inedges,&indata,nleavesalloc,&outdata);
    for (i=0; i<nleavesalloc; i++) outdata[i] = -1;
    for (i=0,count=0; i<nrootsalloc; i++) {
      for (j=0; j<degree[i]; j++) indata[count++] = 1000*(rank+1) + 100*i + j;
    }
    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Data at multi-roots, to scatter to leaves\n");
    PetscIntView(inedges,indata,PETSC_VIEWER_STDOUT_WORLD);

    PetscSFScatterBegin(sf,MPIU_INT,indata,outdata);
    PetscSFScatterEnd(sf,MPIU_INT,indata,outdata);
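    /* Scatter is the inverse of gather: each of the d entries stored at a root
       of degree d is sent to a distinct one of its leaves, so different leaves
       of the same root can receive different values (unlike broadcast). */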
    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Scattered data at leaves\n");
    PetscIntView(nleavesalloc,outdata,PETSC_VIEWER_STDOUT_WORLD);
    PetscFree2(indata,outdata);
  }

  if (test_embed) {
    const PetscInt nroots = 1 + (PetscInt)!rank,selected[] = {1*stride,2*stride};
    PetscSF        esf;
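    /* Embed the SF on a subset of roots: root 1 on every rank, plus root 2 on
       rank 0. Edges to unselected roots are dropped from the embedded SF. */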
    PetscSFCreateEmbeddedSF(sf,nroots,selected,&esf);
    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Embedded PetscSF\n");
    PetscViewerPushFormat(PETSC_VIEWER_STDOUT_WORLD,PETSC_VIEWER_ASCII_INFO_DETAIL);
    PetscSFView(esf,PETSC_VIEWER_STDOUT_WORLD);
    PetscViewerPopFormat(PETSC_VIEWER_STDOUT_WORLD);
    PetscSFDestroy(&esf);
  }

  if (test_invert) {
    PetscSF msf,imsf;
    PetscSFGetMultiSF(sf,&msf);
    PetscSFCreateInverseSF(msf,&imsf);
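    /* The multi-SF gives each root one slot per incoming edge, so every vertex
       has degree 1 and the map can be inverted: the inverse SF swaps the roles
       of roots and leaves, sending data the opposite way across the same edges. */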
    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Multi-SF\n");
    PetscSFView(msf,PETSC_VIEWER_STDOUT_WORLD);
    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Inverse of Multi-SF\n");
    PetscSFView(imsf,PETSC_VIEWER_STDOUT_WORLD);
    PetscSFDestroy(&imsf);
  }

  /* Clean storage for star forest. */
  PetscSFDestroy(&sf);
  PetscFinalize();
  return 0;
}