Actual source code: ex1.c
petsc-3.9.4 2018-09-11
static const char help[] = "Test star forest communication (PetscSF)\n\n";

/*T
    Description: A star is a simple tree with one root and zero or more leaves.
    A star forest is a union of disjoint stars.
    Many common communication patterns can be expressed as updates of rootdata using leafdata and vice-versa.
    This example creates a star forest, communicates values using the graph (see options for types of communication), views the graph, then destroys it.
T*/
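
/*
   Minimal sketch of the PetscSF life cycle exercised below (illustrative only;
   the graph arrays ilocal/iremote and the buffers rootdata/leafdata stand in
   for user-owned data and are not variables defined in this file):

     PetscSFCreate(comm,&sf);
     PetscSFSetGraph(sf,nroots,nleaves,ilocal,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);
     PetscSFBcastBegin(sf,MPIU_INT,rootdata,leafdata);              -- roots to leaves
     PetscSFBcastEnd(sf,MPIU_INT,rootdata,leafdata);
     PetscSFReduceBegin(sf,MPIU_INT,leafdata,rootdata,MPIU_SUM);    -- leaves to roots
     PetscSFReduceEnd(sf,MPIU_INT,leafdata,rootdata,MPIU_SUM);
     PetscSFDestroy(&sf);
*/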

/*
   Include petscsf.h so we can use PetscSF objects. Note that this automatically
   includes petscsys.h.
*/
#include <petscsf.h>
#include <petscviewer.h>

int main(int argc,char **argv)
{
  PetscErrorCode ierr;
  PetscInt       i,nroots,nrootsalloc,nleaves,nleavesalloc,*mine,stride;
  PetscSFNode    *remote;
  PetscMPIInt    rank,size;
  PetscSF        sf;
  PetscBool      test_bcast,test_reduce,test_degree,test_fetchandop,test_gather,test_scatter,test_embed,test_invert,test_sf_distribute;
  MPI_Op         mop=MPI_OP_NULL; /* initialize to prevent compiler warnings with cxx_quad build */
  char           opstring[256];
  PetscBool      strflg;

  ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;
  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
  MPI_Comm_size(PETSC_COMM_WORLD,&size);

  PetscOptionsBegin(PETSC_COMM_WORLD,"","PetscSF Test Options","none");
  test_bcast = PETSC_FALSE;
  PetscOptionsBool("-test_bcast","Test broadcast","",test_bcast,&test_bcast,NULL);
  test_reduce = PETSC_FALSE;
  PetscOptionsBool("-test_reduce","Test reduction","",test_reduce,&test_reduce,NULL);
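  /* Map the string given with -test_op onto an MPI reduction operation; summation is the default. */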
  mop = MPI_SUM;
  PetscStrcpy(opstring,"sum");
  PetscOptionsString("-test_op","Designate which MPI_Op to use","",opstring,opstring,256,NULL);
  PetscStrcmp("sum",opstring,&strflg);
  if (strflg) {
    mop = MPIU_SUM;
  }
  PetscStrcmp("prod",opstring,&strflg);
  if (strflg) {
    mop = MPI_PROD;
  }
  PetscStrcmp("max",opstring,&strflg);
  if (strflg) {
    mop = MPI_MAX;
  }
  PetscStrcmp("min",opstring,&strflg);
  if (strflg) {
    mop = MPI_MIN;
  }
  PetscStrcmp("land",opstring,&strflg);
  if (strflg) {
    mop = MPI_LAND;
  }
  PetscStrcmp("band",opstring,&strflg);
  if (strflg) {
    mop = MPI_BAND;
  }
  PetscStrcmp("lor",opstring,&strflg);
  if (strflg) {
    mop = MPI_LOR;
  }
  PetscStrcmp("bor",opstring,&strflg);
  if (strflg) {
    mop = MPI_BOR;
  }
  PetscStrcmp("lxor",opstring,&strflg);
  if (strflg) {
    mop = MPI_LXOR;
  }
  PetscStrcmp("bxor",opstring,&strflg);
  if (strflg) {
    mop = MPI_BXOR;
  }
  test_degree = PETSC_FALSE;
  PetscOptionsBool("-test_degree","Test computation of vertex degree","",test_degree,&test_degree,NULL);
  test_fetchandop = PETSC_FALSE;
  PetscOptionsBool("-test_fetchandop","Test atomic Fetch-And-Op","",test_fetchandop,&test_fetchandop,NULL);
  test_gather = PETSC_FALSE;
  PetscOptionsBool("-test_gather","Test point gather","",test_gather,&test_gather,NULL);
  test_scatter = PETSC_FALSE;
  PetscOptionsBool("-test_scatter","Test point scatter","",test_scatter,&test_scatter,NULL);
  test_embed = PETSC_FALSE;
  PetscOptionsBool("-test_embed","Test point embed","",test_embed,&test_embed,NULL);
  test_invert = PETSC_FALSE;
  PetscOptionsBool("-test_invert","Test point invert","",test_invert,&test_invert,NULL);
  stride = 1;
  PetscOptionsInt("-stride","Stride for leaf and root data","",stride,&stride,NULL);
  test_sf_distribute = PETSC_FALSE;
  PetscOptionsBool("-test_sf_distribute","Create an SF that 'distributes' to each process, like an alltoall","",test_sf_distribute,&test_sf_distribute,NULL);
  PetscOptionsEnd();

  if (test_sf_distribute) {
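    /* Alltoall-like graph: every process owns 'size' roots and has 'size' leaves;
     * leaf i on this process references root index 'rank' on process i. */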
    nroots       = size;
    nrootsalloc  = size;
    nleaves      = size;
    nleavesalloc = size;
    mine         = NULL;
    PetscMalloc1(nleaves,&remote);
    for (i=0; i<size; i++) {
      remote[i].rank  = i;
      remote[i].index = rank;
    }
  } else {
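    /* Default graph: every process owns 2 roots (rank 0 owns 3) and has 2 leaves
     * (3 on ranks other than 0), referencing its left and right periodic neighbors. */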
    nroots       = 2 + (PetscInt)(rank == 0);
    nrootsalloc  = nroots * stride;
    nleaves      = 2 + (PetscInt)(rank > 0);
    nleavesalloc = nleaves * stride;
    mine         = NULL;
    if (stride > 1) {
      PetscInt i;

      PetscMalloc1(nleaves,&mine);
      for (i = 0; i < nleaves; i++) {
        mine[i] = stride * i;
      }
    }
    PetscMalloc1(nleaves,&remote);
    /* Left periodic neighbor */
    remote[0].rank  = (rank+size-1)%size;
    remote[0].index = 1 * stride;
    /* Right periodic neighbor */
    remote[1].rank  = (rank+1)%size;
    remote[1].index = 0 * stride;
    if (rank > 0) { /* All processes other than rank 0 also reference root index 2 on rank 0 */
      remote[2].rank  = 0;
      remote[2].index = 2 * stride;
    }
  }

  /* Create a star forest for communication. In this example the leaf space is dense when the
   * stride is 1, in which case mine is NULL; otherwise mine lists the strided leaf locations. */
  PetscSFCreate(PETSC_COMM_WORLD,&sf);
  PetscSFSetFromOptions(sf);
  PetscSFSetGraph(sf,nrootsalloc,nleaves,mine,PETSC_OWN_POINTER,remote,PETSC_OWN_POINTER);
  PetscSFSetUp(sf);

  /* View graph, mostly useful for debugging purposes. */
  PetscViewerPushFormat(PETSC_VIEWER_STDOUT_WORLD,PETSC_VIEWER_ASCII_INFO_DETAIL);
  PetscSFView(sf,PETSC_VIEWER_STDOUT_WORLD);
  PetscViewerPopFormat(PETSC_VIEWER_STDOUT_WORLD);

  if (test_bcast) { /* broadcast rootdata into leafdata */
    PetscInt *rootdata,*leafdata;
    /* Allocate space for send and receive buffers. This example communicates PetscInt, but other types, including
     * user-defined structures, could also be used. */
    PetscMalloc2(nrootsalloc,&rootdata,nleavesalloc,&leafdata);
    /* Set rootdata buffer to be broadcast */
    for (i=0; i<nrootsalloc; i++) rootdata[i] = -1;
    for (i=0; i<nroots; i++) rootdata[i*stride] = 100*(rank+1) + i;
    /* Initialize local buffer; these values are never used. */
    for (i=0; i<nleavesalloc; i++) leafdata[i] = -1;
    /* Broadcast entries from rootdata to leafdata. Computation or other communication can be performed between the begin and end calls. */
    PetscSFBcastBegin(sf,MPIU_INT,rootdata,leafdata);
    PetscSFBcastEnd(sf,MPIU_INT,rootdata,leafdata);
    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Bcast Rootdata\n");
    PetscIntView(nrootsalloc,rootdata,PETSC_VIEWER_STDOUT_WORLD);
    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Bcast Leafdata\n");
    PetscIntView(nleavesalloc,leafdata,PETSC_VIEWER_STDOUT_WORLD);
    PetscFree2(rootdata,leafdata);
  }

  if (test_reduce) { /* Reduce leafdata into rootdata */
    PetscInt *rootdata,*leafdata;
    PetscMalloc2(nrootsalloc,&rootdata,nleavesalloc,&leafdata);
    /* Initialize rootdata buffer in which the result of the reduction will appear. */
    for (i=0; i<nrootsalloc; i++) rootdata[i] = -1;
    for (i=0; i<nroots; i++) rootdata[i*stride] = 100*(rank+1) + i;
    /* Set leaf values to reduce. */
    for (i=0; i<nleavesalloc; i++) leafdata[i] = -1;
    for (i=0; i<nleaves; i++) leafdata[i*stride] = 1000*(rank+1) + 10*i;
    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Pre-Reduce Rootdata\n");
    PetscIntView(nrootsalloc,rootdata,PETSC_VIEWER_STDOUT_WORLD);
    /* Perform reduction. Computation or other communication can be performed between the begin and end calls.
     * By default the values are summed, but any MPI_Op selected with -test_op (e.g., MPI_MAX, MPI_PROD) is used. */
    PetscSFReduceBegin(sf,MPIU_INT,leafdata,rootdata,mop);
    PetscSFReduceEnd(sf,MPIU_INT,leafdata,rootdata,mop);
    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Reduce Leafdata\n");
    PetscIntView(nleavesalloc,leafdata,PETSC_VIEWER_STDOUT_WORLD);
    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Reduce Rootdata\n");
    PetscIntView(nrootsalloc,rootdata,PETSC_VIEWER_STDOUT_WORLD);
    PetscFree2(rootdata,leafdata);
  }

  if (test_degree) {
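    /* Compute, for each root, the number of leaves (possibly on other processes) that reference it. */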
    const PetscInt *degree;
    PetscSFComputeDegreeBegin(sf,&degree);
    PetscSFComputeDegreeEnd(sf,&degree);
    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Root degrees\n");
    PetscIntView(nrootsalloc,degree,PETSC_VIEWER_STDOUT_WORLD);
  }

  if (test_fetchandop) {
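    /* Each leaf contributes its value to the corresponding root with the selected MPI_Op, and receives in
     * leafupdate the root value as it was immediately before its own contribution was applied. */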
    /* Cannot use text compare here because token ordering is not deterministic */
    PetscInt *leafdata,*leafupdate,*rootdata;
    PetscMalloc3(nleavesalloc,&leafdata,nleavesalloc,&leafupdate,nrootsalloc,&rootdata);
    for (i=0; i<nleavesalloc; i++) leafdata[i] = -1;
    for (i=0; i<nleaves; i++) leafdata[i*stride] = 1;
    for (i=0; i<nrootsalloc; i++) rootdata[i] = -1;
    for (i=0; i<nroots; i++) rootdata[i*stride] = 0;
    PetscSFFetchAndOpBegin(sf,MPIU_INT,rootdata,leafdata,leafupdate,mop);
    PetscSFFetchAndOpEnd(sf,MPIU_INT,rootdata,leafdata,leafupdate,mop);
    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Rootdata (sum of 1 from each leaf)\n");
    PetscIntView(nrootsalloc,rootdata,PETSC_VIEWER_STDOUT_WORLD);
    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Leafupdate (value at roots prior to my atomic update)\n");
    PetscIntView(nleavesalloc,leafupdate,PETSC_VIEWER_STDOUT_WORLD);
    PetscFree3(leafdata,leafupdate,rootdata);
  }

  if (test_gather) {
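    /* Gather every leaf's contribution into the multi-root space: each root provides as many slots
     * as its degree, so the total number of incoming edges equals the sum of the degrees. */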
    const PetscInt *degree;
    PetscInt       inedges,*indata,*outdata;
    PetscSFComputeDegreeBegin(sf,&degree);
    PetscSFComputeDegreeEnd(sf,&degree);
    for (i=0,inedges=0; i<nrootsalloc; i++) inedges += degree[i];
    PetscMalloc2(inedges,&indata,nleavesalloc,&outdata);
    for (i=0; i<nleavesalloc; i++) outdata[i] = -1;
    for (i=0; i<nleaves; i++) outdata[i*stride] = 1000*(rank+1) + i;
    PetscSFGatherBegin(sf,MPIU_INT,outdata,indata);
    PetscSFGatherEnd(sf,MPIU_INT,outdata,indata);
    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Gathered data at multi-roots from leaves\n");
    PetscIntView(inedges,indata,PETSC_VIEWER_STDOUT_WORLD);
    PetscFree2(indata,outdata);
  }

  if (test_scatter) {
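    /* Scatter a distinct value from each multi-root slot back to the leaf that owns the corresponding edge. */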
    const PetscInt *degree;
    PetscInt       j,count,inedges,*indata,*outdata;
    PetscSFComputeDegreeBegin(sf,&degree);
    PetscSFComputeDegreeEnd(sf,&degree);
    for (i=0,inedges=0; i<nrootsalloc; i++) inedges += degree[i];
    PetscMalloc2(inedges,&indata,nleavesalloc,&outdata);
    for (i=0; i<nleavesalloc; i++) outdata[i] = -1;
    for (i=0,count=0; i<nrootsalloc; i++) {
      for (j=0; j<degree[i]; j++) indata[count++] = 1000*(rank+1) + 100*i + j;
    }
    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Data at multi-roots, to scatter to leaves\n");
    PetscIntView(inedges,indata,PETSC_VIEWER_STDOUT_WORLD);

    PetscSFScatterBegin(sf,MPIU_INT,indata,outdata);
    PetscSFScatterEnd(sf,MPIU_INT,indata,outdata);
    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Scattered data at leaves\n");
    PetscIntView(nleavesalloc,outdata,PETSC_VIEWER_STDOUT_WORLD);
    PetscFree2(indata,outdata);
  }

  if (test_embed) {
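    /* Create an embedded SF that keeps only the edges whose roots are listed in selected[]. */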
    const PetscInt nroots = 1 + (PetscInt) !rank;
    PetscInt       selected[2];
    PetscSF        esf;

    selected[0] = stride;
    selected[1] = 2*stride;
    PetscSFCreateEmbeddedSF(sf,nroots,selected,&esf);
    PetscSFSetUp(esf);
    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Embedded PetscSF\n");
    PetscViewerPushFormat(PETSC_VIEWER_STDOUT_WORLD,PETSC_VIEWER_ASCII_INFO_DETAIL);
    PetscSFView(esf,PETSC_VIEWER_STDOUT_WORLD);
    PetscViewerPopFormat(PETSC_VIEWER_STDOUT_WORLD);
    PetscSFDestroy(&esf);
  }

  if (test_invert) {
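    /* The multi-SF splits each root into one root per incoming edge; its inverse exchanges the roles of roots and leaves. */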
    PetscSF msf,imsf;
    PetscSFGetMultiSF(sf,&msf);
    PetscSFCreateInverseSF(msf,&imsf);
    PetscSFSetUp(msf);
    PetscSFSetUp(imsf);
    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Multi-SF\n");
    PetscSFView(msf,PETSC_VIEWER_STDOUT_WORLD);
    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Inverse of Multi-SF\n");
    PetscSFView(imsf,PETSC_VIEWER_STDOUT_WORLD);
    PetscSFDestroy(&imsf);
  }

  /* Clean storage for star forest. */
  PetscSFDestroy(&sf);
  ierr = PetscFinalize();
  return ierr;
}

/*TEST

   test:
      nsize: 4
      args: -test_bcast -sf_type window
      requires: define(PETSC_HAVE_MPI_WIN_CREATE) define(PETSC_HAVE_MPICH_NUMVERSION)

   test:
      suffix: 2
      nsize: 4
      args: -test_reduce -sf_type window
      requires: define(PETSC_HAVE_MPI_WIN_CREATE) define(PETSC_HAVE_MPICH_NUMVERSION)

   test:
      suffix: 2_basic
      nsize: 4
      args: -test_reduce -sf_type basic

   test:
      suffix: 3
      nsize: 4
      args: -test_degree -sf_type window
      requires: define(PETSC_HAVE_MPI_WIN_CREATE) define(PETSC_HAVE_MPICH_NUMVERSION)

   test:
      suffix: 3_basic
      nsize: 4
      args: -test_degree -sf_type basic

   test:
      suffix: 4
      nsize: 4
      args: -test_gather -sf_type window
      requires: define(PETSC_HAVE_MPI_WIN_CREATE) define(PETSC_HAVE_MPICH_NUMVERSION)

   test:
      suffix: 4_basic
      nsize: 4
      args: -test_gather -sf_type basic

   test:
      suffix: 4_stride
      nsize: 4
      args: -test_gather -sf_type basic -stride 2

   test:
      suffix: 5
      nsize: 4
      args: -test_scatter -sf_type window
      requires: define(PETSC_HAVE_MPI_WIN_CREATE) define(PETSC_HAVE_MPICH_NUMVERSION)

   test:
      suffix: 5_basic
      nsize: 4
      args: -test_scatter -sf_type basic

   test:
      suffix: 5_stride
      nsize: 4
      args: -test_scatter -sf_type basic -stride 2

   test:
      suffix: 6
      nsize: 4
      args: -test_embed -sf_type window
      requires: define(PETSC_HAVE_MPI_WIN_CREATE) define(PETSC_HAVE_MPICH_NUMVERSION)

   test:
      suffix: 6_basic
      nsize: 4
      args: -test_embed -sf_type basic

   test:
      suffix: 7
      nsize: 4
      args: -test_invert -sf_type window
      requires: define(PETSC_HAVE_MPI_WIN_CREATE) define(PETSC_HAVE_MPICH_NUMVERSION)

   test:
      suffix: 7_basic
      nsize: 4
      args: -test_invert -sf_type basic

   test:
      suffix: basic
      nsize: 4
      args: -test_bcast -sf_type basic
      output_file: output/ex1_1_basic.out

   test:
      suffix: 8
      nsize: 3
      args: -test_bcast -test_sf_distribute -sf_type window
      requires: define(PETSC_HAVE_MPI_WIN_CREATE)

   test:
      suffix: 8_basic
      nsize: 3
      args: -test_bcast -test_sf_distribute -sf_type basic

TEST*/