Actual source code: ex1.c

petsc-3.14.6 2021-03-30
  1: static const char help[] = "Test star forest communication (PetscSF)\n\n";

  3: /*T
  4:     Description: A star is a simple tree with one root and zero or more leaves.
  5:     A star forest is a union of disjoint stars.
  6:     Many common communication patterns can be expressed as updates of rootdata using leafdata and vice-versa.
  7:     This example creates a star forest, communicates values using the graph (see options for types of communication), views the graph, then destroys it.
  8: T*/
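/*
   In outline, the PetscSF calls exercised below follow the usual pattern (a minimal
   sketch using the names from this example; error checking omitted):

     PetscSFCreate(comm,&sf);
     PetscSFSetGraph(sf,nroots,nleaves,mine,PETSC_OWN_POINTER,remote,PETSC_OWN_POINTER);
     PetscSFSetUp(sf);
     PetscSFBcastBegin(sf,MPIU_INT,rootdata,leafdata);            root -> leaf
     PetscSFBcastEnd(sf,MPIU_INT,rootdata,leafdata);
     PetscSFReduceBegin(sf,MPIU_INT,leafdata,rootdata,MPI_SUM);   leaf -> root
     PetscSFReduceEnd(sf,MPIU_INT,leafdata,rootdata,MPI_SUM);
     PetscSFDestroy(&sf);
*/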

 10: /*
 11:   Include petscsf.h so we can use PetscSF objects. Note that this automatically
 12:   includes petscsys.h.
 13: */
 14: #include <petscsf.h>
 15: #include <petscviewer.h>

 17: /* like PetscSFView() but with an alternative array of local indices */
 18: static PetscErrorCode PetscSFViewCustomLocals_Private(PetscSF sf,const PetscInt locals[],PetscViewer viewer)
 19: {
 20:   const PetscSFNode *iremote;
 21:   PetscInt          i,nroots,nleaves,nranks;
 22:   PetscMPIInt       rank;
 23:   PetscErrorCode    ierr;

 26:   MPI_Comm_rank(PetscObjectComm((PetscObject)sf),&rank);
 27:   PetscSFGetGraph(sf,&nroots,&nleaves,NULL,&iremote);
 28:   PetscSFGetRootRanks(sf,&nranks,NULL,NULL,NULL,NULL);
 29:   PetscViewerASCIIPushTab(viewer);
 30:   PetscViewerASCIIPushSynchronized(viewer);
 31:   PetscViewerASCIISynchronizedPrintf(viewer,"[%d] Number of roots=%D, leaves=%D, remote ranks=%D\n",rank,nroots,nleaves,nranks);
 32:   for (i=0; i<nleaves; i++) {
 33:     PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %D <- (%D,%D)\n",rank,locals[i],iremote[i].rank,iremote[i].index);
 34:   }
 35:   PetscViewerFlush(viewer);
 36:   PetscViewerASCIIPopTab(viewer);
 37:   PetscViewerASCIIPopSynchronized(viewer);
 38:   return(0);
 39: }

 41: int main(int argc,char **argv)
 42: {
 43:   PetscErrorCode ierr;
 44:   PetscInt       i,nroots,nrootsalloc,nleaves,nleavesalloc,*mine,stride;
 45:   PetscSFNode    *remote;
 46:   PetscMPIInt    rank,size;
 47:   PetscSF        sf;
 48:   PetscBool      test_all,test_bcast,test_bcastop,test_reduce,test_degree,test_fetchandop,test_gather,test_scatter,test_embed,test_invert,test_sf_distribute,test_char;
 49:   MPI_Op         mop=MPI_OP_NULL; /* initialize to prevent compiler warnings with cxx_quad build */
 50:   char           opstring[256];
 51:   PetscBool      strflg;

 53:   ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;
 54:   MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
 55:   MPI_Comm_size(PETSC_COMM_WORLD,&size);

 57:   PetscOptionsBegin(PETSC_COMM_WORLD,"","PetscSF Test Options","none");
 58:   test_all        = PETSC_FALSE;
 59:   PetscOptionsBool("-test_all","Test all SF communications","",test_all,&test_all,NULL);
 60:   test_bcast      = test_all;
 61:   PetscOptionsBool("-test_bcast","Test broadcast","",test_bcast,&test_bcast,NULL);
 62:   test_bcastop    = test_all;
 63:   PetscOptionsBool("-test_bcastop","Test broadcast and reduce","",test_bcastop,&test_bcastop,NULL);
 64:   test_reduce     = test_all;
 65:   PetscOptionsBool("-test_reduce","Test reduction","",test_reduce,&test_reduce,NULL);
 66:   test_char       = test_all;
 67:   PetscOptionsBool("-test_char","Test signed char, unsigned char, and char","",test_char,&test_char,NULL);
 68:   mop             = MPI_SUM;
 69:   PetscStrcpy(opstring,"sum");
 70:   PetscOptionsString("-test_op","Designate which MPI_Op to use","",opstring,opstring,sizeof(opstring),NULL);
 71:   PetscStrcmp("sum",opstring,&strflg);
 72:   if (strflg) {
 73:     mop = MPIU_SUM;
 74:   }
 75:   PetscStrcmp("prod",opstring,&strflg);
 76:   if (strflg) {
 77:     mop = MPI_PROD;
 78:   }
 79:   PetscStrcmp("max",opstring,&strflg);
 80:   if (strflg) {
 81:     mop = MPI_MAX;
 82:   }
 83:   PetscStrcmp("min",opstring,&strflg);
 84:   if (strflg) {
 85:     mop = MPI_MIN;
 86:   }
 87:   PetscStrcmp("land",opstring,&strflg);
 88:   if (strflg) {
 89:     mop = MPI_LAND;
 90:   }
 91:   PetscStrcmp("band",opstring,&strflg);
 92:   if (strflg) {
 93:     mop = MPI_BAND;
 94:   }
 95:   PetscStrcmp("lor",opstring,&strflg);
 96:   if (strflg) {
 97:     mop = MPI_LOR;
 98:   }
 99:   PetscStrcmp("bor",opstring,&strflg);
100:   if (strflg) {
101:     mop = MPI_BOR;
102:   }
103:   PetscStrcmp("lxor",opstring,&strflg);
104:   if (strflg) {
105:     mop = MPI_LXOR;
106:   }
107:   PetscStrcmp("bxor",opstring,&strflg);
108:   if (strflg) {
109:     mop = MPI_BXOR;
110:   }
111:   test_degree     = test_all;
112:   PetscOptionsBool("-test_degree","Test computation of vertex degree","",test_degree,&test_degree,NULL);
113:   test_fetchandop = test_all;
114:   PetscOptionsBool("-test_fetchandop","Test atomic Fetch-And-Op","",test_fetchandop,&test_fetchandop,NULL);
115:   test_gather     = test_all;
116:   PetscOptionsBool("-test_gather","Test point gather","",test_gather,&test_gather,NULL);
117:   test_scatter    = test_all;
118:   PetscOptionsBool("-test_scatter","Test point scatter","",test_scatter,&test_scatter,NULL);
119:   test_embed      = test_all;
120:   PetscOptionsBool("-test_embed","Test point embed","",test_embed,&test_embed,NULL);
121:   test_invert     = test_all;
122:   PetscOptionsBool("-test_invert","Test point invert","",test_invert,&test_invert,NULL);
123:   stride          = 1;
124:   PetscOptionsInt("-stride","Stride for leaf and root data","",stride,&stride,NULL);
125:   test_sf_distribute = PETSC_FALSE;
126:   PetscOptionsBool("-test_sf_distribute","Create an SF that 'distributes' to each process, like an alltoall","",test_sf_distribute,&test_sf_distribute,NULL);
128:   PetscOptionsEnd();
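/*
   The options above are ordinary runtime options; assuming the executable is built as ex1,
   a typical invocation (matching the TEST stanzas at the end of this file) would be

     mpiexec -n 4 ./ex1 -test_bcast -test_reduce -test_op max -sf_type basic
*/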

130:   if (test_sf_distribute) {
131:     nroots = size;
132:     nrootsalloc = size;
133:     nleaves = size;
134:     nleavesalloc = size;
135:     mine = NULL;
136:     PetscMalloc1(nleaves,&remote);
137:     for (i=0; i<size; i++) {
138:       remote[i].rank = i;
139:       remote[i].index = rank;
140:     }
141:   } else {
142:     nroots       = 2 + (PetscInt)(rank == 0);
143:     nrootsalloc  = nroots * stride;
144:     nleaves      = 2 + (PetscInt)(rank > 0);
145:     nleavesalloc = nleaves * stride;
146:     mine         = NULL;
147:     if (stride > 1) {
148:       PetscInt i;

150:       PetscMalloc1(nleaves,&mine);
151:       for (i = 0; i < nleaves; i++) {
152:         mine[i] = stride * i;
153:       }
154:     }
155:     PetscMalloc1(nleaves,&remote);
156:     /* Left periodic neighbor */
157:     remote[0].rank  = (rank+size-1)%size;
158:     remote[0].index = 1 * stride;
159:     /* Right periodic neighbor */
160:     remote[1].rank  = (rank+1)%size;
161:     remote[1].index = 0 * stride;
 162:     if (rank > 0) {               /* All processes with rank > 0 reference root 2 on rank 0 */
163:       remote[2].rank  = 0;
164:       remote[2].index = 2 * stride;
165:     }
166:   }
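  /*
     For illustration, assuming 3 MPI ranks and the default -stride 1, the branch above
     builds the graph (leaf index -> (owner rank, root index)):

       rank 0: 3 roots; leaves 0,1   -> (2,1),(1,0)
       rank 1: 2 roots; leaves 0,1,2 -> (0,1),(2,0),(0,2)
       rank 2: 2 roots; leaves 0,1,2 -> (1,1),(0,0),(0,2)

     i.e. each process references its periodic neighbors' boundary roots, and every process
     with rank > 0 additionally references root 2 on rank 0.
  */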

 168:   /* Create a star forest for communication. With the default stride of 1 the leaf space is dense, so mine is NULL; for larger strides mine lists the strided local leaf indices. */
169:   PetscSFCreate(PETSC_COMM_WORLD,&sf);
170:   PetscSFSetFromOptions(sf);
171:   PetscSFSetGraph(sf,nrootsalloc,nleaves,mine,PETSC_OWN_POINTER,remote,PETSC_OWN_POINTER);
172:   PetscSFSetUp(sf);
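  /* Since PETSC_OWN_POINTER is passed for both mine and remote, the SF takes ownership of
     those arrays and frees them in PetscSFDestroy(); the caller must not free or reuse them.
     The explicit PetscSFSetUp() is optional: the first communication call would set the SF
     up automatically. */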

174:   /* View graph, mostly useful for debugging purposes. */
175:   PetscViewerPushFormat(PETSC_VIEWER_STDOUT_WORLD,PETSC_VIEWER_ASCII_INFO_DETAIL);
176:   PetscSFView(sf,PETSC_VIEWER_STDOUT_WORLD);
177:   PetscViewerPopFormat(PETSC_VIEWER_STDOUT_WORLD);

179:   if (test_bcast) {             /* broadcast rootdata into leafdata */
180:     PetscInt *rootdata,*leafdata;
181:     /* Allocate space for send and receive buffers. This example communicates PetscInt, but other types, including
182:      * user-defined structures, could also be used. */
183:     PetscMalloc2(nrootsalloc,&rootdata,nleavesalloc,&leafdata);
184:     /* Set rootdata buffer to be broadcast */
185:     for (i=0; i<nrootsalloc; i++) rootdata[i] = -1;
186:     for (i=0; i<nroots; i++) rootdata[i*stride] = 100*(rank+1) + i;
 187:     /* Initialize local buffer; these values are never used. */
188:     for (i=0; i<nleavesalloc; i++) leafdata[i] = -1;
189:     /* Broadcast entries from rootdata to leafdata. Computation or other communication can be performed between the begin and end calls. */
190:     PetscSFBcastBegin(sf,MPIU_INT,rootdata,leafdata);
191:     PetscSFBcastEnd(sf,MPIU_INT,rootdata,leafdata);
192:     PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Bcast Rootdata\n");
193:     PetscIntView(nrootsalloc,rootdata,PETSC_VIEWER_STDOUT_WORLD);
194:     PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Bcast Leafdata\n");
195:     PetscIntView(nleavesalloc,leafdata,PETSC_VIEWER_STDOUT_WORLD);
196:     PetscFree2(rootdata,leafdata);
197:   }
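  /* As a concrete check (assuming 2 ranks and -stride 1): rank 0 owns roots {100,101,102} and
     rank 1 owns {200,201}; after the broadcast each leaf holds the value of the root it points
     to, so leafdata becomes {201,200} on rank 0 and {101,100,102} on rank 1. */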

199:   if (test_bcast && test_char) { /* Bcast with char */
200:     PetscInt len;
201:     char buf[256];
202:     char *rootdata,*leafdata;
203:     PetscMalloc2(nrootsalloc,&rootdata,nleavesalloc,&leafdata);
204:     /* Set rootdata buffer to be broadcast */
205:     for (i=0; i<nrootsalloc; i++) rootdata[i] = '*';
206:     for (i=0; i<nroots; i++) rootdata[i*stride] = 'A' + rank*3 + i; /* rank is very small, so it is fine to compute a char */
 207:     /* Initialize local buffer; these values are never used. */
208:     for (i=0; i<nleavesalloc; i++) leafdata[i] = '?';

210:     PetscSFBcastBegin(sf,MPI_CHAR,rootdata,leafdata);
211:     PetscSFBcastEnd(sf,MPI_CHAR,rootdata,leafdata);

213:     PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Bcast Rootdata in type of char\n");
214:     len  = 0; PetscSNPrintf(buf,256,"%4d:",rank); len += 5;
215:     for (i=0; i<nrootsalloc; i++) {PetscSNPrintf(buf+len,256-len,"%5c",rootdata[i]); len += 5;}
216:     PetscSynchronizedPrintf(PETSC_COMM_WORLD,"%s\n",buf);
217:     PetscSynchronizedFlush(PETSC_COMM_WORLD,PETSC_STDOUT);

219:     PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Bcast Leafdata in type of char\n");
220:     len = 0; PetscSNPrintf(buf,256,"%4d:",rank); len += 5;
221:     for (i=0; i<nleavesalloc; i++) {PetscSNPrintf(buf+len,256-len,"%5c",leafdata[i]); len += 5;}
222:     PetscSynchronizedPrintf(PETSC_COMM_WORLD,"%s\n",buf);
223:     PetscSynchronizedFlush(PETSC_COMM_WORLD,PETSC_STDOUT);

225:     PetscFree2(rootdata,leafdata);
226:   }

 228:   if (test_bcastop) {         /* Broadcast rootdata and reduce it into leafdata */
229:     PetscInt *rootdata,*leafdata;
230:     /* Allocate space for send and receive buffers. This example communicates PetscInt, but other types, including
231:      * user-defined structures, could also be used. */
232:     PetscMalloc2(nrootsalloc,&rootdata,nleavesalloc,&leafdata);
233:     /* Set rootdata buffer to be broadcast */
234:     for (i=0; i<nrootsalloc; i++) rootdata[i] = -1;
235:     for (i=0; i<nroots; i++) rootdata[i*stride] = 100*(rank+1) + i;
236:     /* Set leaf values to reduce with */
237:     for (i=0; i<nleavesalloc; i++) leafdata[i] = -10*(rank+1) - i;
238:     PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Pre-BcastAndOp Leafdata\n");
239:     PetscIntView(nleavesalloc,leafdata,PETSC_VIEWER_STDOUT_WORLD);
 240:     /* Broadcast entries from rootdata and combine them into leafdata with the selected MPI_Op. Computation or other communication can be performed between the begin and end calls. */
241:     PetscSFBcastAndOpBegin(sf,MPIU_INT,rootdata,leafdata,mop);
242:     PetscSFBcastAndOpEnd(sf,MPIU_INT,rootdata,leafdata,mop);
243:     PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## BcastAndOp Rootdata\n");
244:     PetscIntView(nrootsalloc,rootdata,PETSC_VIEWER_STDOUT_WORLD);
245:     PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## BcastAndOp Leafdata\n");
246:     PetscIntView(nleavesalloc,leafdata,PETSC_VIEWER_STDOUT_WORLD);
247:     PetscFree2(rootdata,leafdata);
248:   }
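  /* Unlike a plain broadcast, PetscSFBcastAndOp combines the incoming root value with the
     existing leaf value using the selected MPI_Op, so with the default -test_op sum each leaf
     ends up holding its initial value plus the value of the root it references. */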

250:   if (test_reduce) {            /* Reduce leafdata into rootdata */
251:     PetscInt *rootdata,*leafdata;
252:     PetscMalloc2(nrootsalloc,&rootdata,nleavesalloc,&leafdata);
253:     /* Initialize rootdata buffer in which the result of the reduction will appear. */
254:     for (i=0; i<nrootsalloc; i++) rootdata[i] = -1;
255:     for (i=0; i<nroots; i++) rootdata[i*stride] = 100*(rank+1) + i;
256:     /* Set leaf values to reduce. */
257:     for (i=0; i<nleavesalloc; i++) leafdata[i] = -1;
258:     for (i=0; i<nleaves; i++) leafdata[i*stride] = 1000*(rank+1) + 10*i;
259:     PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Pre-Reduce Rootdata\n");
260:     PetscIntView(nrootsalloc,rootdata,PETSC_VIEWER_STDOUT_WORLD);
 261:     /* Perform the reduction. Computation or other communication can be performed between the begin and end calls.
 262:      * This example uses the op selected with -test_op (sum by default), but any MPI_Op can be used (e.g. MPI_MAX, MPI_PROD). */
263:     PetscSFReduceBegin(sf,MPIU_INT,leafdata,rootdata,mop);
264:     PetscSFReduceEnd(sf,MPIU_INT,leafdata,rootdata,mop);
265:     PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Reduce Leafdata\n");
266:     PetscIntView(nleavesalloc,leafdata,PETSC_VIEWER_STDOUT_WORLD);
267:     PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Reduce Rootdata\n");
268:     PetscIntView(nrootsalloc,rootdata,PETSC_VIEWER_STDOUT_WORLD);
269:     PetscFree2(rootdata,leafdata);
270:   }
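  /* The existing root values participate in the reduction: with -test_op sum each root ends up
     with its initial value plus the sum of all leaf contributions targeting it. To overwrite
     the roots instead, MPI_REPLACE can be passed as the op. */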

272:   if (test_reduce && test_char) { /* Reduce with signed char */
273:     PetscInt len;
274:     char buf[256];
275:     signed char *rootdata,*leafdata;
276:     PetscMalloc2(nrootsalloc,&rootdata,nleavesalloc,&leafdata);
277:     /* Initialize rootdata buffer in which the result of the reduction will appear. */
278:     for (i=0; i<nrootsalloc; i++) rootdata[i] = -1;
279:     for (i=0; i<nroots; i++) rootdata[i*stride] = 10*(rank+1) + i;
280:     /* Set leaf values to reduce. */
281:     for (i=0; i<nleavesalloc; i++) leafdata[i] = -1;
282:     for (i=0; i<nleaves; i++) leafdata[i*stride] = 50*(rank+1) + 10*i;
283:     PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Pre-Reduce Rootdata in type of signed char\n");

285:     len = 0; PetscSNPrintf(buf,256,"%4d:",rank); len += 5;
286:     for (i=0; i<nrootsalloc; i++) {PetscSNPrintf(buf+len,256-len,"%5d",rootdata[i]); len += 5;}
287:     PetscSynchronizedPrintf(PETSC_COMM_WORLD,"%s\n",buf);
288:     PetscSynchronizedFlush(PETSC_COMM_WORLD,PETSC_STDOUT);

 290:     /* MPI_SIGNED_CHAR is used here; MPI_CHAR would trigger an error, since the MPI standard does not support reductions on MPI_CHAR.
 291:        Testing with -test_op max, one can see the sign does take effect with MPI_MAX.
 292:      */
293:     PetscSFReduceBegin(sf,MPI_SIGNED_CHAR,leafdata,rootdata,mop);
294:     PetscSFReduceEnd(sf,MPI_SIGNED_CHAR,leafdata,rootdata,mop);

296:     PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Reduce Leafdata in type of signed char\n");
297:     len  = 0; PetscSNPrintf(buf,256,"%4d:",rank); len += 5;
298:     for (i=0; i<nleavesalloc; i++) {PetscSNPrintf(buf+len,256-len,"%5d",leafdata[i]); len += 5;}
299:     PetscSynchronizedPrintf(PETSC_COMM_WORLD,"%s\n",buf);
300:     PetscSynchronizedFlush(PETSC_COMM_WORLD,PETSC_STDOUT);

302:     PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Reduce Rootdata in type of signed char\n");
303:     len = 0; PetscSNPrintf(buf,256,"%4d:",rank); len += 5;
304:     for (i=0; i<nrootsalloc; i++) {PetscSNPrintf(buf+len,256-len,"%5d",rootdata[i]); len += 5;}
305:     PetscSynchronizedPrintf(PETSC_COMM_WORLD,"%s\n",buf);
306:     PetscSynchronizedFlush(PETSC_COMM_WORLD,PETSC_STDOUT);

308:     PetscFree2(rootdata,leafdata);
309:   }

311:   if (test_reduce && test_char) { /* Reduce with unsigned char */
312:     PetscInt len;
313:     char buf[256];
314:     unsigned char *rootdata,*leafdata;
315:     PetscMalloc2(nrootsalloc,&rootdata,nleavesalloc,&leafdata);
316:     /* Initialize rootdata buffer in which the result of the reduction will appear. */
317:     for (i=0; i<nrootsalloc; i++) rootdata[i] = -1;
318:     for (i=0; i<nroots; i++) rootdata[i*stride] = 10*(rank+1) + i;
319:     /* Set leaf values to reduce. */
320:     for (i=0; i<nleavesalloc; i++) leafdata[i] = -1;
321:     for (i=0; i<nleaves; i++) leafdata[i*stride] = 50*(rank+1) + 10*i;
322:     PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Pre-Reduce Rootdata in type of unsigned char\n");

324:     len = 0; PetscSNPrintf(buf,256,"%4d:",rank); len += 5;
325:     for (i=0; i<nrootsalloc; i++) {PetscSNPrintf(buf+len,256-len,"%5u",rootdata[i]); len += 5;}
326:     PetscSynchronizedPrintf(PETSC_COMM_WORLD,"%s\n",buf);
327:     PetscSynchronizedFlush(PETSC_COMM_WORLD,PETSC_STDOUT);

 329:     /* MPI_UNSIGNED_CHAR is used here; MPI_CHAR would trigger an error, since the MPI standard does not support reductions on MPI_CHAR.
 330:        Because the buffers are unsigned, the -1 initializations above show up as 255, and there is no sign for MPI_MAX to take into account.
 331:      */
332:     PetscSFReduceBegin(sf,MPI_UNSIGNED_CHAR,leafdata,rootdata,mop);
333:     PetscSFReduceEnd(sf,MPI_UNSIGNED_CHAR,leafdata,rootdata,mop);

335:     PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Reduce Leafdata in type of unsigned char\n");
336:     len  = 0; PetscSNPrintf(buf,256,"%4d:",rank); len += 5;
337:     for (i=0; i<nleavesalloc; i++) {PetscSNPrintf(buf+len,256-len,"%5u",leafdata[i]); len += 5;}
338:     PetscSynchronizedPrintf(PETSC_COMM_WORLD,"%s\n",buf);
339:     PetscSynchronizedFlush(PETSC_COMM_WORLD,PETSC_STDOUT);

341:     PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Reduce Rootdata in type of unsigned char\n");
342:     len = 0; PetscSNPrintf(buf,256,"%4d:",rank); len += 5;
343:     for (i=0; i<nrootsalloc; i++) {PetscSNPrintf(buf+len,256-len,"%5u",rootdata[i]); len += 5;}
344:     PetscSynchronizedPrintf(PETSC_COMM_WORLD,"%s\n",buf);
345:     PetscSynchronizedFlush(PETSC_COMM_WORLD,PETSC_STDOUT);

347:     PetscFree2(rootdata,leafdata);
348:   }

350:   if (test_degree) {
351:     const PetscInt *degree;
352:     PetscSFComputeDegreeBegin(sf,&degree);
353:     PetscSFComputeDegreeEnd(sf,&degree);
354:     PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Root degrees\n");
355:     PetscIntView(nrootsalloc,degree,PETSC_VIEWER_STDOUT_WORLD);
356:   }
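  /* degree[i] is the number of leaves, on any process, that reference root i of this process.
     For the default graph with 4 ranks and -stride 1 (an assumed configuration), rank 0 reports
     degrees {1,1,3} (its root 2 is referenced by ranks 1-3) and every other rank reports {1,1}. */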

358:   if (test_fetchandop) {
359:     /* Cannot use text compare here because token ordering is not deterministic */
360:     PetscInt *leafdata,*leafupdate,*rootdata;
361:     PetscMalloc3(nleavesalloc,&leafdata,nleavesalloc,&leafupdate,nrootsalloc,&rootdata);
362:     for (i=0; i<nleavesalloc; i++) leafdata[i] = -1;
363:     for (i=0; i<nleaves; i++) leafdata[i*stride] = 1;
364:     for (i=0; i<nrootsalloc; i++) rootdata[i] = -1;
365:     for (i=0; i<nroots; i++) rootdata[i*stride] = 0;
366:     PetscSFFetchAndOpBegin(sf,MPIU_INT,rootdata,leafdata,leafupdate,mop);
367:     PetscSFFetchAndOpEnd(sf,MPIU_INT,rootdata,leafdata,leafupdate,mop);
368:     PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Rootdata (sum of 1 from each leaf)\n");
369:     PetscIntView(nrootsalloc,rootdata,PETSC_VIEWER_STDOUT_WORLD);
370:     PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Leafupdate (value at roots prior to my atomic update)\n");
371:     PetscIntView(nleavesalloc,leafupdate,PETSC_VIEWER_STDOUT_WORLD);
372:     PetscFree3(leafdata,leafupdate,rootdata);
373:   }
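  /* Fetch-and-op combines both directions atomically: for each leaf, the current value of the
     referenced root is fetched into leafupdate and the root is then updated with leafdata using
     the op. With the default sum and the initializations above, each root ends up equal to its
     degree, while leafupdate records the running counts in whatever (nondeterministic) order the
     updates were applied. */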

375:   if (test_gather) {
376:     const PetscInt *degree;
377:     PetscInt       inedges,*indata,*outdata;
378:     PetscSFComputeDegreeBegin(sf,&degree);
379:     PetscSFComputeDegreeEnd(sf,&degree);
380:     for (i=0,inedges=0; i<nrootsalloc; i++) inedges += degree[i];
381:     PetscMalloc2(inedges,&indata,nleavesalloc,&outdata);
382:     for (i=0; i<nleavesalloc; i++) outdata[i] = -1;
383:     for (i=0; i<nleaves; i++) outdata[i*stride] = 1000*(rank+1) + i;
384:     PetscSFGatherBegin(sf,MPIU_INT,outdata,indata);
385:     PetscSFGatherEnd(sf,MPIU_INT,outdata,indata);
386:     PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Gathered data at multi-roots from leaves\n");
387:     PetscIntView(inedges,indata,PETSC_VIEWER_STDOUT_WORLD);
388:     PetscFree2(indata,outdata);
389:   }
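  /* Gather concatenates rather than combines: conceptually each root i is expanded into degree[i]
     "multi-root" slots and every referencing leaf deposits its value into its own slot, which is
     why indata is sized by the total number of incoming edges (inedges) rather than nrootsalloc. */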

391:   if (test_scatter) {
392:     const PetscInt *degree;
393:     PetscInt       j,count,inedges,*indata,*outdata;
394:     PetscSFComputeDegreeBegin(sf,&degree);
395:     PetscSFComputeDegreeEnd(sf,&degree);
396:     for (i=0,inedges=0; i<nrootsalloc; i++) inedges += degree[i];
397:     PetscMalloc2(inedges,&indata,nleavesalloc,&outdata);
398:     for (i=0; i<nleavesalloc; i++) outdata[i] = -1;
399:     for (i=0,count=0; i<nrootsalloc; i++) {
400:       for (j=0; j<degree[i]; j++) indata[count++] = 1000*(rank+1) + 100*i + j;
401:     }
402:     PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Data at multi-roots, to scatter to leaves\n");
403:     PetscIntView(inedges,indata,PETSC_VIEWER_STDOUT_WORLD);

405:     PetscSFScatterBegin(sf,MPIU_INT,indata,outdata);
406:     PetscSFScatterEnd(sf,MPIU_INT,indata,outdata);
407:     PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Scattered data at leaves\n");
408:     PetscIntView(nleavesalloc,outdata,PETSC_VIEWER_STDOUT_WORLD);
409:     PetscFree2(indata,outdata);
410:   }
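  /* Scatter is the inverse of gather: each of the inedges multi-root entries filled above is sent
     to exactly one leaf, so leaves referencing the same root can receive different values, in
     contrast to a broadcast. */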

412:   if (test_embed) {
413:     const PetscInt nroots = 1 + (PetscInt) !rank;
414:     PetscInt       selected[2];
415:     PetscSF        esf;

417:     selected[0] = stride;
418:     selected[1] = 2*stride;
419:     PetscSFCreateEmbeddedSF(sf,nroots,selected,&esf);
420:     PetscSFSetUp(esf);
421:     PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Embedded PetscSF\n");
422:     PetscViewerPushFormat(PETSC_VIEWER_STDOUT_WORLD,PETSC_VIEWER_ASCII_INFO_DETAIL);
423:     PetscSFView(esf,PETSC_VIEWER_STDOUT_WORLD);
424:     PetscViewerPopFormat(PETSC_VIEWER_STDOUT_WORLD);
425:     PetscSFDestroy(&esf);
426:   }
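  /* PetscSFCreateEmbeddedSF keeps only the edges that point at the selected roots (here root
     index stride on every process, plus root index 2*stride on rank 0) and drops all other
     leaves from the communication graph. */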

428:   if (test_invert) {
429:     const PetscInt *degree;
430:     PetscInt *mRootsOrigNumbering;
431:     PetscInt inedges;
432:     PetscSF msf,imsf;

434:     PetscSFGetMultiSF(sf,&msf);
435:     PetscSFCreateInverseSF(msf,&imsf);
436:     PetscSFSetUp(msf);
437:     PetscSFSetUp(imsf);
438:     PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Multi-SF\n");
439:     PetscSFView(msf,PETSC_VIEWER_STDOUT_WORLD);
440:     PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Multi-SF roots indices in original SF roots numbering\n");
441:     PetscSFComputeDegreeBegin(sf,&degree);
442:     PetscSFComputeDegreeEnd(sf,&degree);
443:     PetscSFComputeMultiRootOriginalNumbering(sf,degree,&inedges,&mRootsOrigNumbering);
444:     PetscIntView(inedges,mRootsOrigNumbering,PETSC_VIEWER_STDOUT_WORLD);
445:     PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Inverse of Multi-SF\n");
446:     PetscSFView(imsf,PETSC_VIEWER_STDOUT_WORLD);
447:     PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Inverse of Multi-SF, original numbering\n");
448:     PetscSFViewCustomLocals_Private(imsf,mRootsOrigNumbering,PETSC_VIEWER_STDOUT_WORLD);
449:     PetscSFDestroy(&imsf);
450:     PetscFree(mRootsOrigNumbering);
451:   }
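  /* The multi-SF gives each root degree-many "multi-root" slots so that every edge has its own
     root of degree one; that is what makes the graph invertible, and PetscSFCreateInverseSF then
     swaps the roles of roots and leaves. PetscSFComputeMultiRootOriginalNumbering maps the
     multi-roots back to the original root indices, which is what the custom viewer above prints
     as the local indices of the inverse. */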

 453:   /* Clean up storage for the star forest. */
454:   PetscSFDestroy(&sf);
 455:   ierr = PetscFinalize();
456:   return ierr;
457: }

459: /*TEST

461:    test:
462:       nsize: 4
463:       filter: grep -v "type" | grep -v "sort"
464:       args: -test_bcast -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor {{create dynamic allocate}}
465:       requires: define(PETSC_HAVE_MPI_ONE_SIDED)


468:    test:
469:       suffix: 2
470:       nsize: 4
471:       filter: grep -v "type" | grep -v "sort"
472:       args: -test_reduce -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor {{create dynamic allocate}}
473:       requires: define(PETSC_HAVE_MPI_ONE_SIDED)

475:    test:
476:       suffix: 2_basic
477:       nsize: 4
478:       args: -test_reduce -sf_type basic

480:    test:
481:       suffix: 3
482:       nsize: 4
483:       filter: grep -v "type" | grep -v "sort"
484:       args: -test_degree -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor {{create dynamic allocate}}
485:       requires: define(PETSC_HAVE_MPI_ONE_SIDED)

487:    test:
488:       suffix: 3_basic
489:       nsize: 4
490:       args: -test_degree -sf_type basic

492:    test:
493:       suffix: 4
494:       nsize: 4
495:       filter: grep -v "type" | grep -v "sort"
496:       args: -test_gather -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor {{create dynamic allocate}}
497:       requires: define(PETSC_HAVE_MPI_ONE_SIDED)

499:    test:
500:       suffix: 4_basic
501:       nsize: 4
502:       args: -test_gather -sf_type basic

504:    test:
505:       suffix: 4_stride
506:       nsize: 4
507:       args: -test_gather -sf_type basic -stride 2

509:    test:
510:       suffix: 5
511:       nsize: 4
512:       filter: grep -v "type" | grep -v "sort"
513:       args: -test_scatter -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor {{create dynamic allocate}}
514:       requires: define(PETSC_HAVE_MPI_ONE_SIDED)

516:    test:
517:       suffix: 5_basic
518:       nsize: 4
519:       args: -test_scatter -sf_type basic

521:    test:
522:       suffix: 5_stride
523:       nsize: 4
524:       args: -test_scatter -sf_type basic -stride 2

526:    test:
527:       suffix: 6
528:       nsize: 4
529:       filter: grep -v "type" | grep -v "sort"
530:       # No -sf_window_flavor dynamic due to bug https://gitlab.com/petsc/petsc/issues/555
531:       args: -test_embed -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor {{create allocate}}
532:       requires: define(PETSC_HAVE_MPI_ONE_SIDED)

534:    test:
535:       suffix: 6_basic
536:       nsize: 4
537:       args: -test_embed -sf_type basic

539:    test:
540:       suffix: 7
541:       nsize: 4
542:       filter: grep -v "type" | grep -v "sort"
543:       args: -test_invert -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor {{create dynamic allocate}}
544:       requires: define(PETSC_HAVE_MPI_ONE_SIDED)

546:    test:
547:       suffix: 7_basic
548:       nsize: 4
549:       args: -test_invert -sf_type basic

551:    test:
552:       suffix: basic
553:       nsize: 4
554:       args: -test_bcast -sf_type basic
555:       output_file: output/ex1_1_basic.out

557:    test:
558:       suffix: bcastop_basic
559:       nsize: 4
560:       args: -test_bcastop -sf_type basic
561:       output_file: output/ex1_bcastop_basic.out

563:    test:
564:       suffix: 8
565:       nsize: 3
566:       filter: grep -v "type" | grep -v "sort"
567:       args: -test_bcast -test_sf_distribute -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor {{create dynamic allocate}}
568:       requires: define(PETSC_HAVE_MPI_ONE_SIDED)

570:    test:
571:       suffix: 8_basic
572:       nsize: 3
573:       args: -test_bcast -test_sf_distribute -sf_type basic

575:    test:
576:       suffix: 9_char
577:       nsize: 4
578:       args: -sf_type basic -test_bcast -test_reduce -test_op max -test_char

580:    # Here we do not test -sf_window_flavor dynamic since it is designed for repeated SFs with few different rootdata pointers
581:    test:
582:       suffix: 10
583:       filter: grep -v "type" | grep -v "sort"
584:       nsize: 4
585:       args: -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor {{create allocate}} -test_all -test_bcastop 0 -test_fetchandop 0
586:       requires: define(PETSC_HAVE_MPI_ONE_SIDED)

588:    # The nightly test suite with MPICH uses ch3:sock, which is broken when winsize == 0 in some of the processes
589:    test:
590:       suffix: 10_shared
591:       output_file: output/ex1_10.out
592:       filter: grep -v "type" | grep -v "sort"
593:       nsize: 4
594:       args: -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor shared -test_all -test_bcastop 0 -test_fetchandop 0
595:       requires: define(PETSC_HAVE_MPI_PROCESS_SHARED_MEMORY) !define(PETSC_HAVE_MPICH_NUMVERSION) define(PETSC_HAVE_MPI_ONE_SIDED)

597:    test:
598:       suffix: 10_basic
599:       nsize: 4
600:       args: -sf_type basic -test_all -test_bcastop 0 -test_fetchandop 0

602: TEST*/