Actual source code: ex9.c

petsc-3.14.6 2021-03-30
static char help[]= "This example shows 1) how to transfer vectors from a parent communicator to vectors on a child communicator and vice versa;\n\
  2) how to transfer vectors from a subcommunicator to vectors on another subcommunicator. The two subcommunicators are not\n\
  required to cover all processes in PETSC_COMM_WORLD; 3) how to copy a vector from a parent communicator to vectors on its child communicators.\n\n";

#include <petscvec.h>
int main(int argc,char **argv)
{
  PetscErrorCode ierr;
  PetscMPIInt    nproc,grank,mycolor;
  PetscInt       i,n,N = 20,low,high;
  MPI_Comm       subcomm;
  Vec            x,yg; /* global vectors on PETSC_COMM_WORLD */
  VecScatter     vscat;
  IS             ix,iy;
  PetscBool      world2sub  = PETSC_FALSE;  /* Copy a vector from WORLD to a subcomm? */
  PetscBool      sub2sub    = PETSC_FALSE;  /* Copy a vector from a subcomm to another subcomm? */
  PetscBool      world2subs = PETSC_FALSE;  /* Copy a vector from WORLD to multiple subcomms? */

  ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;
  MPI_Comm_size(PETSC_COMM_WORLD,&nproc);
  MPI_Comm_rank(PETSC_COMM_WORLD,&grank);

  if (nproc < 2) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_ARG_SIZ,"This test must have at least two processes to run");

  PetscOptionsGetBool(NULL,NULL,"-world2sub",&world2sub,NULL);
  PetscOptionsGetBool(NULL,NULL,"-sub2sub",&sub2sub,NULL);
  PetscOptionsGetBool(NULL,NULL,"-world2subs",&world2subs,NULL);
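  /* Run with, e.g., mpiexec -n 7 ./ex9 -world2sub (see the TEST block at the end of this file for the tested configurations) */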

  /* Split PETSC_COMM_WORLD into three subcomms. Each process can only see the subcomm it belongs to */
  mycolor = grank % 3;
  MPI_Comm_split(PETSC_COMM_WORLD,mycolor,grank,&subcomm);
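  /* With mycolor = grank % 3, subcomm0 contains world ranks 0, 3, 6, ..., subcomm1 contains ranks 1, 4, 7, ..., and subcomm2 contains ranks 2, 5, 8, ... */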

  /*===========================================================================
   *  Transfer a vector x defined on PETSC_COMM_WORLD to a vector y defined on
   *  a subcommunicator of PETSC_COMM_WORLD and vice versa.
   *===========================================================================*/
  if (world2sub) {
    VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,N,&x);
    PetscObjectSetName((PetscObject)x,"x_commworld"); /* Give a name to view x clearly */

    /* Initialize x to [-0.0, -1.0, -2.0, ..., -19.0] */
    VecGetOwnershipRange(x,&low,&high);
    for (i=low; i<high; i++) {
      PetscScalar val = -i;
      VecSetValue(x,i,val,INSERT_VALUES);
    }
    VecAssemblyBegin(x);
    VecAssemblyEnd(x);

    /* Transfer x to a vector y only defined on subcomm0 and vice versa */
    if (mycolor == 0) { /* subcomm0 contains ranks 0, 3, 6, ... in PETSC_COMM_WORLD */
      Vec         y;
      PetscScalar *yvalue;

      VecCreateMPI(subcomm,PETSC_DECIDE,N,&y);
      PetscObjectSetName((PetscObject)y,"y_subcomm_0"); /* Give a name to view y clearly */
      VecGetLocalSize(y,&n);
      VecGetArray(y,&yvalue);
      /* Create yg on PETSC_COMM_WORLD and alias yg with y. They share the memory pointed to by yvalue.
        Note this is a collective call. All processes have to call it and supply a consistent N.
      */
      VecCreateMPIWithArray(PETSC_COMM_WORLD,1,n,N,yvalue,&yg);
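      /* yg's local size is n on ranks of subcomm0 and 0 on all other ranks (see the else branch below), so the local sizes sum to N across PETSC_COMM_WORLD */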

      /* Create an identity map that makes yg[i] = x[i], i=0..N-1 */
      VecGetOwnershipRange(yg,&low,&high); /* low, high are global indices */
      ISCreateStride(PETSC_COMM_SELF,high-low,low,1,&ix);
      ISDuplicate(ix,&iy);

      /* Union of ix's on subcomm0 covers the full range of [0,N) */
      VecScatterCreate(x,ix,yg,iy,&vscat);
      VecScatterBegin(vscat,x,yg,INSERT_VALUES,SCATTER_FORWARD);
      VecScatterEnd(vscat,x,yg,INSERT_VALUES,SCATTER_FORWARD);

      /* Once yg has received the data from x, we return yvalue to y so that we can use y in other operations.
        VecGetArray() must be paired with VecRestoreArray().
      */
      VecRestoreArray(y,&yvalue);

      /* Libraries on subcomm0 can safely use y now, for example, view and scale it */
      VecView(y,PETSC_VIEWER_STDOUT_(subcomm));
      VecScale(y,2.0);

      /* Send the new y back to x */
      VecGetArray(y,&yvalue); /* If VecScale was done on the GPU, PETSc will prepare a valid yvalue for access */
      /* Supply the new yvalue to yg without copying memory */
      VecPlaceArray(yg,yvalue);
      VecScatterBegin(vscat,yg,x,INSERT_VALUES,SCATTER_REVERSE);
      VecScatterEnd(vscat,yg,x,INSERT_VALUES,SCATTER_REVERSE);
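      /* VecResetArray() undoes the earlier VecPlaceArray(), so yg no longer references yvalue and it is
         safe to return yvalue to y with VecRestoreArray() */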
      VecResetArray(yg);
      VecRestoreArray(y,&yvalue);

      VecDestroy(&y);
    } else {
      /* Ranks outside of subcomm0 do not supply values to yg */
      VecCreateMPIWithArray(PETSC_COMM_WORLD,1,0/*n*/,N,NULL,&yg);

      /* Ranks in subcomm0 already specified the full range of the identity map. The remaining
        ranks just need to create empty ISes to cheat VecScatterCreate.
      */
      ISCreateStride(PETSC_COMM_SELF,0,0,1,&ix);
      ISDuplicate(ix,&iy);

      VecScatterCreate(x,ix,yg,iy,&vscat);
      VecScatterBegin(vscat,x,yg,INSERT_VALUES,SCATTER_FORWARD);
      VecScatterEnd(vscat,x,yg,INSERT_VALUES,SCATTER_FORWARD);

      /* Send the new y back to x. Ranks outside of subcomm0 actually have nothing to send.
        But they have to call VecScatterBegin/End since these routines are collective.
      */
      VecScatterBegin(vscat,yg,x,INSERT_VALUES,SCATTER_REVERSE);
      VecScatterEnd(vscat,yg,x,INSERT_VALUES,SCATTER_REVERSE);
    }

    VecView(x,PETSC_VIEWER_STDOUT_WORLD);
    ISDestroy(&ix);
    ISDestroy(&iy);
    VecDestroy(&x);
    VecDestroy(&yg);
    VecScatterDestroy(&vscat);
  } /* world2sub */

  /*===========================================================================
   *  Transfer a vector x defined on subcomm0 to a vector y defined on
   *  subcomm1. The two subcomms are not overlapping and their union is
   *  not necessarily equal to PETSC_COMM_WORLD.
   *===========================================================================*/
  if (sub2sub) {
    if (mycolor == 0) {
      /* Intentionally declare N as a local variable so that processes in subcomm1 do not know its value */
      PetscInt          n,N = 22;
      Vec               x,xg,yg;
      IS                ix,iy;
      VecScatter        vscat;
      const PetscScalar *xvalue;
      MPI_Comm          intercomm,parentcomm;
      PetscMPIInt       lrank;

      MPI_Comm_rank(subcomm,&lrank);
      VecCreateMPI(subcomm,PETSC_DECIDE,N,&x); /* x is on subcomm */
      VecGetOwnershipRange(x,&low,&high);

      /* Initialize x = [0.0, 1.0, 2.0, ..., 21.0] */
      for (i=low; i<high; i++) {
        PetscScalar val = i;
        VecSetValue(x,i,val,INSERT_VALUES);
      }
      VecAssemblyBegin(x);
      VecAssemblyEnd(x);

      MPI_Intercomm_create(subcomm,0,PETSC_COMM_WORLD/*peer_comm*/,1,100/*tag*/,&intercomm);
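      /* In the call above, the local leader is rank 0 of subcomm0 and the remote leader argument (1) is the rank
         in the peer communicator PETSC_COMM_WORLD of subcomm1's first process; the tag (100) must match the tag
         used by subcomm1 in its MPI_Intercomm_create call below */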

      /* Tell rank 0 of subcomm1 the global size of x */
      if (!lrank) {MPI_Send(&N,1,MPIU_INT,0/*receiver's rank in remote comm, i.e., subcomm1*/,200/*tag*/,intercomm);}

      /* Create an intracomm PETSc can work on. Ranks in subcomm0 are ordered before ranks in subcomm1 in parentcomm.
        But this order actually does not matter, since what we care about is the vector y, which is defined on subcomm1.
      */
      MPI_Intercomm_merge(intercomm,0/*low*/,&parentcomm);

      /* Create a vector xg on parentcomm, which shares memory with x */
      VecGetArrayRead(x,&xvalue);
      VecGetLocalSize(x,&n);
      VecCreateMPIWithArray(parentcomm,1,n,N,xvalue,&xg);

      /* Ranks in subcomm0 have nothing on yg, so they simply have n=0, array=NULL */
      VecCreateMPIWithArray(parentcomm,1,0/*n*/,N,NULL/*array*/,&yg);

      /* Create the vecscatter, which does an identity map by setting yg[i] = xg[i], i=0..N-1. */
      VecGetOwnershipRange(xg,&low,&high); /* low, high are global indices of xg */
      ISCreateStride(PETSC_COMM_SELF,high-low,low,1,&ix);
      ISDuplicate(ix,&iy);
      VecScatterCreate(xg,ix,yg,iy,&vscat);

      /* Scatter values from xg to yg */
      VecScatterBegin(vscat,xg,yg,INSERT_VALUES,SCATTER_FORWARD);
      VecScatterEnd(vscat,xg,yg,INSERT_VALUES,SCATTER_FORWARD);

      /* After the VecScatter is done, xg is idle so we can safely return xvalue to x */
      VecRestoreArrayRead(x,&xvalue);
      VecDestroy(&x);
      ISDestroy(&ix);
      ISDestroy(&iy);
      VecDestroy(&xg);
      VecDestroy(&yg);
      VecScatterDestroy(&vscat);
      MPI_Comm_free(&intercomm);
      MPI_Comm_free(&parentcomm);
    } else if (mycolor == 1) { /* subcomm1, containing ranks 1, 4, 7, ... in PETSC_COMM_WORLD */
      PetscInt    n,N;
      Vec         y,xg,yg;
      IS          ix,iy;
      VecScatter  vscat;
      PetscScalar *yvalue;
      MPI_Comm    intercomm,parentcomm;
      PetscMPIInt lrank;

      MPI_Comm_rank(subcomm,&lrank);
      MPI_Intercomm_create(subcomm,0,PETSC_COMM_WORLD/*peer_comm*/,0/*remote_leader*/,100/*tag*/,&intercomm);
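      /* Here the remote leader (0) is the rank in PETSC_COMM_WORLD of subcomm0's first process, i.e., world rank 0,
         and the tag (100) matches the one used on the subcomm0 side */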

      /* The two rank-0 processes are talking to each other */
      if (!lrank) {MPI_Recv(&N,1,MPIU_INT,0/*sender's rank in remote comm, i.e., subcomm0*/,200/*tag*/,intercomm,MPI_STATUS_IGNORE);}
      /* Rank 0 of subcomm1 bcasts N to its members */
      MPI_Bcast(&N,1,MPIU_INT,0/*local root*/,subcomm);

      /* Create an intracomm PETSc can work on */
      MPI_Intercomm_merge(intercomm,1/*high*/,&parentcomm);
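      /* high=1 here (and low=0 on the subcomm0 side) places subcomm1's ranks after subcomm0's ranks in parentcomm */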

      /* Ranks in subcomm1 have nothing on xg, so they simply have n=0, array=NULL. */
      VecCreateMPIWithArray(parentcomm,1/*bs*/,0/*n*/,N,NULL/*array*/,&xg);

      VecCreateMPI(subcomm,PETSC_DECIDE,N,&y);
      PetscObjectSetName((PetscObject)y,"y_subcomm_1"); /* Give a name to view y clearly */
      VecGetLocalSize(y,&n);
      VecGetArray(y,&yvalue);
      /* Create a vector yg on parentcomm, which shares memory with y. xg and yg must be
        created in the same order in subcomm0 and subcomm1. For example, we cannot reverse the order of
        creating xg and yg in subcomm1.
      */
      VecCreateMPIWithArray(parentcomm,1/*bs*/,n,N,yvalue,&yg);

      /* Ranks in subcomm0 already specified the full range of the identity map.
        Ranks in subcomm1 just need to create empty ISes to cheat VecScatterCreate.
      */
      ISCreateStride(PETSC_COMM_SELF,0,0,1,&ix);
      ISDuplicate(ix,&iy);
      VecScatterCreate(xg,ix,yg,iy,&vscat);

      /* Scatter values from xg to yg */
      VecScatterBegin(vscat,xg,yg,INSERT_VALUES,SCATTER_FORWARD);
      VecScatterEnd(vscat,xg,yg,INSERT_VALUES,SCATTER_FORWARD);

      /* After the VecScatter is done, values in yg are available. y is our interest, so we return yvalue to y */
      VecRestoreArray(y,&yvalue);

      /* Libraries on subcomm1 can safely use y now, for example, view it */
      VecView(y,PETSC_VIEWER_STDOUT_(subcomm));

      VecDestroy(&y);
      ISDestroy(&ix);
      ISDestroy(&iy);
      VecDestroy(&xg);
      VecDestroy(&yg);
      VecScatterDestroy(&vscat);
      MPI_Comm_free(&intercomm);
      MPI_Comm_free(&parentcomm);
    } else if (mycolor == 2) { /* subcomm2 */
      /* Processes in subcomm2 do not participate in the VecScatter. They can freely do unrelated things on subcomm2 */
    }
  } /* sub2sub */

  /*===========================================================================
   *  Copy a vector x defined on PETSC_COMM_WORLD to vectors y defined on
   *  every subcommunicator of PETSC_COMM_WORLD. We could use multiple transfers
   *  as we did in case 1, but that would not be efficient. Instead, we use one vecscatter
   *  to achieve the same goal.
   *===========================================================================*/
  if (world2subs) {
    Vec         y,yg;
    PetscInt    n,N=15,xstart,ystart,low,high;
    PetscScalar *yvalue;

    /* Initialize x to [0, 1, 2, 3, ..., N-1] */
    VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,N,&x);
    VecGetOwnershipRange(x,&low,&high);
    for (i=low; i<high; i++) {VecSetValue(x,i,(PetscScalar)i,INSERT_VALUES);}
    VecAssemblyBegin(x);
    VecAssemblyEnd(x);

    /* Every subcomm has a y as long as x */
    VecCreateMPI(subcomm,PETSC_DECIDE,N,&y);
    VecGetLocalSize(y,&n);

    /* Create a global vector yg on PETSC_COMM_WORLD using y's memory. yg's global size = N*(number of subcommunicators).
       Each rank in the subcomms contributes a piece to construct the global yg. Keep in mind that pieces from a subcomm are not
       necessarily consecutive in yg. That depends on how PETSC_COMM_WORLD is split. In our case, subcomm0 is made of ranks
       0, 3, 6, etc. of PETSC_COMM_WORLD, so subcomm0's pieces are interleaved with pieces from other subcomms in yg.
    */
    VecGetArray(y,&yvalue);
    VecCreateMPIWithArray(PETSC_COMM_WORLD,1,n,PETSC_DECIDE,yvalue,&yg);
    PetscObjectSetName((PetscObject)yg,"yg_on_subcomms"); /* Give a name to view yg clearly */

    /* The following two lines are key. From xstart, we know where to pull entries from x. Note that we get xstart from y,
       since the first entry of y on this rank is from x[xstart]. From ystart, we know where to put entries in yg.
     */
    VecGetOwnershipRange(y,&xstart,NULL);
    VecGetOwnershipRange(yg,&ystart,NULL);

    ISCreateStride(PETSC_COMM_SELF,n,xstart,1,&ix);
    ISCreateStride(PETSC_COMM_SELF,n,ystart,1,&iy);
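    /* ix holds the global indices of x to read from and iy the global indices of yg to write to; both have n entries on every rank */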
    VecScatterCreate(x,ix,yg,iy,&vscat);
    VecScatterBegin(vscat,x,yg,INSERT_VALUES,SCATTER_FORWARD);
    VecScatterEnd(vscat,x,yg,INSERT_VALUES,SCATTER_FORWARD);

    /* View yg on PETSC_COMM_WORLD before destroying it. We shall see the interleaving effect in the output. */
    VecView(yg,PETSC_VIEWER_STDOUT_WORLD);
    VecDestroy(&yg);

    /* Restore yvalue so that processes in subcomm can use y from now on. */
    VecRestoreArray(y,&yvalue);
    VecScale(y,3.0);

    ISDestroy(&ix); /* One can also destroy ix, iy immediately after VecScatterCreate() */
    ISDestroy(&iy);
    VecDestroy(&x);
    VecDestroy(&y);
    VecScatterDestroy(&vscat);
  } /* world2subs */

  MPI_Comm_free(&subcomm);
  ierr = PetscFinalize();
  return ierr;
}

/*TEST

   build:
     requires: !define(PETSC_HAVE_MPIUNI)
   testset:
     nsize: 7

     test:
       suffix: 1
       args: -world2sub

     test:
       suffix: 2
       args: -sub2sub

     test:
       suffix: 3
       args: -world2subs

     test:
       suffix: 4
       args: -world2sub -vecscatter_type sf -sf_type neighbor
       output_file: output/ex9_1.out
       # OpenMPI has a bug wrt MPI_Neighbor_alltoallv etc. (https://github.com/open-mpi/ompi/pull/6782). Once the patch is in, we can remove !define(PETSC_HAVE_OMPI_MAJOR_VERSION)
       requires: define(PETSC_HAVE_MPI_NEIGHBORHOOD_COLLECTIVES) !define(PETSC_HAVE_OMPI_MAJOR_VERSION)
TEST*/