Actual source code: subcomm.c


/*
     Provides utility routines for split MPI communicator.
*/
#include <petscsys.h>
#include <petscviewer.h>

const char *const PetscSubcommTypes[] = {"GENERAL","CONTIGUOUS","INTERLACED","PetscSubcommType","PETSC_SUBCOMM_",NULL};

static PetscErrorCode PetscSubcommCreate_contiguous(PetscSubcomm);
static PetscErrorCode PetscSubcommCreate_interlaced(PetscSubcomm);

/*@
   PetscSubcommSetFromOptions - Allows setting options for a PetscSubcomm from the options database

   Collective on PetscSubcomm

   Input Parameter:
.  psubcomm - PetscSubcomm context

   Level: beginner

@*/
PetscErrorCode PetscSubcommSetFromOptions(PetscSubcomm psubcomm)
{
  PetscErrorCode   ierr;
  PetscSubcommType type;
  PetscBool        flg;

  if (!psubcomm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"Must call PetscSubcommCreate first");

  PetscOptionsBegin(psubcomm->parent,psubcomm->subcommprefix,"Options for PetscSubcomm",NULL);
  PetscOptionsEnum("-psubcomm_type",NULL,NULL,PetscSubcommTypes,(PetscEnum)psubcomm->type,(PetscEnum*)&type,&flg);
  if (flg && psubcomm->type != type) {
    /* free old structures */
    PetscCommDestroy(&(psubcomm)->dupparent);
    PetscCommDestroy(&(psubcomm)->child);
    PetscFree((psubcomm)->subsize);
    switch (type) {
    case PETSC_SUBCOMM_GENERAL:
      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Runtime option PETSC_SUBCOMM_GENERAL is not supported, use PetscSubcommSetTypeGeneral()");
    case PETSC_SUBCOMM_CONTIGUOUS:
      PetscSubcommCreate_contiguous(psubcomm);
      break;
    case PETSC_SUBCOMM_INTERLACED:
      PetscSubcommCreate_interlaced(psubcomm);
      break;
    default:
      SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"PetscSubcommType %s is not supported yet",PetscSubcommTypes[type]);
    }
  }

  PetscOptionsName("-psubcomm_view","Triggers display of PetscSubcomm context","PetscSubcommView",&flg);
  if (flg) {
    PetscSubcommView(psubcomm,PETSC_VIEWER_STDOUT_(psubcomm->parent));
  }
  PetscOptionsEnd();
  return(0);
}
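
/*
   Usage note (a sketch, not part of the original subcomm.c): with the default (empty) options
   prefix, the two options handled above are typically supplied on the command line as, e.g.,

       -psubcomm_type interlaced -psubcomm_view

   which selects PETSC_SUBCOMM_INTERLACED and then prints the layout via PetscSubcommView().
*/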

/*@C
  PetscSubcommSetOptionsPrefix - Sets the prefix used for searching for all
  PetscSubcomm items in the options database.

  Logically collective on PetscSubcomm.

  Level: intermediate

  Input Parameters:
+  psubcomm - PetscSubcomm context
-  prefix - the prefix to prepend to all PetscSubcomm item names

@*/
PetscErrorCode PetscSubcommSetOptionsPrefix(PetscSubcomm psubcomm,const char pre[])
{
  PetscErrorCode   ierr;

  if (!pre) {
    PetscFree(psubcomm->subcommprefix);
  } else {
    if (pre[0] == '-') SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Options prefix should not begin with a hyphen");
    PetscFree(psubcomm->subcommprefix);
    PetscStrallocpy(pre,&(psubcomm->subcommprefix));
  }
  return(0);
}
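
/*
   A minimal sketch (illustrative, with a hypothetical prefix name): since the prefix set here is
   passed to PetscOptionsBegin() in PetscSubcommSetFromOptions(), it is prepended to the option
   names that are searched for, e.g.

       PetscSubcommSetOptionsPrefix(psubcomm,"inner_");
       PetscSubcommSetFromOptions(psubcomm);   |* now reads -inner_psubcomm_type and -inner_psubcomm_view *|
*/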

/*@C
   PetscSubcommView - Views a PetscSubcomm object; currently only ASCII viewers are supported

   Collective on PetscSubcomm

   Input Parameters:
+  psubcomm - PetscSubcomm context
-  viewer - location to view the values

   Level: beginner
@*/
PetscErrorCode PetscSubcommView(PetscSubcomm psubcomm,PetscViewer viewer)
{
  PetscErrorCode    ierr;
  PetscBool         iascii;
  PetscViewerFormat format;

  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
  if (iascii) {
    PetscViewerGetFormat(viewer,&format);
    if (format == PETSC_VIEWER_DEFAULT) {
      MPI_Comm    comm=psubcomm->parent;
      PetscMPIInt rank,size,subsize,subrank,duprank;

      MPI_Comm_size(comm,&size);
      PetscViewerASCIIPrintf(viewer,"PetscSubcomm type %s with total %d MPI processes:\n",PetscSubcommTypes[psubcomm->type],size);
      MPI_Comm_rank(comm,&rank);
      MPI_Comm_size(psubcomm->child,&subsize);
      MPI_Comm_rank(psubcomm->child,&subrank);
      MPI_Comm_rank(psubcomm->dupparent,&duprank);
      PetscViewerASCIIPushSynchronized(viewer);
      PetscViewerASCIISynchronizedPrintf(viewer,"  [%d], color %d, sub-size %d, sub-rank %d, duprank %d\n",rank,psubcomm->color,subsize,subrank,duprank);
      PetscViewerFlush(viewer);
      PetscViewerASCIIPopSynchronized(viewer);
    }
  } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Not supported yet");
  return(0);
}
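
/*
   Illustrative output (a sketch derived from the interlaced example discussed before
   PetscSubcommCreate_interlaced() below, not captured from an actual run): for size=4 and
   nsubcomm=3 the ASCII viewer would print something like

       PetscSubcomm type INTERLACED with total 4 MPI processes:
         [0], color 0, sub-size 2, sub-rank 0, duprank 0
         [1], color 1, sub-size 1, sub-rank 0, duprank 2
         [2], color 2, sub-size 1, sub-rank 0, duprank 3
         [3], color 0, sub-size 2, sub-rank 1, duprank 1
*/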

/*@
  PetscSubcommSetNumber - Set total number of subcommunicators.

   Collective

   Input Parameters:
+  psubcomm - PetscSubcomm context
-  nsubcomm - the total number of subcommunicators in psubcomm

   Level: advanced

.seealso: PetscSubcommCreate(),PetscSubcommDestroy(),PetscSubcommSetType(),PetscSubcommSetTypeGeneral()
@*/
PetscErrorCode  PetscSubcommSetNumber(PetscSubcomm psubcomm,PetscInt nsubcomm)
{
  MPI_Comm       comm;
  PetscMPIInt    msub,size;

  if (!psubcomm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"PetscSubcomm is not created. Call PetscSubcommCreate() first");
  comm = psubcomm->parent;
  MPI_Comm_size(comm,&size);
  PetscMPIIntCast(nsubcomm,&msub);
  if (msub < 1 || msub > size) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE, "Num of subcommunicators %d cannot be < 1 or > input comm size %d",msub,size);

  psubcomm->n = msub;
  return(0);
}

/*@
  PetscSubcommSetType - Set type of subcommunicators.

   Collective

   Input Parameters:
+  psubcomm - PetscSubcomm context
-  subcommtype - subcommunicator type, PETSC_SUBCOMM_CONTIGUOUS or PETSC_SUBCOMM_INTERLACED

   Level: advanced

.seealso: PetscSubcommCreate(),PetscSubcommDestroy(),PetscSubcommSetNumber(),PetscSubcommSetTypeGeneral(), PetscSubcommType
@*/
PetscErrorCode  PetscSubcommSetType(PetscSubcomm psubcomm,PetscSubcommType subcommtype)
{

  if (!psubcomm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"PetscSubcomm is not created. Call PetscSubcommCreate()");
  if (psubcomm->n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of subcommunicators %d is incorrect. Call PetscSubcommSetNumber()",psubcomm->n);

  if (subcommtype == PETSC_SUBCOMM_CONTIGUOUS) {
    PetscSubcommCreate_contiguous(psubcomm);
  } else if (subcommtype == PETSC_SUBCOMM_INTERLACED) {
    PetscSubcommCreate_interlaced(psubcomm);
  } else SETERRQ1(psubcomm->parent,PETSC_ERR_SUP,"PetscSubcommType %s is not supported yet",PetscSubcommTypes[subcommtype]);
  return(0);
}

/*@
  PetscSubcommSetTypeGeneral - Set a PetscSubcomm from the user's specifications

   Collective

   Input Parameters:
+  psubcomm - PetscSubcomm context
.  color   - control of subset assignment (nonnegative integer). Processes with the same color are in the same subcommunicator.
-  subrank - rank in the subcommunicator

   Level: advanced

.seealso: PetscSubcommCreate(),PetscSubcommDestroy(),PetscSubcommSetNumber(),PetscSubcommSetType()
@*/
PetscErrorCode PetscSubcommSetTypeGeneral(PetscSubcomm psubcomm,PetscMPIInt color,PetscMPIInt subrank)
{
  MPI_Comm       subcomm=0,dupcomm=0,comm;
  PetscMPIInt    size,icolor,duprank,*recvbuf,sendbuf[3],mysubsize,rank,*subsize;
  PetscMPIInt    i,nsubcomm;

  if (!psubcomm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"PetscSubcomm is not created. Call PetscSubcommCreate()");
  nsubcomm = psubcomm->n;
  if (nsubcomm < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of subcommunicators %d is incorrect. Call PetscSubcommSetNumber()",nsubcomm);
  comm = psubcomm->parent;

  MPI_Comm_split(comm,color,subrank,&subcomm);

  /* create dupcomm with the same size as comm, but whose rank, duprank, maps the subcomms contiguously into dupcomm */
  /* TODO: this can be done in an ostensibly scalable way (i.e., without allocating an array of size 'size') as is done in PetscObjectsCreateGlobalOrdering(). */
  MPI_Comm_size(comm,&size);
  PetscMalloc1(2*size,&recvbuf);

  MPI_Comm_rank(comm,&rank);
  MPI_Comm_size(subcomm,&mysubsize);

  sendbuf[0] = color;
  sendbuf[1] = mysubsize;
  MPI_Allgather(sendbuf,2,MPI_INT,recvbuf,2,MPI_INT,comm);

  PetscCalloc1(nsubcomm,&subsize);
  for (i=0; i<2*size; i+=2) {
    subsize[recvbuf[i]] = recvbuf[i+1];
  }
  PetscFree(recvbuf);

  duprank = 0;
  for (icolor=0; icolor<nsubcomm; icolor++) {
    if (icolor != color) { /* not the color of this process */
      duprank += subsize[icolor];
    } else {
      duprank += subrank;
      break;
    }
  }
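  /* at this point duprank = (sum of the sizes of the subcommunicators with a smaller color) + subrank,
     so the members of each subcommunicator occupy a contiguous block of ranks in dupcomm below */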
  MPI_Comm_split(comm,0,duprank,&dupcomm);

  PetscCommDuplicate(dupcomm,&psubcomm->dupparent,NULL);
  PetscCommDuplicate(subcomm,&psubcomm->child,NULL);
  MPI_Comm_free(&dupcomm);
  MPI_Comm_free(&subcomm);

  psubcomm->color   = color;
  psubcomm->subsize = subsize;
  psubcomm->type    = PETSC_SUBCOMM_GENERAL;
  return(0);
}
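
/*
   A minimal usage sketch for PetscSubcommSetTypeGeneral() (illustrative only; the splitting rule
   below is an arbitrary example and 'comm' is an assumed parent MPI communicator): put the lower
   half of the ranks into one subcommunicator and the upper half into another.

       PetscSubcomm psubcomm;
       PetscMPIInt  rank,size,color,subrank;

       MPI_Comm_rank(comm,&rank);
       MPI_Comm_size(comm,&size);
       color   = (rank < size/2) ? 0 : 1;                  |* two subcommunicators *|
       subrank = (rank < size/2) ? rank : rank - size/2;   |* rank within each half *|
       PetscSubcommCreate(comm,&psubcomm);
       PetscSubcommSetNumber(psubcomm,2);
       PetscSubcommSetTypeGeneral(psubcomm,color,subrank);
*/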

/*@
  PetscSubcommDestroy - Destroys a PetscSubcomm object

   Collective on PetscSubcomm

   Input Parameter:
.  psubcomm - the PetscSubcomm context

   Level: advanced

.seealso: PetscSubcommCreate(),PetscSubcommSetType()
@*/
PetscErrorCode  PetscSubcommDestroy(PetscSubcomm *psubcomm)
{

  if (!*psubcomm) return(0);
  PetscCommDestroy(&(*psubcomm)->dupparent);
  PetscCommDestroy(&(*psubcomm)->child);
  PetscFree((*psubcomm)->subsize);
  if ((*psubcomm)->subcommprefix) { PetscFree((*psubcomm)->subcommprefix); }
  PetscFree((*psubcomm));
  return(0);
}

/*@
  PetscSubcommCreate - Create a PetscSubcomm context.

   Collective

   Input Parameter:
.  comm - MPI communicator

   Output Parameter:
.  psubcomm - location to store the PetscSubcomm context

   Level: advanced

.seealso: PetscSubcommDestroy(), PetscSubcommSetTypeGeneral(), PetscSubcommSetFromOptions(), PetscSubcommSetType(),
          PetscSubcommSetNumber()
@*/
PetscErrorCode  PetscSubcommCreate(MPI_Comm comm,PetscSubcomm *psubcomm)
{
  PetscMPIInt    rank,size;

  PetscNew(psubcomm);

  /* set defaults */
  MPI_Comm_rank(comm,&rank);
  MPI_Comm_size(comm,&size);

  (*psubcomm)->parent    = comm;
  (*psubcomm)->dupparent = comm;
  (*psubcomm)->child     = PETSC_COMM_SELF;
  (*psubcomm)->n         = size;
  (*psubcomm)->color     = rank;
  (*psubcomm)->subsize   = NULL;
  (*psubcomm)->type      = PETSC_SUBCOMM_INTERLACED;
  return(0);
}
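
/*
   A minimal end-to-end sketch of the typical call sequence (illustrative, with hypothetical
   variable names; error checking omitted): split PETSC_COMM_WORLD into nsub contiguous
   subcommunicators and retrieve the child communicator this process belongs to.

       PetscSubcomm psubcomm;
       MPI_Comm     child;
       PetscInt     nsub = 2;

       PetscSubcommCreate(PETSC_COMM_WORLD,&psubcomm);
       PetscSubcommSetNumber(psubcomm,nsub);
       PetscSubcommSetType(psubcomm,PETSC_SUBCOMM_CONTIGUOUS);
       PetscSubcommSetFromOptions(psubcomm);          |* optionally override via -psubcomm_type *|
       PetscSubcommGetChild(psubcomm,&child);
       |* ... create objects on 'child' ... *|
       PetscSubcommDestroy(&psubcomm);
*/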

/*@C
  PetscSubcommGetParent - Gets the communicator that was used to create the PetscSubcomm

   Collective

   Input Parameter:
.  scomm - the PetscSubcomm

   Output Parameter:
.  pcomm - location to store the parent communicator

   Level: intermediate

.seealso: PetscSubcommDestroy(), PetscSubcommSetTypeGeneral(), PetscSubcommSetFromOptions(), PetscSubcommSetType(),
          PetscSubcommSetNumber(), PetscSubcommGetChild(), PetscSubcommContiguousParent()
@*/
PetscErrorCode  PetscSubcommGetParent(PetscSubcomm scomm,MPI_Comm *pcomm)
{
  *pcomm = PetscSubcommParent(scomm);
  return 0;
}

/*@C
  PetscSubcommGetContiguousParent - Gets a communicator that is a duplicate of the parent but has the ranks
                                    reordered by the order they appear in the children

   Collective

   Input Parameter:
.  scomm - the PetscSubcomm

   Output Parameter:
.  pcomm - location to store the parent communicator

   Level: intermediate

.seealso: PetscSubcommDestroy(), PetscSubcommSetTypeGeneral(), PetscSubcommSetFromOptions(), PetscSubcommSetType(),
          PetscSubcommSetNumber(), PetscSubcommGetChild(), PetscSubcommContiguousParent()
@*/
PetscErrorCode  PetscSubcommGetContiguousParent(PetscSubcomm scomm,MPI_Comm *pcomm)
{
  *pcomm = PetscSubcommContiguousParent(scomm);
  return 0;
}

/*@C
  PetscSubcommGetChild - Gets the communicator created by the PetscSubcomm

   Collective

   Input Parameter:
.  scomm - the PetscSubcomm

   Output Parameter:
.  ccomm - location to store the child communicator

   Level: intermediate

.seealso: PetscSubcommDestroy(), PetscSubcommSetTypeGeneral(), PetscSubcommSetFromOptions(), PetscSubcommSetType(),
          PetscSubcommSetNumber(), PetscSubcommGetParent(), PetscSubcommContiguousParent()
@*/
PetscErrorCode  PetscSubcommGetChild(PetscSubcomm scomm,MPI_Comm *ccomm)
{
  *ccomm = PetscSubcommChild(scomm);
  return 0;
}

static PetscErrorCode PetscSubcommCreate_contiguous(PetscSubcomm psubcomm)
{
  PetscMPIInt    rank,size,*subsize,duprank=-1,subrank=-1;
  PetscMPIInt    np_subcomm,nleftover,i,color=-1,rankstart,nsubcomm=psubcomm->n;
  MPI_Comm       subcomm=0,dupcomm=0,comm=psubcomm->parent;

  MPI_Comm_rank(comm,&rank);
  MPI_Comm_size(comm,&size);

  /* get size of each subcommunicator */
  PetscMalloc1(1+nsubcomm,&subsize);

  np_subcomm = size/nsubcomm;
  nleftover  = size - nsubcomm*np_subcomm;
  for (i=0; i<nsubcomm; i++) {
    subsize[i] = np_subcomm;
    if (i<nleftover) subsize[i]++;
  }
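  /* worked example (assumed values for illustration): size=10 and nsubcomm=3 give np_subcomm=3 and
     nleftover=1, hence subsize = {4,3,3}; leftover processes go to the lowest-numbered subcomms */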

  /* get color and subrank of this proc */
  rankstart = 0;
  for (i=0; i<nsubcomm; i++) {
    if (rank >= rankstart && rank < rankstart+subsize[i]) {
      color   = i;
      subrank = rank - rankstart;
      duprank = rank;
      break;
    } else rankstart += subsize[i];
  }
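  /* continuing the illustration above: with subsize = {4,3,3}, ranks 0-3 get color 0, ranks 4-6 color 1,
     and ranks 7-9 color 2; since the blocks are contiguous, duprank simply equals rank */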

  MPI_Comm_split(comm,color,subrank,&subcomm);

  /* create dupcomm with the same size as comm, but whose rank, duprank, maps the subcomms contiguously into dupcomm */
  MPI_Comm_split(comm,0,duprank,&dupcomm);
  PetscCommDuplicate(dupcomm,&psubcomm->dupparent,NULL);
  PetscCommDuplicate(subcomm,&psubcomm->child,NULL);
  MPI_Comm_free(&dupcomm);
  MPI_Comm_free(&subcomm);

  psubcomm->color   = color;
  psubcomm->subsize = subsize;
  psubcomm->type    = PETSC_SUBCOMM_CONTIGUOUS;
  return(0);
}

/*
   Note:
   In PCREDUNDANT, to avoid scattering data from the subcommunicators back to the original communicator, we create
   the subcommunicators by assigning processes to them in an interlaced (round-robin) manner.
   Example: size=4, nsubcomm=(*psubcomm)->n=3
     comm=(*psubcomm)->parent:
      rank:     [0]  [1]  [2]  [3]
      color:     0    1    2    0

     subcomm=(*psubcomm)->child:
      subrank:  [0]  [0]  [0]  [1]

     dupcomm=(*psubcomm)->dupparent:
      duprank:  [0]  [2]  [3]  [1]

     Here, subcomm[color = 0] has subsize=2, owns processes [0] and [3]
           subcomm[color = 1] has subsize=1, owns process [1]
           subcomm[color = 2] has subsize=1, owns process [2]
           dupcomm has the same number of processes as comm, and its duprank maps
           processes in the subcomms contiguously into a 1d array:
            duprank: [0] [1]      [2]         [3]
            rank:    [0] [3]      [1]         [2]
                    subcomm[0] subcomm[1]  subcomm[2]
*/

static PetscErrorCode PetscSubcommCreate_interlaced(PetscSubcomm psubcomm)
{
  PetscMPIInt    rank,size,*subsize,duprank,subrank;
  PetscMPIInt    np_subcomm,nleftover,i,j,color,nsubcomm=psubcomm->n;
  MPI_Comm       subcomm=0,dupcomm=0,comm=psubcomm->parent;

  MPI_Comm_rank(comm,&rank);
  MPI_Comm_size(comm,&size);

  /* get size of each subcommunicator */
  PetscMalloc1(1+nsubcomm,&subsize);

  np_subcomm = size/nsubcomm;
  nleftover  = size - nsubcomm*np_subcomm;
  for (i=0; i<nsubcomm; i++) {
    subsize[i] = np_subcomm;
    if (i<nleftover) subsize[i]++;
  }

  /* find color for this proc */
  color   = rank%nsubcomm;
  subrank = rank/nsubcomm;
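  /* e.g. (assumed values for illustration) with size=4 and nsubcomm=3, as in the block comment above,
     colors are 0,1,2,0 and subranks are 0,0,0,1 for ranks 0..3 */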

  MPI_Comm_split(comm,color,subrank,&subcomm);

  j = 0; duprank = 0;
  for (i=0; i<nsubcomm; i++) {
    if (j == color) {
      duprank += subrank;
      break;
    }
    duprank += subsize[i]; j++;
  }
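  /* continuing the illustration: duprank = subsize[0]+...+subsize[color-1] + subrank, which for the
     size=4, nsubcomm=3 example gives dupranks 0,2,3,1 for ranks 0..3, matching the block comment above */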

  /* create dupcomm with the same size as comm, but whose rank, duprank, maps the subcomms contiguously into dupcomm */
  MPI_Comm_split(comm,0,duprank,&dupcomm);
  PetscCommDuplicate(dupcomm,&psubcomm->dupparent,NULL);
  PetscCommDuplicate(subcomm,&psubcomm->child,NULL);
  MPI_Comm_free(&dupcomm);
  MPI_Comm_free(&subcomm);

  psubcomm->color   = color;
  psubcomm->subsize = subsize;
  psubcomm->type    = PETSC_SUBCOMM_INTERLACED;
  return(0);
}