/* Actual source code: subcomm.c
   petsc-3.7.3 2016-08-01
   Report Typos and Errors */
  2: /*
  3:      Provides utility routines for split MPI communicator.
  4: */
  5: #include <petscsys.h>    /*I   "petscsys.h"    I*/
  6: #include <petscviewer.h>

/* String names for PetscSubcommType; the trailing entries follow the PETSc
   enum-string convention (enum type name, option prefix, NULL sentinel) used
   by PetscOptionsEnum()/SETERRQ1 lookups below */
const char *const PetscSubcommTypes[] = {"GENERAL","CONTIGUOUS","INTERLACED","PetscSubcommType","PETSC_SUBCOMM_",0};

/* constructors for the two built-in layouts, defined later in this file */
extern PetscErrorCode PetscSubcommCreate_contiguous(PetscSubcomm);
extern PetscErrorCode PetscSubcommCreate_interlaced(PetscSubcomm);

 15: /*@C
 16:    PetscSubcommSetFromOptions - Allows setting options from a PetscSubcomm

 18:    Collective on PetscSubcomm

 20:    Input Parameter:
 21: .  psubcomm - PetscSubcomm context

 23:    Level: beginner

 25: @*/
 26: PetscErrorCode PetscSubcommSetFromOptions(PetscSubcomm psubcomm)
 27: {
 28:   PetscErrorCode   ierr;
 29:   PetscSubcommType type;
 30:   PetscBool        flg;

 33:   if (!psubcomm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"Must call PetscSubcommCreate firt");

 35:   PetscOptionsBegin(psubcomm->parent,psubcomm->subcommprefix,"Options for PetscSubcomm",NULL);
 36:   PetscOptionsEnum("-psubcomm_type",NULL,NULL,PetscSubcommTypes,(PetscEnum)psubcomm->type,(PetscEnum*)&type,&flg);
 37:   if (flg && psubcomm->type != type) {
 38:     /* free old structures */
 39:     PetscCommDestroy(&(psubcomm)->dupparent);
 40:     PetscCommDestroy(&(psubcomm)->child);
 41:     PetscFree((psubcomm)->subsize);
 42:     switch (type) {
 43:     case PETSC_SUBCOMM_GENERAL:
 44:       SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Runtime option PETSC_SUBCOMM_GENERAL is not supported, use PetscSubcommSetTypeGeneral()");
 45:     case PETSC_SUBCOMM_CONTIGUOUS:
 46:       PetscSubcommCreate_contiguous(psubcomm);
 47:       break;
 48:     case PETSC_SUBCOMM_INTERLACED:
 49:       PetscSubcommCreate_interlaced(psubcomm);
 50:       break;
 51:     default:
 52:       SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"PetscSubcommType %s is not supported yet",PetscSubcommTypes[type]);
 53:     }
 54:   }

 56:   PetscOptionsName("-psubcomm_view","Triggers display of PetscSubcomm context","PetscSubcommView",&flg);
 57:   if (flg) {
 58:     PetscSubcommView(psubcomm,PETSC_VIEWER_STDOUT_WORLD);
 59:   }
 60:   PetscOptionsEnd();
 61:   return(0);
 62: }

 66: /*@C
 67:   PetscSubcommSetOptionsPrefix - Sets the prefix used for searching for all
 68:   PetscSubcomm items in the options database.

 70:   Logically collective on PetscSubcomm.

 72:   Level: Intermediate

 74:   Input Parameters:
 75: +   psubcomm - PetscSubcomm context
 76: -   prefix - the prefix to prepend all PetscSubcomm item names with.

 78: @*/
 79: PetscErrorCode PetscSubcommSetOptionsPrefix(PetscSubcomm psubcomm,const char pre[])
 80: {
 81:   PetscErrorCode   ierr;

 84:    if (!pre) {
 85:     PetscFree(psubcomm->subcommprefix);
 86:   } else {
 87:     if (pre[0] == '-') SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Options prefix should not begin with a hypen");
 88:     PetscFree(psubcomm->subcommprefix);
 89:     PetscStrallocpy(pre,&(psubcomm->subcommprefix));
 90:   }
 91:   return(0);
 92: }

 96: /*@C
 97:    PetsSubcommcView - Views a PetscSubcomm of values as either ASCII text or a binary file

 99:    Collective on PetscSubcomm

101:    Input Parameter:
102: +  psubcomm - PetscSubcomm context
103: -  viewer - location to view the values

105:    Level: beginner
106: @*/
107: PetscErrorCode PetscSubcommView(PetscSubcomm psubcomm,PetscViewer viewer)
108: {
109:   PetscErrorCode    ierr;
110:   PetscBool         iascii;
111:   PetscViewerFormat format;

114:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
115:   if (iascii) {
116:     PetscViewerGetFormat(viewer,&format);
117:     if (format == PETSC_VIEWER_DEFAULT) {
118:       MPI_Comm    comm=psubcomm->parent;
119:       PetscMPIInt rank,size,subsize,subrank,duprank;

121:       MPI_Comm_size(comm,&size);
122:       PetscViewerASCIIPrintf(viewer,"PetscSubcomm type %s with total %d MPI processes:\n",PetscSubcommTypes[psubcomm->type],size);
123:       MPI_Comm_rank(comm,&rank);
124:       MPI_Comm_size(psubcomm->child,&subsize);
125:       MPI_Comm_rank(psubcomm->child,&subrank);
126:       MPI_Comm_rank(psubcomm->dupparent,&duprank);
127:       PetscSynchronizedPrintf(comm,"  [%d], color %d, sub-size %d, sub-rank %d, duprank %d\n",rank,psubcomm->color,subsize,subrank,duprank);
128:       PetscSynchronizedFlush(comm,PETSC_STDOUT);
129:     }
130:   } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Not supported yet");
131:   return(0);
132: }

136: /*@C
137:   PetscSubcommSetNumber - Set total number of subcommunicators.

139:    Collective on MPI_Comm

141:    Input Parameter:
142: +  psubcomm - PetscSubcomm context
143: -  nsubcomm - the total number of subcommunicators in psubcomm

145:    Level: advanced

147: .keywords: communicator

149: .seealso: PetscSubcommCreate(),PetscSubcommDestroy(),PetscSubcommSetType(),PetscSubcommSetTypeGeneral()
150: @*/
151: PetscErrorCode  PetscSubcommSetNumber(PetscSubcomm psubcomm,PetscInt nsubcomm)
152: {
154:   MPI_Comm       comm=psubcomm->parent;
155:   PetscMPIInt    msub,size;

158:   if (!psubcomm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"PetscSubcomm is not created. Call PetscSubcommCreate() first");
159:   MPI_Comm_size(comm,&size);
160:   PetscMPIIntCast(nsubcomm,&msub);
161:   if (msub < 1 || msub > size) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE, "Num of subcommunicators %d cannot be < 1 or > input comm size %d",msub,size);

163:   psubcomm->n = msub;
164:   return(0);
165: }

169: /*@C
170:   PetscSubcommSetType - Set type of subcommunicators.

172:    Collective on MPI_Comm

174:    Input Parameter:
175: +  psubcomm - PetscSubcomm context
176: -  subcommtype - subcommunicator type, PETSC_SUBCOMM_CONTIGUOUS,PETSC_SUBCOMM_INTERLACED

178:    Level: advanced

180: .keywords: communicator

182: .seealso: PetscSubcommCreate(),PetscSubcommDestroy(),PetscSubcommSetNumber(),PetscSubcommSetTypeGeneral()
183: @*/
184: PetscErrorCode  PetscSubcommSetType(PetscSubcomm psubcomm,PetscSubcommType subcommtype)
185: {

189:   if (!psubcomm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"PetscSubcomm is not created. Call PetscSubcommCreate()");
190:   if (psubcomm->n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of subcommunicators %d is incorrect. Call PetscSubcommSetNumber()",psubcomm->n);

192:   if (subcommtype == PETSC_SUBCOMM_CONTIGUOUS) {
193:     PetscSubcommCreate_contiguous(psubcomm);
194:   } else if (subcommtype == PETSC_SUBCOMM_INTERLACED) {
195:     PetscSubcommCreate_interlaced(psubcomm);
196:   } else SETERRQ1(psubcomm->parent,PETSC_ERR_SUP,"PetscSubcommType %s is not supported yet",PetscSubcommTypes[subcommtype]);
197:   return(0);
198: }

202: /*@C
203:   PetscSubcommSetTypeGeneral - Set a PetscSubcomm from user's specifications

205:    Collective on MPI_Comm

207:    Input Parameter:
208: +  psubcomm - PetscSubcomm context
209: .  color   - control of subset assignment (nonnegative integer). Processes with the same color are in the same subcommunicator.
210: -  subrank - rank in the subcommunicator

212:    Level: advanced

214: .keywords: communicator, create

216: .seealso: PetscSubcommCreate(),PetscSubcommDestroy(),PetscSubcommSetNumber(),PetscSubcommSetType()
217: @*/
218: PetscErrorCode PetscSubcommSetTypeGeneral(PetscSubcomm psubcomm,PetscMPIInt color,PetscMPIInt subrank)
219: {
221:   MPI_Comm       subcomm=0,dupcomm=0,comm=psubcomm->parent;
222:   PetscMPIInt    size,icolor,duprank,*recvbuf,sendbuf[3],mysubsize,rank,*subsize;
223:   PetscMPIInt    i,nsubcomm=psubcomm->n;

226:   if (!psubcomm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"PetscSubcomm is not created. Call PetscSubcommCreate()");
227:   if (nsubcomm < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of subcommunicators %d is incorrect. Call PetscSubcommSetNumber()",nsubcomm);

229:   MPI_Comm_split(comm,color,subrank,&subcomm);

231:   /* create dupcomm with same size as comm, but its rank, duprank, maps subcomm's contiguously into dupcomm */
232:   /* TODO: this can be done in an ostensibly scalale way (i.e., without allocating an array of size 'size') as is done in PetscObjectsCreateGlobalOrdering(). */
233:   MPI_Comm_size(comm,&size);
234:   PetscMalloc1(2*size,&recvbuf);

236:   MPI_Comm_rank(comm,&rank);
237:   MPI_Comm_size(subcomm,&mysubsize);

239:   sendbuf[0] = color;
240:   sendbuf[1] = mysubsize;
241:   MPI_Allgather(sendbuf,2,MPI_INT,recvbuf,2,MPI_INT,comm);

243:   PetscCalloc1(nsubcomm,&subsize);
244:   for (i=0; i<2*size; i+=2) {
245:     subsize[recvbuf[i]] = recvbuf[i+1];
246:   }
247:   PetscFree(recvbuf);

249:   duprank = 0;
250:   for (icolor=0; icolor<nsubcomm; icolor++) {
251:     if (icolor != color) { /* not color of this process */
252:       duprank += subsize[icolor];
253:     } else {
254:       duprank += subrank;
255:       break;
256:     }
257:   }
258:   MPI_Comm_split(comm,0,duprank,&dupcomm);

260:   PetscCommDuplicate(dupcomm,&psubcomm->dupparent,NULL);
261:   PetscCommDuplicate(subcomm,&psubcomm->child,NULL);
262:   MPI_Comm_free(&dupcomm);
263:   MPI_Comm_free(&subcomm);

265:   psubcomm->color   = color;
266:   psubcomm->subsize = subsize;
267:   psubcomm->type    = PETSC_SUBCOMM_GENERAL;
268:   return(0);
269: }

273: /*@C
274:   PetscSubcommDestroy - Destroys a PetscSubcomm object

276:    Collective on PetscSubcomm

278:    Input Parameter:
279:    .  psubcomm - the PetscSubcomm context

281:    Level: advanced

283: .seealso: PetscSubcommCreate(),PetscSubcommSetType()
284: @*/
285: PetscErrorCode  PetscSubcommDestroy(PetscSubcomm *psubcomm)
286: {

290:   if (!*psubcomm) return(0);
291:   PetscCommDestroy(&(*psubcomm)->dupparent);
292:   PetscCommDestroy(&(*psubcomm)->child);
293:   PetscFree((*psubcomm)->subsize);
294:   if ((*psubcomm)->subcommprefix) { PetscFree((*psubcomm)->subcommprefix); }
295:   PetscFree((*psubcomm));
296:   return(0);
297: }

301: /*@C
302:   PetscSubcommCreate - Create a PetscSubcomm context.

304:    Collective on MPI_Comm

306:    Input Parameter:
307: .  comm - MPI communicator

309:    Output Parameter:
310: .  psubcomm - location to store the PetscSubcomm context

312:    Level: advanced

314: .keywords: communicator, create

316: .seealso: PetscSubcommDestroy()
317: @*/
318: PetscErrorCode  PetscSubcommCreate(MPI_Comm comm,PetscSubcomm *psubcomm)
319: {
321:   PetscMPIInt    rank,size;

324:   PetscNew(psubcomm);

326:   /* set defaults */
327:   MPI_Comm_rank(comm,&rank);
328:   MPI_Comm_size(comm,&size);

330:   (*psubcomm)->parent    = comm;
331:   (*psubcomm)->dupparent = comm;
332:   (*psubcomm)->child     = PETSC_COMM_SELF;
333:   (*psubcomm)->n         = size;
334:   (*psubcomm)->color     = rank;
335:   (*psubcomm)->subsize   = NULL;
336:   (*psubcomm)->type      = PETSC_SUBCOMM_INTERLACED;
337:   return(0);
338: }

342: PetscErrorCode PetscSubcommCreate_contiguous(PetscSubcomm psubcomm)
343: {
345:   PetscMPIInt    rank,size,*subsize,duprank=-1,subrank=-1;
346:   PetscMPIInt    np_subcomm,nleftover,i,color=-1,rankstart,nsubcomm=psubcomm->n;
347:   MPI_Comm       subcomm=0,dupcomm=0,comm=psubcomm->parent;

350:   MPI_Comm_rank(comm,&rank);
351:   MPI_Comm_size(comm,&size);

353:   /* get size of each subcommunicator */
354:   PetscMalloc1(1+nsubcomm,&subsize);

356:   np_subcomm = size/nsubcomm;
357:   nleftover  = size - nsubcomm*np_subcomm;
358:   for (i=0; i<nsubcomm; i++) {
359:     subsize[i] = np_subcomm;
360:     if (i<nleftover) subsize[i]++;
361:   }

363:   /* get color and subrank of this proc */
364:   rankstart = 0;
365:   for (i=0; i<nsubcomm; i++) {
366:     if (rank >= rankstart && rank < rankstart+subsize[i]) {
367:       color   = i;
368:       subrank = rank - rankstart;
369:       duprank = rank;
370:       break;
371:     } else rankstart += subsize[i];
372:   }

374:   MPI_Comm_split(comm,color,subrank,&subcomm);

376:   /* create dupcomm with same size as comm, but its rank, duprank, maps subcomm's contiguously into dupcomm */
377:   MPI_Comm_split(comm,0,duprank,&dupcomm);
378:   PetscCommDuplicate(dupcomm,&psubcomm->dupparent,NULL);
379:   PetscCommDuplicate(subcomm,&psubcomm->child,NULL);
380:   MPI_Comm_free(&dupcomm);
381:   MPI_Comm_free(&subcomm);

383:   psubcomm->color   = color;
384:   psubcomm->subsize = subsize;
385:   psubcomm->type    = PETSC_SUBCOMM_CONTIGUOUS;
386:   return(0);
387: }

391: /*
392:    Note:
393:    In PCREDUNDANT, to avoid data scattering from subcomm back to original comm, we create subcommunicators
394:    by iteratively taking a process into a subcommunicator.
395:    Example: size=4, nsubcomm=(*psubcomm)->n=3
396:      comm=(*psubcomm)->parent:
397:       rank:     [0]  [1]  [2]  [3]
398:       color:     0    1    2    0

 400:      subcomm=(*psubcomm)->child:
401:       subrank:  [0]  [0]  [0]  [1]

403:      dupcomm=(*psubcomm)->dupparent:
404:       duprank:  [0]  [2]  [3]  [1]

406:      Here, subcomm[color = 0] has subsize=2, owns process [0] and [3]
407:            subcomm[color = 1] has subsize=1, owns process [1]
408:            subcomm[color = 2] has subsize=1, owns process [2]
409:            dupcomm has same number of processes as comm, and its duprank maps
410:            processes in subcomm contiguously into a 1d array:
411:             duprank: [0] [1]      [2]         [3]
412:             rank:    [0] [3]      [1]         [2]
413:                     subcomm[0] subcomm[1]  subcomm[2]
414: */

416: PetscErrorCode PetscSubcommCreate_interlaced(PetscSubcomm psubcomm)
417: {
419:   PetscMPIInt    rank,size,*subsize,duprank,subrank;
420:   PetscMPIInt    np_subcomm,nleftover,i,j,color,nsubcomm=psubcomm->n;
421:   MPI_Comm       subcomm=0,dupcomm=0,comm=psubcomm->parent;

424:   MPI_Comm_rank(comm,&rank);
425:   MPI_Comm_size(comm,&size);

427:   /* get size of each subcommunicator */
428:   PetscMalloc1(1+nsubcomm,&subsize);

430:   np_subcomm = size/nsubcomm;
431:   nleftover  = size - nsubcomm*np_subcomm;
432:   for (i=0; i<nsubcomm; i++) {
433:     subsize[i] = np_subcomm;
434:     if (i<nleftover) subsize[i]++;
435:   }

437:   /* find color for this proc */
438:   color   = rank%nsubcomm;
439:   subrank = rank/nsubcomm;

441:   MPI_Comm_split(comm,color,subrank,&subcomm);

443:   j = 0; duprank = 0;
444:   for (i=0; i<nsubcomm; i++) {
445:     if (j == color) {
446:       duprank += subrank;
447:       break;
448:     }
449:     duprank += subsize[i]; j++;
450:   }

452:   /* create dupcomm with same size as comm, but its rank, duprank, maps subcomm's contiguously into dupcomm */
453:   MPI_Comm_split(comm,0,duprank,&dupcomm);
454:   PetscCommDuplicate(dupcomm,&psubcomm->dupparent,NULL);
455:   PetscCommDuplicate(subcomm,&psubcomm->child,NULL);
456:   MPI_Comm_free(&dupcomm);
457:   MPI_Comm_free(&subcomm);

459:   psubcomm->color   = color;
460:   psubcomm->subsize = subsize;
461:   psubcomm->type    = PETSC_SUBCOMM_INTERLACED;
462:   return(0);
463: }