Actual source code: subcomm.c

petsc-3.12.5 2020-03-29

  2: /*
  3:      Provides utility routines for split MPI communicator.
  4: */
  5:  #include <petscsys.h>
  6:  #include <petscviewer.h>

  8: const char *const PetscSubcommTypes[] = {"GENERAL","CONTIGUOUS","INTERLACED","PetscSubcommType","PETSC_SUBCOMM_",0};

 10: static PetscErrorCode PetscSubcommCreate_contiguous(PetscSubcomm);
 11: static PetscErrorCode PetscSubcommCreate_interlaced(PetscSubcomm);

 13: /*@C
 14:    PetscSubcommSetFromOptions - Allows setting options for a PetscSubcomm from the options database

 16:    Collective on PetscSubcomm

 18:    Input Parameter:
 19: .  psubcomm - PetscSubcomm context

 21:    Level: beginner

 23: @*/
 24: PetscErrorCode PetscSubcommSetFromOptions(PetscSubcomm psubcomm)
 25: {
 26:   PetscErrorCode   ierr;
 27:   PetscSubcommType type;
 28:   PetscBool        flg;

 31:   if (!psubcomm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"Must call PetscSubcommCreate first");

 33:   PetscOptionsBegin(psubcomm->parent,psubcomm->subcommprefix,"Options for PetscSubcomm",NULL);
 34:   PetscOptionsEnum("-psubcomm_type",NULL,NULL,PetscSubcommTypes,(PetscEnum)psubcomm->type,(PetscEnum*)&type,&flg);
 35:   if (flg && psubcomm->type != type) {
 36:     /* free old structures */
 37:     PetscCommDestroy(&(psubcomm)->dupparent);
 38:     PetscCommDestroy(&(psubcomm)->child);
 39:     PetscFree((psubcomm)->subsize);
 40:     switch (type) {
 41:     case PETSC_SUBCOMM_GENERAL:
 42:       SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Runtime option PETSC_SUBCOMM_GENERAL is not supported, use PetscSubcommSetTypeGeneral()");
 43:     case PETSC_SUBCOMM_CONTIGUOUS:
 44:       PetscSubcommCreate_contiguous(psubcomm);
 45:       break;
 46:     case PETSC_SUBCOMM_INTERLACED:
 47:       PetscSubcommCreate_interlaced(psubcomm);
 48:       break;
 49:     default:
 50:       SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"PetscSubcommType %s is not supported yet",PetscSubcommTypes[type]);
 51:     }
 52:   }

 54:   PetscOptionsName("-psubcomm_view","Triggers display of PetscSubcomm context","PetscSubcommView",&flg);
 55:   if (flg) {
 56:     PetscSubcommView(psubcomm,PETSC_VIEWER_STDOUT_(psubcomm->parent));
 57:   }
 58:   PetscOptionsEnd();
 59:   return(0);
 60: }
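/*
   Editor's example (not part of the PETSc source; error checking omitted): the
   usual way to drive the options above, assuming a PetscSubcomm 'psc' already
   created with PetscSubcommCreate() and sized with PetscSubcommSetNumber():

     PetscSubcommSetFromOptions(psc);

   Running the program with

     -psubcomm_type interlaced -psubcomm_view

   rebuilds the split as PETSC_SUBCOMM_INTERLACED and prints the layout via
   PetscSubcommView() on the parent communicator.
*/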

 62: /*@C
 63:   PetscSubcommSetOptionsPrefix - Sets the prefix used for searching for all
 64:   PetscSubcomm items in the options database.

 66:   Logically collective on PetscSubcomm.

 68:   Level: intermediate

 70:   Input Parameters:
 71: +   psubcomm - PetscSubcomm context
 72: -   pre - the prefix to prepend to all PetscSubcomm item names

 74: @*/
 75: PetscErrorCode PetscSubcommSetOptionsPrefix(PetscSubcomm psubcomm,const char pre[])
 76: {
 77:   PetscErrorCode   ierr;

 80:   if (!pre) {
 81:     PetscFree(psubcomm->subcommprefix);
 82:   } else {
 83:     if (pre[0] == '-') SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Options prefix should not begin with a hyphen");
 84:     PetscFree(psubcomm->subcommprefix);
 85:     PetscStrallocpy(pre,&(psubcomm->subcommprefix));
 86:   }
 87:   return(0);
 88: }
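/*
   Editor's example (not part of the PETSc source; error checking omitted):
   giving a PetscSubcomm its own options prefix. With the hypothetical prefix
   "inner_", the options read by PetscSubcommSetFromOptions() become
   -inner_psubcomm_type and -inner_psubcomm_view:

     PetscSubcommSetOptionsPrefix(psc,"inner_");
     PetscSubcommSetFromOptions(psc);
*/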

 90: /*@C
 91:    PetscSubcommView - Views a PetscSubcomm as either ASCII text or a binary file

 93:    Collective on PetscSubcomm

 95:    Input Parameters:
 96: +  psubcomm - PetscSubcomm context
 97: -  viewer - location to view the values

 99:    Level: beginner
100: @*/
101: PetscErrorCode PetscSubcommView(PetscSubcomm psubcomm,PetscViewer viewer)
102: {
103:   PetscErrorCode    ierr;
104:   PetscBool         iascii;
105:   PetscViewerFormat format;

108:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
109:   if (iascii) {
110:     PetscViewerGetFormat(viewer,&format);
111:     if (format == PETSC_VIEWER_DEFAULT) {
112:       MPI_Comm    comm=psubcomm->parent;
113:       PetscMPIInt rank,size,subsize,subrank,duprank;

115:       MPI_Comm_size(comm,&size);
116:       PetscViewerASCIIPrintf(viewer,"PetscSubcomm type %s with total %d MPI processes:\n",PetscSubcommTypes[psubcomm->type],size);
117:       MPI_Comm_rank(comm,&rank);
118:       MPI_Comm_size(psubcomm->child,&subsize);
119:       MPI_Comm_rank(psubcomm->child,&subrank);
120:       MPI_Comm_rank(psubcomm->dupparent,&duprank);
121:       PetscViewerASCIIPushSynchronized(viewer);
122:       PetscViewerASCIISynchronizedPrintf(viewer,"  [%d], color %d, sub-size %d, sub-rank %d, duprank %d\n",rank,psubcomm->color,subsize,subrank,duprank);
123:       PetscViewerASCIIPopSynchronized(viewer);
124:       PetscViewerFlush(viewer);
125:     }
126:   } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Not supported yet");
127:   return(0);
128: }
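/*
   Editor's example (not part of the PETSc source; error checking omitted):
   viewing a configured PetscSubcomm 'psc' on the stdout viewer of its parent
   communicator, which is exactly what -psubcomm_view triggers above:

     PetscSubcommView(psc,PETSC_VIEWER_STDOUT_(psc->parent));
*/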

130: /*@C
131:   PetscSubcommSetNumber - Set total number of subcommunicators.

133:    Collective

135:    Input Parameters:
136: +  psubcomm - PetscSubcomm context
137: -  nsubcomm - the total number of subcommunicators in psubcomm

139:    Level: advanced

141: .seealso: PetscSubcommCreate(),PetscSubcommDestroy(),PetscSubcommSetType(),PetscSubcommSetTypeGeneral()
142: @*/
143: PetscErrorCode  PetscSubcommSetNumber(PetscSubcomm psubcomm,PetscInt nsubcomm)
144: {
146:   MPI_Comm       comm=psubcomm->parent;
147:   PetscMPIInt    msub,size;

150:   if (!psubcomm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"PetscSubcomm is not created. Call PetscSubcommCreate() first");
151:   MPI_Comm_size(comm,&size);
152:   PetscMPIIntCast(nsubcomm,&msub);
153:   if (msub < 1 || msub > size) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE, "Num of subcommunicators %d cannot be < 1 or > input comm size %d",msub,size);

155:   psubcomm->n = msub;
156:   return(0);
157: }
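/*
   Editor's example (not part of the PETSc source; error checking omitted):
   requesting 4 subcommunicators on PETSC_COMM_WORLD; the number must lie
   between 1 and the size of the parent communicator:

     PetscSubcomm psc;
     PetscSubcommCreate(PETSC_COMM_WORLD,&psc);
     PetscSubcommSetNumber(psc,4);
*/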

159: /*@C
160:   PetscSubcommSetType - Set type of subcommunicators.

162:    Collective

164:    Input Parameters:
165: +  psubcomm - PetscSubcomm context
166: -  subcommtype - subcommunicator type, PETSC_SUBCOMM_CONTIGUOUS or PETSC_SUBCOMM_INTERLACED

168:    Level: advanced

170: .seealso: PetscSubcommCreate(),PetscSubcommDestroy(),PetscSubcommSetNumber(),PetscSubcommSetTypeGeneral()
171: @*/
172: PetscErrorCode  PetscSubcommSetType(PetscSubcomm psubcomm,PetscSubcommType subcommtype)
173: {

177:   if (!psubcomm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"PetscSubcomm is not created. Call PetscSubcommCreate()");
178:   if (psubcomm->n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of subcommunicators %d is incorrect. Call PetscSubcommSetNumber()",psubcomm->n);

180:   if (subcommtype == PETSC_SUBCOMM_CONTIGUOUS) {
181:     PetscSubcommCreate_contiguous(psubcomm);
182:   } else if (subcommtype == PETSC_SUBCOMM_INTERLACED) {
183:     PetscSubcommCreate_interlaced(psubcomm);
184:   } else SETERRQ1(psubcomm->parent,PETSC_ERR_SUP,"PetscSubcommType %s is not supported yet",PetscSubcommTypes[subcommtype]);
185:   return(0);
186: }
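/*
   Editor's example (not part of the PETSc source; error checking omitted): the
   usual create / set-number / set-type sequence. Afterwards psc->child is this
   process's subcommunicator and psc->dupparent is the rank-permuted duplicate
   of the parent:

     PetscSubcomm psc;
     PetscSubcommCreate(PETSC_COMM_WORLD,&psc);
     PetscSubcommSetNumber(psc,2);
     PetscSubcommSetType(psc,PETSC_SUBCOMM_CONTIGUOUS);
     ... use psc->child ...
     PetscSubcommDestroy(&psc);
*/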

188: /*@C
189:   PetscSubcommSetTypeGeneral - Set a PetscSubcomm from the user's specifications

191:    Collective

193:    Input Parameters:
194: +  psubcomm - PetscSubcomm context
195: .  color   - control of subset assignment (nonnegative integer). Processes with the same color are in the same subcommunicator.
196: -  subrank - rank in the subcommunicator

198:    Level: advanced

200: .seealso: PetscSubcommCreate(),PetscSubcommDestroy(),PetscSubcommSetNumber(),PetscSubcommSetType()
201: @*/
202: PetscErrorCode PetscSubcommSetTypeGeneral(PetscSubcomm psubcomm,PetscMPIInt color,PetscMPIInt subrank)
203: {
205:   MPI_Comm       subcomm=0,dupcomm=0,comm=psubcomm->parent;
206:   PetscMPIInt    size,icolor,duprank,*recvbuf,sendbuf[3],mysubsize,rank,*subsize;
207:   PetscMPIInt    i,nsubcomm=psubcomm->n;

210:   if (!psubcomm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"PetscSubcomm is not created. Call PetscSubcommCreate()");
211:   if (nsubcomm < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of subcommunicators %d is incorrect. Call PetscSubcommSetNumber()",nsubcomm);

213:   MPI_Comm_split(comm,color,subrank,&subcomm);

215:   /* create dupcomm with the same size as comm, but whose rank, duprank, maps the subcomms contiguously into dupcomm */
216:   /* TODO: this can be done in an ostensibly scalable way (i.e., without allocating an array of size 'size') as is done in PetscObjectsCreateGlobalOrdering(). */
217:   MPI_Comm_size(comm,&size);
218:   PetscMalloc1(2*size,&recvbuf);

220:   MPI_Comm_rank(comm,&rank);
221:   MPI_Comm_size(subcomm,&mysubsize);

223:   sendbuf[0] = color;
224:   sendbuf[1] = mysubsize;
225:   MPI_Allgather(sendbuf,2,MPI_INT,recvbuf,2,MPI_INT,comm);

227:   PetscCalloc1(nsubcomm,&subsize);
228:   for (i=0; i<2*size; i+=2) {
229:     subsize[recvbuf[i]] = recvbuf[i+1];
230:   }
231:   PetscFree(recvbuf);

233:   duprank = 0;
234:   for (icolor=0; icolor<nsubcomm; icolor++) {
235:     if (icolor != color) { /* not color of this process */
236:       duprank += subsize[icolor];
237:     } else {
238:       duprank += subrank;
239:       break;
240:     }
241:   }
242:   MPI_Comm_split(comm,0,duprank,&dupcomm);

244:   PetscCommDuplicate(dupcomm,&psubcomm->dupparent,NULL);
245:   PetscCommDuplicate(subcomm,&psubcomm->child,NULL);
246:   MPI_Comm_free(&dupcomm);
247:   MPI_Comm_free(&subcomm);

249:   psubcomm->color   = color;
250:   psubcomm->subsize = subsize;
251:   psubcomm->type    = PETSC_SUBCOMM_GENERAL;
252:   return(0);
253: }
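/*
   Editor's example (not part of the PETSc source; error checking omitted): an
   explicit split of the parent ranks into 2 subcommunicators by rank parity,
   equivalent to the interlaced layout but specified by hand; color and subrank
   are passed straight to MPI_Comm_split() above:

     PetscSubcomm psc;
     PetscMPIInt  rank;
     MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
     PetscSubcommCreate(PETSC_COMM_WORLD,&psc);
     PetscSubcommSetNumber(psc,2);
     PetscSubcommSetTypeGeneral(psc,rank%2,rank/2);
*/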

255: /*@C
256:   PetscSubcommDestroy - Destroys a PetscSubcomm object

258:    Collective on PetscSubcomm

260:    Input Parameter:
261: .  psubcomm - the PetscSubcomm context

263:    Level: advanced

265: .seealso: PetscSubcommCreate(),PetscSubcommSetType()
266: @*/
267: PetscErrorCode  PetscSubcommDestroy(PetscSubcomm *psubcomm)
268: {

272:   if (!*psubcomm) return(0);
273:   PetscCommDestroy(&(*psubcomm)->dupparent);
274:   PetscCommDestroy(&(*psubcomm)->child);
275:   PetscFree((*psubcomm)->subsize);
276:   if ((*psubcomm)->subcommprefix) { PetscFree((*psubcomm)->subcommprefix); }
277:   PetscFree((*psubcomm));
278:   return(0);
279: }
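/*
   Editor's note (not part of the PETSc source): PetscFree() zeroes the pointer
   it frees, so after

     PetscSubcommDestroy(&psc);

   psc is NULL and a second call returns immediately through the (!*psubcomm)
   check above.
*/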

281: /*@C
282:   PetscSubcommCreate - Create a PetscSubcomm context.

284:    Collective

286:    Input Parameter:
287: .  comm - MPI communicator

289:    Output Parameter:
290: .  psubcomm - location to store the PetscSubcomm context

292:    Level: advanced

294: .seealso: PetscSubcommDestroy()
295: @*/
296: PetscErrorCode  PetscSubcommCreate(MPI_Comm comm,PetscSubcomm *psubcomm)
297: {
299:   PetscMPIInt    rank,size;

302:   PetscNew(psubcomm);

304:   /* set defaults */
305:   MPI_Comm_rank(comm,&rank);
306:   MPI_Comm_size(comm,&size);

308:   (*psubcomm)->parent    = comm;
309:   (*psubcomm)->dupparent = comm;
310:   (*psubcomm)->child     = PETSC_COMM_SELF;
311:   (*psubcomm)->n         = size;
312:   (*psubcomm)->color     = rank;
313:   (*psubcomm)->subsize   = NULL;
314:   (*psubcomm)->type      = PETSC_SUBCOMM_INTERLACED;
315:   return(0);
316: }
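/*
   Editor's note (not part of the PETSc source): right after PetscSubcommCreate()
   the defaults above amount to one subcommunicator per process, i.e. n = size,
   color = rank, child = PETSC_COMM_SELF; PetscSubcommSetNumber() and
   PetscSubcommSetType() (or PetscSubcommSetTypeGeneral()) then build the real
   split:

     PetscSubcomm psc;
     PetscSubcommCreate(PETSC_COMM_WORLD,&psc);
*/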

318: static PetscErrorCode PetscSubcommCreate_contiguous(PetscSubcomm psubcomm)
319: {
321:   PetscMPIInt    rank,size,*subsize,duprank=-1,subrank=-1;
322:   PetscMPIInt    np_subcomm,nleftover,i,color=-1,rankstart,nsubcomm=psubcomm->n;
323:   MPI_Comm       subcomm=0,dupcomm=0,comm=psubcomm->parent;

326:   MPI_Comm_rank(comm,&rank);
327:   MPI_Comm_size(comm,&size);

329:   /* get size of each subcommunicator */
330:   PetscMalloc1(1+nsubcomm,&subsize);

332:   np_subcomm = size/nsubcomm;
333:   nleftover  = size - nsubcomm*np_subcomm;
334:   for (i=0; i<nsubcomm; i++) {
335:     subsize[i] = np_subcomm;
336:     if (i<nleftover) subsize[i]++;
337:   }

339:   /* get color and subrank of this proc */
340:   rankstart = 0;
341:   for (i=0; i<nsubcomm; i++) {
342:     if (rank >= rankstart && rank < rankstart+subsize[i]) {
343:       color   = i;
344:       subrank = rank - rankstart;
345:       duprank = rank;
346:       break;
347:     } else rankstart += subsize[i];
348:   }

350:   MPI_Comm_split(comm,color,subrank,&subcomm);

352:   /* create dupcomm with the same size as comm, but whose rank, duprank, maps the subcomms contiguously into dupcomm */
353:   MPI_Comm_split(comm,0,duprank,&dupcomm);
354:   PetscCommDuplicate(dupcomm,&psubcomm->dupparent,NULL);
355:   PetscCommDuplicate(subcomm,&psubcomm->child,NULL);
356:   MPI_Comm_free(&dupcomm);
357:   MPI_Comm_free(&subcomm);

359:   psubcomm->color   = color;
360:   psubcomm->subsize = subsize;
361:   psubcomm->type    = PETSC_SUBCOMM_CONTIGUOUS;
362:   return(0);
363: }
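/*
   Editor's worked example (not part of the PETSc source) for the contiguous
   split: with size=5 and nsubcomm=2, np_subcomm=2 and nleftover=1, so
   subsize={3,2}; ranks 0-2 get color 0 (subranks 0,1,2) and ranks 3-4 get
   color 1 (subranks 0,1). Since duprank equals rank here, dupparent keeps the
   ordering of the parent communicator.
*/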

365: /*
366:    Note:
367:    In PCREDUNDANT, to avoid data scattering from subcomm back to original comm, we create subcommunicators
368:    by iteratively taking a process into a subcommunicator.
369:    Example: size=4, nsubcomm=(*psubcomm)->n=3
370:      comm=(*psubcomm)->parent:
371:       rank:     [0]  [1]  [2]  [3]
372:       color:     0    1    2    0

374:      subcomm=(*psubcomm)->child:
375:       subrank:  [0]  [0]  [0]  [1]

377:      dupcomm=(*psubcomm)->dupparent:
378:       duprank:  [0]  [2]  [3]  [1]

380:      Here, subcomm[color = 0] has subsize=2, owns process [0] and [3]
381:            subcomm[color = 1] has subsize=1, owns process [1]
382:            subcomm[color = 2] has subsize=1, owns process [2]
383:            dupcomm has same number of processes as comm, and its duprank maps
384:            processes in subcomm contiguously into a 1d array:
385:             duprank: [0] [1]      [2]         [3]
386:             rank:    [0] [3]      [1]         [2]
387:                     subcomm[0] subcomm[1]  subcomm[2]
388: */

390: static PetscErrorCode PetscSubcommCreate_interlaced(PetscSubcomm psubcomm)
391: {
393:   PetscMPIInt    rank,size,*subsize,duprank,subrank;
394:   PetscMPIInt    np_subcomm,nleftover,i,j,color,nsubcomm=psubcomm->n;
395:   MPI_Comm       subcomm=0,dupcomm=0,comm=psubcomm->parent;

398:   MPI_Comm_rank(comm,&rank);
399:   MPI_Comm_size(comm,&size);

401:   /* get size of each subcommunicator */
402:   PetscMalloc1(1+nsubcomm,&subsize);

404:   np_subcomm = size/nsubcomm;
405:   nleftover  = size - nsubcomm*np_subcomm;
406:   for (i=0; i<nsubcomm; i++) {
407:     subsize[i] = np_subcomm;
408:     if (i<nleftover) subsize[i]++;
409:   }

411:   /* find color for this proc */
412:   color   = rank%nsubcomm;
413:   subrank = rank/nsubcomm;

415:   MPI_Comm_split(comm,color,subrank,&subcomm);

417:   j = 0; duprank = 0;
418:   for (i=0; i<nsubcomm; i++) {
419:     if (j == color) {
420:       duprank += subrank;
421:       break;
422:     }
423:     duprank += subsize[i]; j++;
424:   }

426:   /* create dupcomm with the same size as comm, but whose rank, duprank, maps the subcomms contiguously into dupcomm */
427:   MPI_Comm_split(comm,0,duprank,&dupcomm);
428:   PetscCommDuplicate(dupcomm,&psubcomm->dupparent,NULL);
429:   PetscCommDuplicate(subcomm,&psubcomm->child,NULL);
430:   MPI_Comm_free(&dupcomm);
431:   MPI_Comm_free(&subcomm);

433:   psubcomm->color   = color;
434:   psubcomm->subsize = subsize;
435:   psubcomm->type    = PETSC_SUBCOMM_INTERLACED;
436:   return(0);
437: }
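/*
   Editor's worked example (not part of the PETSc source) for the interlaced
   split: with size=5 and nsubcomm=2, subsize={3,2}; color=rank%2 and
   subrank=rank/2 put ranks {0,2,4} in subcomm 0 and ranks {1,3} in subcomm 1.
   The duprank loop then packs the subcomms contiguously:
     rank:    0  2  4  1  3
     duprank: 0  1  2  3  4
*/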