/*
   subcomm.c — PETSc 3.8.4 (2018-03-24)
*/

  2: /*
  3:      Provides utility routines for split MPI communicator.
  4: */
  5:  #include <petscsys.h>
  6:  #include <petscviewer.h>

  8: const char *const PetscSubcommTypes[] = {"GENERAL","CONTIGUOUS","INTERLACED","PetscSubcommType","PETSC_SUBCOMM_",0};

 10: extern PetscErrorCode PetscSubcommCreate_contiguous(PetscSubcomm);
 11: extern PetscErrorCode PetscSubcommCreate_interlaced(PetscSubcomm);

 13: /*@C
 14:    PetscSubcommSetFromOptions - Allows setting options from a PetscSubcomm

 16:    Collective on PetscSubcomm

 18:    Input Parameter:
 19: .  psubcomm - PetscSubcomm context

 21:    Level: beginner

 23: @*/
 24: PetscErrorCode PetscSubcommSetFromOptions(PetscSubcomm psubcomm)
 25: {
 26:   PetscErrorCode   ierr;
 27:   PetscSubcommType type;
 28:   PetscBool        flg;

 31:   if (!psubcomm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"Must call PetscSubcommCreate firt");

 33:   PetscOptionsBegin(psubcomm->parent,psubcomm->subcommprefix,"Options for PetscSubcomm",NULL);
 34:   PetscOptionsEnum("-psubcomm_type",NULL,NULL,PetscSubcommTypes,(PetscEnum)psubcomm->type,(PetscEnum*)&type,&flg);
 35:   if (flg && psubcomm->type != type) {
 36:     /* free old structures */
 37:     PetscCommDestroy(&(psubcomm)->dupparent);
 38:     PetscCommDestroy(&(psubcomm)->child);
 39:     PetscFree((psubcomm)->subsize);
 40:     switch (type) {
 41:     case PETSC_SUBCOMM_GENERAL:
 42:       SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Runtime option PETSC_SUBCOMM_GENERAL is not supported, use PetscSubcommSetTypeGeneral()");
 43:     case PETSC_SUBCOMM_CONTIGUOUS:
 44:       PetscSubcommCreate_contiguous(psubcomm);
 45:       break;
 46:     case PETSC_SUBCOMM_INTERLACED:
 47:       PetscSubcommCreate_interlaced(psubcomm);
 48:       break;
 49:     default:
 50:       SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"PetscSubcommType %s is not supported yet",PetscSubcommTypes[type]);
 51:     }
 52:   }

 54:   PetscOptionsName("-psubcomm_view","Triggers display of PetscSubcomm context","PetscSubcommView",&flg);
 55:   if (flg) {
 56:     PetscSubcommView(psubcomm,PETSC_VIEWER_STDOUT_WORLD);
 57:   }
 58:   PetscOptionsEnd();
 59:   return(0);
 60: }

 62: /*@C
 63:   PetscSubcommSetOptionsPrefix - Sets the prefix used for searching for all
 64:   PetscSubcomm items in the options database.

 66:   Logically collective on PetscSubcomm.

 68:   Level: Intermediate

 70:   Input Parameters:
 71: +   psubcomm - PetscSubcomm context
 72: -   prefix - the prefix to prepend all PetscSubcomm item names with.

 74: @*/
 75: PetscErrorCode PetscSubcommSetOptionsPrefix(PetscSubcomm psubcomm,const char pre[])
 76: {
 77:   PetscErrorCode   ierr;

 80:    if (!pre) {
 81:     PetscFree(psubcomm->subcommprefix);
 82:   } else {
 83:     if (pre[0] == '-') SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Options prefix should not begin with a hypen");
 84:     PetscFree(psubcomm->subcommprefix);
 85:     PetscStrallocpy(pre,&(psubcomm->subcommprefix));
 86:   }
 87:   return(0);
 88: }

 90: /*@C
 91:    PetscSubcommView - Views a PetscSubcomm of values as either ASCII text or a binary file

 93:    Collective on PetscSubcomm

 95:    Input Parameter:
 96: +  psubcomm - PetscSubcomm context
 97: -  viewer - location to view the values

 99:    Level: beginner
100: @*/
101: PetscErrorCode PetscSubcommView(PetscSubcomm psubcomm,PetscViewer viewer)
102: {
103:   PetscErrorCode    ierr;
104:   PetscBool         iascii;
105:   PetscViewerFormat format;

108:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
109:   if (iascii) {
110:     PetscViewerGetFormat(viewer,&format);
111:     if (format == PETSC_VIEWER_DEFAULT) {
112:       MPI_Comm    comm=psubcomm->parent;
113:       PetscMPIInt rank,size,subsize,subrank,duprank;

115:       MPI_Comm_size(comm,&size);
116:       PetscViewerASCIIPrintf(viewer,"PetscSubcomm type %s with total %d MPI processes:\n",PetscSubcommTypes[psubcomm->type],size);
117:       MPI_Comm_rank(comm,&rank);
118:       MPI_Comm_size(psubcomm->child,&subsize);
119:       MPI_Comm_rank(psubcomm->child,&subrank);
120:       MPI_Comm_rank(psubcomm->dupparent,&duprank);
121:       PetscSynchronizedPrintf(comm,"  [%d], color %d, sub-size %d, sub-rank %d, duprank %d\n",rank,psubcomm->color,subsize,subrank,duprank);
122:       PetscSynchronizedFlush(comm,PETSC_STDOUT);
123:     }
124:   } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Not supported yet");
125:   return(0);
126: }

128: /*@C
129:   PetscSubcommSetNumber - Set total number of subcommunicators.

131:    Collective on MPI_Comm

133:    Input Parameter:
134: +  psubcomm - PetscSubcomm context
135: -  nsubcomm - the total number of subcommunicators in psubcomm

137:    Level: advanced

139: .keywords: communicator

141: .seealso: PetscSubcommCreate(),PetscSubcommDestroy(),PetscSubcommSetType(),PetscSubcommSetTypeGeneral()
142: @*/
143: PetscErrorCode  PetscSubcommSetNumber(PetscSubcomm psubcomm,PetscInt nsubcomm)
144: {
146:   MPI_Comm       comm=psubcomm->parent;
147:   PetscMPIInt    msub,size;

150:   if (!psubcomm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"PetscSubcomm is not created. Call PetscSubcommCreate() first");
151:   MPI_Comm_size(comm,&size);
152:   PetscMPIIntCast(nsubcomm,&msub);
153:   if (msub < 1 || msub > size) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE, "Num of subcommunicators %d cannot be < 1 or > input comm size %d",msub,size);

155:   psubcomm->n = msub;
156:   return(0);
157: }

159: /*@C
160:   PetscSubcommSetType - Set type of subcommunicators.

162:    Collective on MPI_Comm

164:    Input Parameter:
165: +  psubcomm - PetscSubcomm context
166: -  subcommtype - subcommunicator type, PETSC_SUBCOMM_CONTIGUOUS,PETSC_SUBCOMM_INTERLACED

168:    Level: advanced

170: .keywords: communicator

172: .seealso: PetscSubcommCreate(),PetscSubcommDestroy(),PetscSubcommSetNumber(),PetscSubcommSetTypeGeneral()
173: @*/
174: PetscErrorCode  PetscSubcommSetType(PetscSubcomm psubcomm,PetscSubcommType subcommtype)
175: {

179:   if (!psubcomm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"PetscSubcomm is not created. Call PetscSubcommCreate()");
180:   if (psubcomm->n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of subcommunicators %d is incorrect. Call PetscSubcommSetNumber()",psubcomm->n);

182:   if (subcommtype == PETSC_SUBCOMM_CONTIGUOUS) {
183:     PetscSubcommCreate_contiguous(psubcomm);
184:   } else if (subcommtype == PETSC_SUBCOMM_INTERLACED) {
185:     PetscSubcommCreate_interlaced(psubcomm);
186:   } else SETERRQ1(psubcomm->parent,PETSC_ERR_SUP,"PetscSubcommType %s is not supported yet",PetscSubcommTypes[subcommtype]);
187:   return(0);
188: }

190: /*@C
191:   PetscSubcommSetTypeGeneral - Set a PetscSubcomm from user's specifications

193:    Collective on MPI_Comm

195:    Input Parameter:
196: +  psubcomm - PetscSubcomm context
197: .  color   - control of subset assignment (nonnegative integer). Processes with the same color are in the same subcommunicator.
198: -  subrank - rank in the subcommunicator

200:    Level: advanced

202: .keywords: communicator, create

204: .seealso: PetscSubcommCreate(),PetscSubcommDestroy(),PetscSubcommSetNumber(),PetscSubcommSetType()
205: @*/
206: PetscErrorCode PetscSubcommSetTypeGeneral(PetscSubcomm psubcomm,PetscMPIInt color,PetscMPIInt subrank)
207: {
209:   MPI_Comm       subcomm=0,dupcomm=0,comm=psubcomm->parent;
210:   PetscMPIInt    size,icolor,duprank,*recvbuf,sendbuf[3],mysubsize,rank,*subsize;
211:   PetscMPIInt    i,nsubcomm=psubcomm->n;

214:   if (!psubcomm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"PetscSubcomm is not created. Call PetscSubcommCreate()");
215:   if (nsubcomm < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of subcommunicators %d is incorrect. Call PetscSubcommSetNumber()",nsubcomm);

217:   MPI_Comm_split(comm,color,subrank,&subcomm);

219:   /* create dupcomm with same size as comm, but its rank, duprank, maps subcomm's contiguously into dupcomm */
220:   /* TODO: this can be done in an ostensibly scalale way (i.e., without allocating an array of size 'size') as is done in PetscObjectsCreateGlobalOrdering(). */
221:   MPI_Comm_size(comm,&size);
222:   PetscMalloc1(2*size,&recvbuf);

224:   MPI_Comm_rank(comm,&rank);
225:   MPI_Comm_size(subcomm,&mysubsize);

227:   sendbuf[0] = color;
228:   sendbuf[1] = mysubsize;
229:   MPI_Allgather(sendbuf,2,MPI_INT,recvbuf,2,MPI_INT,comm);

231:   PetscCalloc1(nsubcomm,&subsize);
232:   for (i=0; i<2*size; i+=2) {
233:     subsize[recvbuf[i]] = recvbuf[i+1];
234:   }
235:   PetscFree(recvbuf);

237:   duprank = 0;
238:   for (icolor=0; icolor<nsubcomm; icolor++) {
239:     if (icolor != color) { /* not color of this process */
240:       duprank += subsize[icolor];
241:     } else {
242:       duprank += subrank;
243:       break;
244:     }
245:   }
246:   MPI_Comm_split(comm,0,duprank,&dupcomm);

248:   PetscCommDuplicate(dupcomm,&psubcomm->dupparent,NULL);
249:   PetscCommDuplicate(subcomm,&psubcomm->child,NULL);
250:   MPI_Comm_free(&dupcomm);
251:   MPI_Comm_free(&subcomm);

253:   psubcomm->color   = color;
254:   psubcomm->subsize = subsize;
255:   psubcomm->type    = PETSC_SUBCOMM_GENERAL;
256:   return(0);
257: }

259: /*@C
260:   PetscSubcommDestroy - Destroys a PetscSubcomm object

262:    Collective on PetscSubcomm

264:    Input Parameter:
265:    .  psubcomm - the PetscSubcomm context

267:    Level: advanced

269: .seealso: PetscSubcommCreate(),PetscSubcommSetType()
270: @*/
271: PetscErrorCode  PetscSubcommDestroy(PetscSubcomm *psubcomm)
272: {

276:   if (!*psubcomm) return(0);
277:   PetscCommDestroy(&(*psubcomm)->dupparent);
278:   PetscCommDestroy(&(*psubcomm)->child);
279:   PetscFree((*psubcomm)->subsize);
280:   if ((*psubcomm)->subcommprefix) { PetscFree((*psubcomm)->subcommprefix); }
281:   PetscFree((*psubcomm));
282:   return(0);
283: }

285: /*@C
286:   PetscSubcommCreate - Create a PetscSubcomm context.

288:    Collective on MPI_Comm

290:    Input Parameter:
291: .  comm - MPI communicator

293:    Output Parameter:
294: .  psubcomm - location to store the PetscSubcomm context

296:    Level: advanced

298: .keywords: communicator, create

300: .seealso: PetscSubcommDestroy()
301: @*/
302: PetscErrorCode  PetscSubcommCreate(MPI_Comm comm,PetscSubcomm *psubcomm)
303: {
305:   PetscMPIInt    rank,size;

308:   PetscNew(psubcomm);

310:   /* set defaults */
311:   MPI_Comm_rank(comm,&rank);
312:   MPI_Comm_size(comm,&size);

314:   (*psubcomm)->parent    = comm;
315:   (*psubcomm)->dupparent = comm;
316:   (*psubcomm)->child     = PETSC_COMM_SELF;
317:   (*psubcomm)->n         = size;
318:   (*psubcomm)->color     = rank;
319:   (*psubcomm)->subsize   = NULL;
320:   (*psubcomm)->type      = PETSC_SUBCOMM_INTERLACED;
321:   return(0);
322: }

324: PetscErrorCode PetscSubcommCreate_contiguous(PetscSubcomm psubcomm)
325: {
327:   PetscMPIInt    rank,size,*subsize,duprank=-1,subrank=-1;
328:   PetscMPIInt    np_subcomm,nleftover,i,color=-1,rankstart,nsubcomm=psubcomm->n;
329:   MPI_Comm       subcomm=0,dupcomm=0,comm=psubcomm->parent;

332:   MPI_Comm_rank(comm,&rank);
333:   MPI_Comm_size(comm,&size);

335:   /* get size of each subcommunicator */
336:   PetscMalloc1(1+nsubcomm,&subsize);

338:   np_subcomm = size/nsubcomm;
339:   nleftover  = size - nsubcomm*np_subcomm;
340:   for (i=0; i<nsubcomm; i++) {
341:     subsize[i] = np_subcomm;
342:     if (i<nleftover) subsize[i]++;
343:   }

345:   /* get color and subrank of this proc */
346:   rankstart = 0;
347:   for (i=0; i<nsubcomm; i++) {
348:     if (rank >= rankstart && rank < rankstart+subsize[i]) {
349:       color   = i;
350:       subrank = rank - rankstart;
351:       duprank = rank;
352:       break;
353:     } else rankstart += subsize[i];
354:   }

356:   MPI_Comm_split(comm,color,subrank,&subcomm);

358:   /* create dupcomm with same size as comm, but its rank, duprank, maps subcomm's contiguously into dupcomm */
359:   MPI_Comm_split(comm,0,duprank,&dupcomm);
360:   PetscCommDuplicate(dupcomm,&psubcomm->dupparent,NULL);
361:   PetscCommDuplicate(subcomm,&psubcomm->child,NULL);
362:   MPI_Comm_free(&dupcomm);
363:   MPI_Comm_free(&subcomm);

365:   psubcomm->color   = color;
366:   psubcomm->subsize = subsize;
367:   psubcomm->type    = PETSC_SUBCOMM_CONTIGUOUS;
368:   return(0);
369: }

371: /*
372:    Note:
373:    In PCREDUNDANT, to avoid data scattering from subcomm back to original comm, we create subcommunicators
374:    by iteratively taking a process into a subcommunicator.
375:    Example: size=4, nsubcomm=(*psubcomm)->n=3
376:      comm=(*psubcomm)->parent:
377:       rank:     [0]  [1]  [2]  [3]
378:       color:     0    1    2    0

 380:      subcomm=(*psubcomm)->child:
381:       subrank:  [0]  [0]  [0]  [1]

383:      dupcomm=(*psubcomm)->dupparent:
384:       duprank:  [0]  [2]  [3]  [1]

386:      Here, subcomm[color = 0] has subsize=2, owns process [0] and [3]
387:            subcomm[color = 1] has subsize=1, owns process [1]
388:            subcomm[color = 2] has subsize=1, owns process [2]
389:            dupcomm has same number of processes as comm, and its duprank maps
390:            processes in subcomm contiguously into a 1d array:
391:             duprank: [0] [1]      [2]         [3]
392:             rank:    [0] [3]      [1]         [2]
393:                     subcomm[0] subcomm[1]  subcomm[2]
394: */

396: PetscErrorCode PetscSubcommCreate_interlaced(PetscSubcomm psubcomm)
397: {
399:   PetscMPIInt    rank,size,*subsize,duprank,subrank;
400:   PetscMPIInt    np_subcomm,nleftover,i,j,color,nsubcomm=psubcomm->n;
401:   MPI_Comm       subcomm=0,dupcomm=0,comm=psubcomm->parent;

404:   MPI_Comm_rank(comm,&rank);
405:   MPI_Comm_size(comm,&size);

407:   /* get size of each subcommunicator */
408:   PetscMalloc1(1+nsubcomm,&subsize);

410:   np_subcomm = size/nsubcomm;
411:   nleftover  = size - nsubcomm*np_subcomm;
412:   for (i=0; i<nsubcomm; i++) {
413:     subsize[i] = np_subcomm;
414:     if (i<nleftover) subsize[i]++;
415:   }

417:   /* find color for this proc */
418:   color   = rank%nsubcomm;
419:   subrank = rank/nsubcomm;

421:   MPI_Comm_split(comm,color,subrank,&subcomm);

423:   j = 0; duprank = 0;
424:   for (i=0; i<nsubcomm; i++) {
425:     if (j == color) {
426:       duprank += subrank;
427:       break;
428:     }
429:     duprank += subsize[i]; j++;
430:   }

432:   /* create dupcomm with same size as comm, but its rank, duprank, maps subcomm's contiguously into dupcomm */
433:   MPI_Comm_split(comm,0,duprank,&dupcomm);
434:   PetscCommDuplicate(dupcomm,&psubcomm->dupparent,NULL);
435:   PetscCommDuplicate(subcomm,&psubcomm->child,NULL);
436:   MPI_Comm_free(&dupcomm);
437:   MPI_Comm_free(&subcomm);

439:   psubcomm->color   = color;
440:   psubcomm->subsize = subsize;
441:   psubcomm->type    = PETSC_SUBCOMM_INTERLACED;
442:   return(0);
443: }