Actual source code: subcomm.c

petsc-3.6.1 2015-08-06
/*
     Provides utility routines for split MPI communicators.
*/
#include <petscsys.h>    /*I   "petscsys.h"    I*/
#include <petscviewer.h>

const char *const PetscSubcommTypes[] = {"GENERAL","CONTIGUOUS","INTERLACED","PetscSubcommType","PETSC_SUBCOMM_",0};

extern PetscErrorCode PetscSubcommCreate_contiguous(PetscSubcomm);
extern PetscErrorCode PetscSubcommCreate_interlaced(PetscSubcomm);
PetscErrorCode PetscSubcommSetFromOptions(PetscSubcomm psubcomm)
{
  PetscErrorCode   ierr;
  PetscSubcommType type;
  PetscBool        flg;

  PetscFunctionBegin;
  if (!psubcomm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"Must call PetscSubcommCreate() first");
  type = psubcomm->type;
  ierr = PetscOptionsGetEnum(NULL,"-psubcomm_type",PetscSubcommTypes,(PetscEnum*)&type,&flg);CHKERRQ(ierr);
  if (flg && psubcomm->type != type) {
    /* free old structures */
    ierr = PetscCommDestroy(&(psubcomm)->dupparent);CHKERRQ(ierr);
    ierr = PetscCommDestroy(&(psubcomm)->child);CHKERRQ(ierr);
    ierr = PetscFree((psubcomm)->subsize);CHKERRQ(ierr);
    switch (type) {
    case PETSC_SUBCOMM_GENERAL:
      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Runtime option PETSC_SUBCOMM_GENERAL is not supported, use PetscSubcommSetTypeGeneral()");
    case PETSC_SUBCOMM_CONTIGUOUS:
      ierr = PetscSubcommCreate_contiguous(psubcomm);CHKERRQ(ierr);
      break;
    case PETSC_SUBCOMM_INTERLACED:
      ierr = PetscSubcommCreate_interlaced(psubcomm);CHKERRQ(ierr);
      break;
    default:
      SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"PetscSubcommType %s is not supported yet",PetscSubcommTypes[type]);
    }
  }

  ierr = PetscOptionsHasName(NULL,"-psubcomm_view",&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscSubcommView(psubcomm,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
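/*
   Illustrative usage sketch (not part of the original file; the helper name is
   hypothetical): PetscSubcommSetFromOptions() honors the two runtime options
   read above, e.g. "-psubcomm_type interlaced -psubcomm_view" on the command line.
*/
#if 0  /* example only, not compiled */
static PetscErrorCode ExampleSubcommFromOptions(void)
{
  PetscSubcomm   psubcomm;
  PetscErrorCode ierr;

  ierr = PetscSubcommCreate(PETSC_COMM_WORLD,&psubcomm);CHKERRQ(ierr);
  ierr = PetscSubcommSetNumber(psubcomm,2);CHKERRQ(ierr);
  ierr = PetscSubcommSetFromOptions(psubcomm);CHKERRQ(ierr); /* reads -psubcomm_type and -psubcomm_view */
  ierr = PetscSubcommDestroy(&psubcomm);CHKERRQ(ierr);
  return 0;
}
#endif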

PetscErrorCode PetscSubcommView(PetscSubcomm psubcomm,PetscViewer viewer)
{
  PetscErrorCode    ierr;
  PetscBool         iascii;
  PetscViewerFormat format;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
    if (format == PETSC_VIEWER_DEFAULT) {
      MPI_Comm    comm=psubcomm->parent;
      PetscMPIInt rank,size,subsize,subrank,duprank;

      ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"PetscSubcomm type %s with total %d MPI processes:\n",PetscSubcommTypes[psubcomm->type],size);CHKERRQ(ierr);
      ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
      ierr = MPI_Comm_size(psubcomm->child,&subsize);CHKERRQ(ierr);
      ierr = MPI_Comm_rank(psubcomm->child,&subrank);CHKERRQ(ierr);
      ierr = MPI_Comm_rank(psubcomm->dupparent,&duprank);CHKERRQ(ierr);
      ierr = PetscSynchronizedPrintf(comm,"  [%d], color %d, sub-size %d, sub-rank %d, duprank %d\n",rank,psubcomm->color,subsize,subrank,duprank);CHKERRQ(ierr);
      ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
    }
  } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Not supported yet");
  PetscFunctionReturn(0);
}
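/*
   For illustration (derived from the format strings above, not output captured
   from a real run): with 4 MPI processes split into 2 interlaced
   subcommunicators, the view would print

     PetscSubcomm type INTERLACED with total 4 MPI processes:
       [0], color 0, sub-size 2, sub-rank 0, duprank 0
       [1], color 1, sub-size 2, sub-rank 0, duprank 2
       [2], color 0, sub-size 2, sub-rank 1, duprank 1
       [3], color 1, sub-size 2, sub-rank 1, duprank 3
*/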

/*@C
  PetscSubcommSetNumber - Set the total number of subcommunicators.

   Collective on MPI_Comm

   Input Parameters:
+  psubcomm - PetscSubcomm context
-  nsubcomm - the total number of subcommunicators in psubcomm

   Level: advanced

.keywords: communicator

.seealso: PetscSubcommCreate(),PetscSubcommDestroy(),PetscSubcommSetType(),PetscSubcommSetTypeGeneral()
@*/
PetscErrorCode  PetscSubcommSetNumber(PetscSubcomm psubcomm,PetscInt nsubcomm)
{
  PetscErrorCode ierr;
  MPI_Comm       comm;
  PetscMPIInt    msub,size;

  PetscFunctionBegin;
  if (!psubcomm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"PetscSubcomm is not created. Call PetscSubcommCreate() first");
  comm = psubcomm->parent; /* safe to dereference only after the NULL check above */
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = PetscMPIIntCast(nsubcomm,&msub);CHKERRQ(ierr);
  if (msub < 1 || msub > size) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Num of subcommunicators %d cannot be < 1 or > input comm size %d",msub,size);

  psubcomm->n = msub;
  PetscFunctionReturn(0);
}
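/*
   Sketch (illustrative, not from the original file): nsubcomm must lie in
   [1, size of parent comm]; one plausible choice derives it from the global
   size, here ceil(size/4) so each subcommunicator holds at most 4 processes.
*/
#if 0  /* example only, not compiled */
static PetscErrorCode ExampleSetNumber(PetscSubcomm psubcomm)
{
  PetscMPIInt    size;
  PetscErrorCode ierr;

  ierr = MPI_Comm_size(psubcomm->parent,&size);CHKERRQ(ierr);
  ierr = PetscSubcommSetNumber(psubcomm,(size+3)/4);CHKERRQ(ierr); /* ceil(size/4), always within [1,size] */
  return 0;
}
#endif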

/*@C
  PetscSubcommSetType - Set the type of subcommunicators.

   Collective on MPI_Comm

   Input Parameters:
+  psubcomm - PetscSubcomm context
-  subcommtype - subcommunicator type, PETSC_SUBCOMM_CONTIGUOUS or PETSC_SUBCOMM_INTERLACED

   Level: advanced

.keywords: communicator

.seealso: PetscSubcommCreate(),PetscSubcommDestroy(),PetscSubcommSetNumber(),PetscSubcommSetTypeGeneral()
@*/
PetscErrorCode  PetscSubcommSetType(PetscSubcomm psubcomm,PetscSubcommType subcommtype)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!psubcomm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"PetscSubcomm is not created. Call PetscSubcommCreate()");
  if (psubcomm->n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of subcommunicators %d is incorrect. Call PetscSubcommSetNumber()",psubcomm->n);

  if (subcommtype == PETSC_SUBCOMM_CONTIGUOUS) {
    ierr = PetscSubcommCreate_contiguous(psubcomm);CHKERRQ(ierr);
  } else if (subcommtype == PETSC_SUBCOMM_INTERLACED) {
    ierr = PetscSubcommCreate_interlaced(psubcomm);CHKERRQ(ierr);
  } else SETERRQ1(psubcomm->parent,PETSC_ERR_SUP,"PetscSubcommType %s is not supported yet",PetscSubcommTypes[subcommtype]);
  PetscFunctionReturn(0);
}
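/*
   Illustrative lifecycle sketch (not part of the original file; the helper
   name is hypothetical): split PETSC_COMM_WORLD into two contiguous
   subcommunicators and view the result.
*/
#if 0  /* example only, not compiled */
static PetscErrorCode ExampleContiguousSplit(void)
{
  PetscSubcomm   sc;
  PetscErrorCode ierr;

  ierr = PetscSubcommCreate(PETSC_COMM_WORLD,&sc);CHKERRQ(ierr);
  ierr = PetscSubcommSetNumber(sc,2);CHKERRQ(ierr);
  ierr = PetscSubcommSetType(sc,PETSC_SUBCOMM_CONTIGUOUS);CHKERRQ(ierr);
  ierr = PetscSubcommView(sc,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  /* sc->child is this process's subcommunicator; sc->dupparent is the reordered parent */
  ierr = PetscSubcommDestroy(&sc);CHKERRQ(ierr);
  return 0;
}
#endif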

/*@C
  PetscSubcommSetTypeGeneral - Set a PetscSubcomm from the user's specifications

   Collective on MPI_Comm

   Input Parameters:
+  psubcomm - PetscSubcomm context
.  color   - control of subset assignment (nonnegative integer); processes with the same color are in the same subcommunicator
-  subrank - rank in the subcommunicator

   Level: advanced

.keywords: communicator, create

.seealso: PetscSubcommCreate(),PetscSubcommDestroy(),PetscSubcommSetNumber(),PetscSubcommSetType()
@*/
PetscErrorCode PetscSubcommSetTypeGeneral(PetscSubcomm psubcomm,PetscMPIInt color,PetscMPIInt subrank)
{
  PetscErrorCode ierr;
  MPI_Comm       subcomm=MPI_COMM_NULL,dupcomm=MPI_COMM_NULL,comm;
  PetscMPIInt    size,icolor,duprank,*recvbuf,sendbuf[2],mysubsize,rank,*subsize;
  PetscMPIInt    i,nsubcomm;

  PetscFunctionBegin;
  if (!psubcomm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"PetscSubcomm is not created. Call PetscSubcommCreate()");
  nsubcomm = psubcomm->n;
  if (nsubcomm < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of subcommunicators %d is incorrect. Call PetscSubcommSetNumber()",nsubcomm);
  comm = psubcomm->parent; /* safe to dereference only after the NULL check above */

  ierr = MPI_Comm_split(comm,color,subrank,&subcomm);CHKERRQ(ierr);

  /* create dupcomm with the same size as comm, but whose rank, duprank, maps the subcommunicators' ranks contiguously into dupcomm */
  /* TODO: this can be done in an ostensibly scalable way (i.e., without allocating an array of size 'size') as is done in PetscObjectsCreateGlobalOrdering(). */
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = PetscMalloc1(2*size,&recvbuf);CHKERRQ(ierr);

  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(subcomm,&mysubsize);CHKERRQ(ierr);

  /* gather (color, subcomm size) pairs from all processes so every process can compute all subcommunicator sizes */
  sendbuf[0] = color;
  sendbuf[1] = mysubsize;
  ierr = MPI_Allgather(sendbuf,2,MPI_INT,recvbuf,2,MPI_INT,comm);CHKERRQ(ierr);

  ierr = PetscCalloc1(nsubcomm,&subsize);CHKERRQ(ierr);
  for (i=0; i<2*size; i+=2) {
    subsize[recvbuf[i]] = recvbuf[i+1];
  }
  ierr = PetscFree(recvbuf);CHKERRQ(ierr);

  /* duprank = (sum of the sizes of the lower-colored subcommunicators) + subrank */
  duprank = 0;
  for (icolor=0; icolor<nsubcomm; icolor++) {
    if (icolor != color) { /* not the color of this process */
      duprank += subsize[icolor];
    } else {
      duprank += subrank;
      break;
    }
  }
  ierr = MPI_Comm_split(comm,0,duprank,&dupcomm);CHKERRQ(ierr);

  ierr = PetscCommDuplicate(dupcomm,&psubcomm->dupparent,NULL);CHKERRQ(ierr);
  ierr = PetscCommDuplicate(subcomm,&psubcomm->child,NULL);CHKERRQ(ierr);
  ierr = MPI_Comm_free(&dupcomm);CHKERRQ(ierr);
  ierr = MPI_Comm_free(&subcomm);CHKERRQ(ierr);

  psubcomm->color   = color;
  psubcomm->subsize = subsize;
  psubcomm->type    = PETSC_SUBCOMM_GENERAL;
  PetscFunctionReturn(0);
}
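/*
   Illustrative sketch (not part of the original file; the helper name is
   hypothetical): a general split placing even parent ranks in subcommunicator
   0 and odd ranks in subcommunicator 1, ordered by parent rank within each
   group; PetscSubcommSetNumber(psubcomm,2) must have been called first.
*/
#if 0  /* example only, not compiled */
static PetscErrorCode ExampleEvenOddSplit(PetscSubcomm psubcomm)
{
  PetscMPIInt    rank;
  PetscErrorCode ierr;

  ierr = MPI_Comm_rank(psubcomm->parent,&rank);CHKERRQ(ierr);
  ierr = PetscSubcommSetTypeGeneral(psubcomm,rank%2,rank/2);CHKERRQ(ierr);
  return 0;
}
#endif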

PetscErrorCode  PetscSubcommDestroy(PetscSubcomm *psubcomm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*psubcomm) PetscFunctionReturn(0);
  ierr = PetscCommDestroy(&(*psubcomm)->dupparent);CHKERRQ(ierr);
  ierr = PetscCommDestroy(&(*psubcomm)->child);CHKERRQ(ierr);
  ierr = PetscFree((*psubcomm)->subsize);CHKERRQ(ierr);
  ierr = PetscFree((*psubcomm));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
  PetscSubcommCreate - Create a PetscSubcomm context.

   Collective on MPI_Comm

   Input Parameter:
.  comm - MPI communicator

   Output Parameter:
.  psubcomm - location to store the PetscSubcomm context

   Level: advanced

.keywords: communicator, create

.seealso: PetscSubcommDestroy()
@*/
PetscErrorCode  PetscSubcommCreate(MPI_Comm comm,PetscSubcomm *psubcomm)
{
  PetscErrorCode ierr;
  PetscMPIInt    rank,size;

  PetscFunctionBegin;
  ierr = PetscNew(psubcomm);CHKERRQ(ierr);

  /* set defaults: each process forms its own subcommunicator */
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);

  (*psubcomm)->parent    = comm;
  (*psubcomm)->dupparent = comm;
  (*psubcomm)->child     = PETSC_COMM_SELF;
  (*psubcomm)->n         = size;
  (*psubcomm)->color     = rank;
  (*psubcomm)->subsize   = NULL;
  (*psubcomm)->type      = PETSC_SUBCOMM_INTERLACED;
  PetscFunctionReturn(0);
}
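/*
   Illustrative check (not part of the original file; the helper name is
   hypothetical): right after PetscSubcommCreate() each process is its own
   subcommunicator, i.e. child is PETSC_COMM_SELF, n equals the parent size,
   and color equals the parent rank.
*/
#if 0  /* example only, not compiled */
static PetscErrorCode ExampleDefaults(void)
{
  PetscSubcomm   sc;
  PetscMPIInt    rank;
  PetscErrorCode ierr;

  ierr = PetscSubcommCreate(PETSC_COMM_WORLD,&sc);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);
  if (sc->color != rank) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"unexpected default color");
  ierr = PetscSubcommDestroy(&sc);CHKERRQ(ierr);
  return 0;
}
#endif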

PetscErrorCode PetscSubcommCreate_contiguous(PetscSubcomm psubcomm)
{
  PetscErrorCode ierr;
  PetscMPIInt    rank,size,*subsize,duprank=-1,subrank=-1;
  PetscMPIInt    np_subcomm,nleftover,i,color=-1,rankstart,nsubcomm=psubcomm->n;
  MPI_Comm       subcomm=MPI_COMM_NULL,dupcomm=MPI_COMM_NULL,comm=psubcomm->parent;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);

  /* get the size of each subcommunicator */
  ierr = PetscMalloc1(1+nsubcomm,&subsize);CHKERRQ(ierr);

  np_subcomm = size/nsubcomm;
  nleftover  = size - nsubcomm*np_subcomm;
  for (i=0; i<nsubcomm; i++) {
    subsize[i] = np_subcomm;
    if (i<nleftover) subsize[i]++; /* the first nleftover subcommunicators get one extra process */
  }

  /* get the color and subrank of this process */
  rankstart = 0;
  for (i=0; i<nsubcomm; i++) {
    if (rank >= rankstart && rank < rankstart+subsize[i]) {
      color   = i;
      subrank = rank - rankstart;
      duprank = rank; /* contiguous blocks: the parent rank already orders the subcommunicators contiguously */
      break;
    } else rankstart += subsize[i];
  }

  ierr = MPI_Comm_split(comm,color,subrank,&subcomm);CHKERRQ(ierr);

  /* create dupcomm with the same size as comm, but whose rank, duprank, maps the subcommunicators' ranks contiguously into dupcomm */
  ierr = MPI_Comm_split(comm,0,duprank,&dupcomm);CHKERRQ(ierr);
  ierr = PetscCommDuplicate(dupcomm,&psubcomm->dupparent,NULL);CHKERRQ(ierr);
  ierr = PetscCommDuplicate(subcomm,&psubcomm->child,NULL);CHKERRQ(ierr);
  ierr = MPI_Comm_free(&dupcomm);CHKERRQ(ierr);
  ierr = MPI_Comm_free(&subcomm);CHKERRQ(ierr);

  psubcomm->color   = color;
  psubcomm->subsize = subsize;
  psubcomm->type    = PETSC_SUBCOMM_CONTIGUOUS;
  PetscFunctionReturn(0);
}
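/*
   Worked example (illustrative, derived from the arithmetic above): size=5,
   nsubcomm=2 gives np_subcomm=2, nleftover=1, so subsize = {3,2} and the
   contiguous split is

      rank:     [0]  [1]  [2]  [3]  [4]
      color:     0    0    0    1    1
      subrank:   0    1    2    0    1
      duprank:   0    1    2    3    4   (identical to rank, so dupparent keeps the parent ordering)
*/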

/*
   Note:
   In PCREDUNDANT, to avoid data scattering from subcomm back to the original comm, we create the subcommunicators
   by assigning the processes to subcommunicators in a round-robin (interlaced) fashion.
   Example: size=4, nsubcomm=(*psubcomm)->n=3
     comm=(*psubcomm)->parent:
      rank:     [0]  [1]  [2]  [3]
      color:     0    1    2    0

     subcomm=(*psubcomm)->child:
      subrank:  [0]  [0]  [0]  [1]

     dupcomm=(*psubcomm)->dupparent:
      duprank:  [0]  [2]  [3]  [1]

     Here, subcomm[color = 0] has subsize=2, owns processes [0] and [3]
           subcomm[color = 1] has subsize=1, owns process [1]
           subcomm[color = 2] has subsize=1, owns process [2]
           dupcomm has the same number of processes as comm, and its duprank maps
           the processes in subcomm contiguously into a 1d array:
            duprank: [0] [1]      [2]         [3]
            rank:    [0] [3]      [1]         [2]
                    subcomm[0] subcomm[1]  subcomm[2]
*/

PetscErrorCode PetscSubcommCreate_interlaced(PetscSubcomm psubcomm)
{
  PetscErrorCode ierr;
  PetscMPIInt    rank,size,*subsize,duprank,subrank;
  PetscMPIInt    np_subcomm,nleftover,i,color,nsubcomm=psubcomm->n;
  MPI_Comm       subcomm=MPI_COMM_NULL,dupcomm=MPI_COMM_NULL,comm=psubcomm->parent;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);

  /* get the size of each subcommunicator */
  ierr = PetscMalloc1(1+nsubcomm,&subsize);CHKERRQ(ierr);

  np_subcomm = size/nsubcomm;
  nleftover  = size - nsubcomm*np_subcomm;
  for (i=0; i<nsubcomm; i++) {
    subsize[i] = np_subcomm;
    if (i<nleftover) subsize[i]++; /* the first nleftover subcommunicators get one extra process */
  }

  /* find the color and subrank of this process: ranks are dealt out round-robin */
  color   = rank%nsubcomm;
  subrank = rank/nsubcomm;

  ierr = MPI_Comm_split(comm,color,subrank,&subcomm);CHKERRQ(ierr);

  /* duprank = (sum of the sizes of the lower-colored subcommunicators) + subrank */
  duprank = 0;
  for (i=0; i<nsubcomm; i++) {
    if (i == color) {
      duprank += subrank;
      break;
    }
    duprank += subsize[i];
  }

  /* create dupcomm with the same size as comm, but whose rank, duprank, maps the subcommunicators' ranks contiguously into dupcomm */
  ierr = MPI_Comm_split(comm,0,duprank,&dupcomm);CHKERRQ(ierr);
  ierr = PetscCommDuplicate(dupcomm,&psubcomm->dupparent,NULL);CHKERRQ(ierr);
  ierr = PetscCommDuplicate(subcomm,&psubcomm->child,NULL);CHKERRQ(ierr);
  ierr = MPI_Comm_free(&dupcomm);CHKERRQ(ierr);
  ierr = MPI_Comm_free(&subcomm);CHKERRQ(ierr);

  psubcomm->color   = color;
  psubcomm->subsize = subsize;
  psubcomm->type    = PETSC_SUBCOMM_INTERLACED;
  PetscFunctionReturn(0);
}
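/*
   Worked example (illustrative, derived from the arithmetic above): size=5,
   nsubcomm=2 gives subsize = {3,2}; color=rank%2 and subrank=rank/2 deal the
   ranks out as

      rank:     [0]  [1]  [2]  [3]  [4]
      color:     0    1    0    1    0
      subrank:   0    0    1    1    2
      duprank:   0    3    1    4    2   (subcomm[0]'s ranks occupy dupranks 0..2, subcomm[1]'s 3..4)
*/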