Actual source code: isltog.c

petsc-3.14.6 2021-03-30

  2: #include <petsc/private/isimpl.h>
  3: #include <petsc/private/hashmapi.h>
  4: #include <petscsf.h>
  5: #include <petscviewer.h>

  7: PetscClassId IS_LTOGM_CLASSID;
  8: static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping,PetscInt*,PetscInt**,PetscInt**,PetscInt***);

 10: typedef struct {
 11:   PetscInt *globals;
 12: } ISLocalToGlobalMapping_Basic;

 14: typedef struct {
 15:   PetscHMapI globalht;
 16: } ISLocalToGlobalMapping_Hash;


 19: PetscErrorCode ISGetPointRange(IS pointIS, PetscInt *pStart, PetscInt *pEnd, const PetscInt **points)
 20: {
 21:   PetscInt       numCells, step = 1;
 22:   PetscBool      isStride;

 26:   *pStart = 0;
 27:   *points = NULL;
 28:   ISGetLocalSize(pointIS, &numCells);
 29:   PetscObjectTypeCompare((PetscObject) pointIS, ISSTRIDE, &isStride);
 30:   if (isStride) {ISStrideGetInfo(pointIS, pStart, &step);}
 31:   *pEnd   = *pStart + numCells;
 32:   if (!isStride || step != 1) {ISGetIndices(pointIS, points);}
 33:   return(0);
 34: }

 36: PetscErrorCode ISRestorePointRange(IS pointIS, PetscInt *pStart, PetscInt *pEnd, const PetscInt **points)
 37: {
 38:   PetscInt       step = 1;
 39:   PetscBool      isStride;

 43:   PetscObjectTypeCompare((PetscObject) pointIS, ISSTRIDE, &isStride);
 44:   if (isStride) {ISStrideGetInfo(pointIS, pStart, &step);}
 45:   if (!isStride || step != 1) {ISGetIndices(pointIS, points);}
 46:   return(0);
 47: }

 49: PetscErrorCode ISGetPointSubrange(IS subpointIS, PetscInt pStart, PetscInt pEnd, const PetscInt *points)
 50: {

 54:   if (points) {
 55:     ISSetType(subpointIS, ISGENERAL);
 56:     ISGeneralSetIndices(subpointIS, pEnd-pStart, &points[pStart], PETSC_USE_POINTER);
 57:   } else {
 58:     ISSetType(subpointIS, ISSTRIDE);
 59:     ISStrideSetStride(subpointIS, pEnd-pStart, pStart, 1);
 60:   }
 61:   return(0);
 62: }

 64: /* -----------------------------------------------------------------------------------------*/

 66: /*
 67:     Creates the global mapping information in the ISLocalToGlobalMapping structure

 69:     If the user has not selected how to handle the global to local mapping then use HASH for "large" problems
 70: */
 71: static PetscErrorCode ISGlobalToLocalMappingSetUp(ISLocalToGlobalMapping mapping)
 72: {
 73:   PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start;

 77:   if (mapping->data) return(0);
 78:   end   = 0;
 79:   start = PETSC_MAX_INT;

 81:   for (i=0; i<n; i++) {
 82:     if (idx[i] < 0) continue;
 83:     if (idx[i] < start) start = idx[i];
 84:     if (idx[i] > end)   end   = idx[i];
 85:   }
 86:   if (start > end) {start = 0; end = -1;}
 87:   mapping->globalstart = start;
 88:   mapping->globalend   = end;
 89:   if (!((PetscObject)mapping)->type_name) {
 90:     if ((end - start) > PetscMax(4*n,1000000)) {
 91:       ISLocalToGlobalMappingSetType(mapping,ISLOCALTOGLOBALMAPPINGHASH);
 92:     } else {
 93:       ISLocalToGlobalMappingSetType(mapping,ISLOCALTOGLOBALMAPPINGBASIC);
 94:     }
 95:   }
 96:   (*mapping->ops->globaltolocalmappingsetup)(mapping);
 97:   return(0);
 98: }

100: static PetscErrorCode ISGlobalToLocalMappingSetUp_Basic(ISLocalToGlobalMapping mapping)
101: {
102:   PetscErrorCode              ierr;
103:   PetscInt                    i,*idx = mapping->indices,n = mapping->n,end,start,*globals;
104:   ISLocalToGlobalMapping_Basic *map;

107:   start            = mapping->globalstart;
108:   end              = mapping->globalend;
109:   PetscNew(&map);
110:   PetscMalloc1(end-start+2,&globals);
111:   map->globals     = globals;
112:   for (i=0; i<end-start+1; i++) globals[i] = -1;
113:   for (i=0; i<n; i++) {
114:     if (idx[i] < 0) continue;
115:     globals[idx[i] - start] = i;
116:   }
117:   mapping->data = (void*)map;
118:   PetscLogObjectMemory((PetscObject)mapping,(end-start+1)*sizeof(PetscInt));
119:   return(0);
120: }

122: static PetscErrorCode ISGlobalToLocalMappingSetUp_Hash(ISLocalToGlobalMapping mapping)
123: {
124:   PetscErrorCode              ierr;
125:   PetscInt                    i,*idx = mapping->indices,n = mapping->n;
126:   ISLocalToGlobalMapping_Hash *map;

129:   PetscNew(&map);
130:   PetscHMapICreate(&map->globalht);
131:   for (i=0; i<n; i++) {
132:     if (idx[i] < 0) continue;
133:     PetscHMapISet(map->globalht,idx[i],i);
134:   }
135:   mapping->data = (void*)map;
136:   PetscLogObjectMemory((PetscObject)mapping,2*n*sizeof(PetscInt));
137:   return(0);
138: }

140: static PetscErrorCode ISLocalToGlobalMappingDestroy_Basic(ISLocalToGlobalMapping mapping)
141: {
142:   PetscErrorCode              ierr;
143:   ISLocalToGlobalMapping_Basic *map  = (ISLocalToGlobalMapping_Basic *)mapping->data;

146:   if (!map) return(0);
147:   PetscFree(map->globals);
148:   PetscFree(mapping->data);
149:   return(0);
150: }

152: static PetscErrorCode ISLocalToGlobalMappingDestroy_Hash(ISLocalToGlobalMapping mapping)
153: {
154:   PetscErrorCode              ierr;
155:   ISLocalToGlobalMapping_Hash *map  = (ISLocalToGlobalMapping_Hash*)mapping->data;

158:   if (!map) return(0);
159:   PetscHMapIDestroy(&map->globalht);
160:   PetscFree(mapping->data);
161:   return(0);
162: }

164: #define GTOLTYPE _Basic
165: #define GTOLNAME _Basic
166: #define GTOLBS mapping->bs
167: #define GTOL(g, local) do {                  \
168:     local = map->globals[g/bs - start];      \
169:     if (local >= 0) local = bs*local + (g % bs); \
170:   } while (0)

172: #include <../src/vec/is/utils/isltog.h>

174: #define GTOLTYPE _Basic
175: #define GTOLNAME Block_Basic
176: #define GTOLBS 1
177: #define GTOL(g, local) do {                  \
178:     local = map->globals[g - start];         \
179:   } while (0)
180: #include <../src/vec/is/utils/isltog.h>

182: #define GTOLTYPE _Hash
183: #define GTOLNAME _Hash
184: #define GTOLBS mapping->bs
185: #define GTOL(g, local) do {                         \
186:     (void)PetscHMapIGet(map->globalht,g/bs,&local); \
187:     if (local >= 0) local = bs*local + (g % bs);   \
188:    } while (0)
189: #include <../src/vec/is/utils/isltog.h>

191: #define GTOLTYPE _Hash
192: #define GTOLNAME Block_Hash
193: #define GTOLBS 1
194: #define GTOL(g, local) do {                         \
195:     (void)PetscHMapIGet(map->globalht,g,&local);    \
196:   } while (0)
197: #include <../src/vec/is/utils/isltog.h>

199: /*@
200:     ISLocalToGlobalMappingDuplicate - Duplicates the local to global mapping object

202:     Not Collective

204:     Input Parameter:
205: .   ltog - local to global mapping

207:     Output Parameter:
208: .   nltog - the duplicated local to global mapping

210:     Level: advanced

212: .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
213: @*/
214: PetscErrorCode  ISLocalToGlobalMappingDuplicate(ISLocalToGlobalMapping ltog,ISLocalToGlobalMapping* nltog)
215: {

220:   ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)ltog),ltog->bs,ltog->n,ltog->indices,PETSC_COPY_VALUES,nltog);
221:   return(0);
222: }

224: /*@
225:     ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping

227:     Not Collective

229:     Input Parameter:
230: .   ltog - local to global mapping

232:     Output Parameter:
233: .   n - the number of entries in the local mapping; ISLocalToGlobalMappingGetIndices() returns an array of this length

235:     Level: advanced

237: .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
238: @*/
239: PetscErrorCode  ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
240: {
244:   *n = mapping->bs*mapping->n;
245:   return(0);
246: }

248: /*@C
249:    ISLocalToGlobalMappingViewFromOptions - View from Options

251:    Collective on ISLocalToGlobalMapping

253:    Input Parameters:
254: +  A - the local to global mapping object
255: .  obj - Optional object
256: -  name - command line option

258:    Level: intermediate
259: .seealso:  ISLocalToGlobalMapping, ISLocalToGlobalMappingView, PetscObjectViewFromOptions(), ISLocalToGlobalMappingCreate()
260: @*/
261: PetscErrorCode  ISLocalToGlobalMappingViewFromOptions(ISLocalToGlobalMapping A,PetscObject obj,const char name[])
262: {

267:   PetscObjectViewFromOptions((PetscObject)A,obj,name);
268:   return(0);
269: }

271: /*@C
272:     ISLocalToGlobalMappingView - View a local to global mapping

274:     Not Collective

276:     Input Parameters:
277: +   ltog - local to global mapping
278: -   viewer - viewer

280:     Level: advanced

282: .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
283: @*/
284: PetscErrorCode  ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
285: {
286:   PetscInt       i;
287:   PetscMPIInt    rank;
288:   PetscBool      iascii;

293:   if (!viewer) {
294:     PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping),&viewer);
295:   }

298:   MPI_Comm_rank(PetscObjectComm((PetscObject)mapping),&rank);
299:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
300:   if (iascii) {
301:     PetscObjectPrintClassNamePrefixType((PetscObject)mapping,viewer);
302:     PetscViewerASCIIPushSynchronized(viewer);
303:     for (i=0; i<mapping->n; i++) {
304:       PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %D %D\n",rank,i,mapping->indices[i]);
305:     }
306:     PetscViewerFlush(viewer);
307:     PetscViewerASCIIPopSynchronized(viewer);
308:   }
309:   return(0);
310: }
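
/*
   Illustrative usage sketch (not part of the original source): a NULL viewer defaults to the
   ASCII standard output of the mapping's communicator, as handled above.

     ierr = ISLocalToGlobalMappingView(mapping,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
     // or, letting the routine pick the standard output viewer of the mapping's communicator:
     ierr = ISLocalToGlobalMappingView(mapping,NULL);CHKERRQ(ierr);
*/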

312: /*@
313:     ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
314:     ordering and a global parallel ordering.

316:     Not collective

318:     Input Parameter:
319: .   is - index set containing the global numbers for each local number

321:     Output Parameter:
322: .   mapping - new mapping data structure

324:     Notes:
325:     The block size of the IS determines the block size of the mapping.
326:     Level: advanced

328: .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetFromOptions()
329: @*/
330: PetscErrorCode  ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
331: {
333:   PetscInt       n,bs;
334:   const PetscInt *indices;
335:   MPI_Comm       comm;
336:   PetscBool      isblock;


342:   PetscObjectGetComm((PetscObject)is,&comm);
343:   ISGetLocalSize(is,&n);
344:   PetscObjectTypeCompare((PetscObject)is,ISBLOCK,&isblock);
345:   if (!isblock) {
346:     ISGetIndices(is,&indices);
347:     ISLocalToGlobalMappingCreate(comm,1,n,indices,PETSC_COPY_VALUES,mapping);
348:     ISRestoreIndices(is,&indices);
349:   } else {
350:     ISGetBlockSize(is,&bs);
351:     ISBlockGetIndices(is,&indices);
352:     ISLocalToGlobalMappingCreate(comm,bs,n/bs,indices,PETSC_COPY_VALUES,mapping);
353:     ISBlockRestoreIndices(is,&indices);
354:   }
355:   return(0);
356: }
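
/*
   Illustrative usage sketch (not part of the original source): build a mapping from an index
   set holding the global number of each local entry; the index values are example data.

     IS                     is;
     ISLocalToGlobalMapping ltog;
     PetscInt               globals[3] = {10,12,17};

     ierr = ISCreateGeneral(PETSC_COMM_SELF,3,globals,PETSC_COPY_VALUES,&is);CHKERRQ(ierr);
     ierr = ISLocalToGlobalMappingCreateIS(is,&ltog);CHKERRQ(ierr);  // block size 1, inherited from the IS
     ierr = ISDestroy(&is);CHKERRQ(ierr);
     // ... use ltog ...
     ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
*/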

358: /*@C
359:     ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
360:     ordering and a global parallel ordering.

362:     Collective

364:    Input Parameters:
365: +   sf - star forest mapping contiguous local indices to (rank, offset)
366: -   start - first global index on this process

368:     Output Parameter:
369: .   mapping - new mapping data structure

371:     Level: advanced

373: .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingSetFromOptions()
374: @*/
375: PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf,PetscInt start,ISLocalToGlobalMapping *mapping)
376: {
378:   PetscInt       i,maxlocal,nroots,nleaves,*globals,*ltog;
379:   const PetscInt *ilocal;
380:   MPI_Comm       comm;


386:   PetscObjectGetComm((PetscObject)sf,&comm);
387:   PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,NULL);
388:   if (ilocal) {
389:     for (i=0,maxlocal=0; i<nleaves; i++) maxlocal = PetscMax(maxlocal,ilocal[i]+1);
390:   }
391:   else maxlocal = nleaves;
392:   PetscMalloc1(nroots,&globals);
393:   PetscMalloc1(maxlocal,&ltog);
394:   for (i=0; i<nroots; i++) globals[i] = start + i;
395:   for (i=0; i<maxlocal; i++) ltog[i] = -1;
396:   PetscSFBcastBegin(sf,MPIU_INT,globals,ltog);
397:   PetscSFBcastEnd(sf,MPIU_INT,globals,ltog);
398:   ISLocalToGlobalMappingCreate(comm,1,maxlocal,ltog,PETSC_OWN_POINTER,mapping);
399:   PetscFree(globals);
400:   return(0);
401: }
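
/*
   Illustrative usage sketch (not part of the original source): given a PetscSF whose leaves
   reference (rank, offset) pairs on their owning processes, and the first global index owned
   by this process, build the local-to-global mapping. The graph sizes and remote entries
   below are placeholders.

     PetscSF     sf;
     PetscSFNode *remote;
     PetscInt    nroots  = ...;  // number of locally owned (root) indices
     PetscInt    nleaves = ...;  // number of local (leaf) indices, including ghosts
     PetscInt    rstart  = ...;  // first global index owned by this process

     ierr = PetscMalloc1(nleaves,&remote);CHKERRQ(ierr);
     // fill remote[i].rank and remote[i].index with the owner of each leaf ...
     ierr = PetscSFCreate(comm,&sf);CHKERRQ(ierr);
     ierr = PetscSFSetGraph(sf,nroots,nleaves,NULL,PETSC_COPY_VALUES,remote,PETSC_OWN_POINTER);CHKERRQ(ierr);
     ierr = ISLocalToGlobalMappingCreateSF(sf,rstart,&mapping);CHKERRQ(ierr);
     ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
*/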

403: /*@
404:     ISLocalToGlobalMappingSetBlockSize - Sets the blocksize of the mapping

406:     Not collective

408:     Input Parameters:
409: +   mapping - mapping data structure
410: -   bs - the blocksize

412:     Level: advanced

414: .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
415: @*/
416: PetscErrorCode  ISLocalToGlobalMappingSetBlockSize(ISLocalToGlobalMapping mapping,PetscInt bs)
417: {
418:   PetscInt       *nid;
419:   const PetscInt *oid;
420:   PetscInt       i,cn,on,obs,nn;

425:   if (bs < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid block size %D",bs);
426:   if (bs == mapping->bs) return(0);
427:   on  = mapping->n;
428:   obs = mapping->bs;
429:   oid = mapping->indices;
430:   nn  = (on*obs)/bs;
431:   if ((on*obs)%bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block size %D is inconsistent with block size %D and number of block indices %D",bs,obs,on);

433:   PetscMalloc1(nn,&nid);
434:   ISLocalToGlobalMappingGetIndices(mapping,&oid);
435:   for (i=0;i<nn;i++) {
436:     PetscInt j;
437:     for (j=0,cn=0;j<bs-1;j++) {
438:       if (oid[i*bs+j] < 0) { cn++; continue; }
439:       if (oid[i*bs+j] != oid[i*bs+j+1]-1) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %D and %D are incompatible with the block indices: non consecutive indices %D %D",bs,obs,oid[i*bs+j],oid[i*bs+j+1]);
440:     }
441:     if (oid[i*bs+j] < 0) cn++;
442:     if (cn) {
443:       if (cn != bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %D and %D are incompatible with the block indices: invalid number of negative entries in block %D",bs,obs,cn);
444:       nid[i] = -1;
445:     } else {
446:       nid[i] = oid[i*bs]/bs;
447:     }
448:   }
449:   ISLocalToGlobalMappingRestoreIndices(mapping,&oid);

451:   mapping->n           = nn;
452:   mapping->bs          = bs;
453:   PetscFree(mapping->indices);
454:   mapping->indices     = nid;
455:   mapping->globalstart = 0;
456:   mapping->globalend   = 0;

458:   /* reset the cached information */
459:   PetscFree(mapping->info_procs);
460:   PetscFree(mapping->info_numprocs);
461:   if (mapping->info_indices) {
462:     PetscInt i;

464:     PetscFree((mapping->info_indices)[0]);
465:     for (i=1; i<mapping->info_nproc; i++) {
466:       PetscFree(mapping->info_indices[i]);
467:     }
468:     PetscFree(mapping->info_indices);
469:   }
470:   mapping->info_cached = PETSC_FALSE;

472:   if (mapping->ops->destroy) {
473:     (*mapping->ops->destroy)(mapping);
474:   }
475:   return(0);
476: }
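
/*
   Illustrative usage sketch (not part of the original source): a mapping created with block
   size 1 whose indices are consecutive within each prospective block can be compressed to a
   larger block size; the index values are example data.

     ISLocalToGlobalMapping map;
     PetscInt               idx[4] = {4,5,8,9};

     ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,1,4,idx,PETSC_COPY_VALUES,&map);CHKERRQ(ierr);
     ierr = ISLocalToGlobalMappingSetBlockSize(map,2);CHKERRQ(ierr);  // block indices become {2,4}
     ierr = ISLocalToGlobalMappingDestroy(&map);CHKERRQ(ierr);
*/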

478: /*@
479:     ISLocalToGlobalMappingGetBlockSize - Gets the blocksize of the mapping

482:     Not Collective

484:     Input Parameters:
485: .   mapping - mapping data structure

487:     Output Parameter:
488: .   bs - the blocksize

490:     Level: advanced

492: .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
493: @*/
494: PetscErrorCode  ISLocalToGlobalMappingGetBlockSize(ISLocalToGlobalMapping mapping,PetscInt *bs)
495: {
498:   *bs = mapping->bs;
499:   return(0);
500: }

502: /*@
503:     ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
504:     ordering and a global parallel ordering.

506:     Not Collective, but communicator may have more than one process

508:     Input Parameters:
509: +   comm - MPI communicator
510: .   bs - the block size
511: .   n - the number of local elements divided by the block size, or equivalently the number of block indices
512: .   indices - the global index for each local element; these do not need to be sorted in increasing order and should not be scaled (i.e. multiplied) by the blocksize bs
513: -   mode - see PetscCopyMode

515:     Output Parameter:
516: .   mapping - new mapping data structure

518:     Notes:
519:     There is one integer value in indices per block and it represents the actual indices bs*idx + j, where j=0,..,bs-1

521:     For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
522:     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used; this is scalable.
523:     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.

525:     Level: advanced

527: .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingSetFromOptions(), ISLOCALTOGLOBALMAPPINGBASIC, ISLOCALTOGLOBALMAPPINGHASH
528:           ISLocalToGlobalMappingSetType(), ISLocalToGlobalMappingType
529: @*/
530: PetscErrorCode  ISLocalToGlobalMappingCreate(MPI_Comm comm,PetscInt bs,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping)
531: {
533:   PetscInt       *in;


539:   *mapping = NULL;
540:   ISInitializePackage();

542:   PetscHeaderCreate(*mapping,IS_LTOGM_CLASSID,"ISLocalToGlobalMapping","Local to global mapping","IS",
543:                            comm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);
544:   (*mapping)->n             = n;
545:   (*mapping)->bs            = bs;
546:   (*mapping)->info_cached   = PETSC_FALSE;
547:   (*mapping)->info_free     = PETSC_FALSE;
548:   (*mapping)->info_procs    = NULL;
549:   (*mapping)->info_numprocs = NULL;
550:   (*mapping)->info_indices  = NULL;
551:   (*mapping)->info_nodec    = NULL;
552:   (*mapping)->info_nodei    = NULL;

554:   (*mapping)->ops->globaltolocalmappingapply      = NULL;
555:   (*mapping)->ops->globaltolocalmappingapplyblock = NULL;
556:   (*mapping)->ops->destroy                        = NULL;
557:   if (mode == PETSC_COPY_VALUES) {
558:     PetscMalloc1(n,&in);
559:     PetscArraycpy(in,indices,n);
560:     (*mapping)->indices = in;
561:     PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));
562:   } else if (mode == PETSC_OWN_POINTER) {
563:     (*mapping)->indices = (PetscInt*)indices;
564:     PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));
565:   }
566:   else SETERRQ(comm,PETSC_ERR_SUP,"Cannot currently use PETSC_USE_POINTER");
567:   return(0);
568: }
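
/*
   Illustrative usage sketch (not part of the original source): with block size 2 one index is
   stored per block, and block index b stands for the point indices 2*b and 2*b+1; the values
   are example data.

     ISLocalToGlobalMapping map;
     PetscInt               blockidx[2] = {0,3};  // block 0 -> global block 0, block 1 -> global block 3

     ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,2,2,blockidx,PETSC_COPY_VALUES,&map);CHKERRQ(ierr);
     // local point indices {0,1,2,3} now map to global point indices {0,1,6,7}
     ierr = ISLocalToGlobalMappingDestroy(&map);CHKERRQ(ierr);
*/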

570: PetscFunctionList ISLocalToGlobalMappingList = NULL;


573: /*@
574:    ISLocalToGlobalMappingSetFromOptions - Set mapping options from the options database.

576:    Not collective

578:    Input Parameters:
579: .  mapping - mapping data structure

581:    Level: advanced

583: @*/
584: PetscErrorCode ISLocalToGlobalMappingSetFromOptions(ISLocalToGlobalMapping mapping)
585: {
586:   PetscErrorCode             ierr;
587:   char                       type[256];
588:   ISLocalToGlobalMappingType defaulttype = "Not set";
589:   PetscBool                  flg;

593:   ISLocalToGlobalMappingRegisterAll();
594:   PetscObjectOptionsBegin((PetscObject)mapping);
595:   PetscOptionsFList("-islocaltoglobalmapping_type","ISLocalToGlobalMapping method","ISLocalToGlobalMappingSetType",ISLocalToGlobalMappingList,(char*)(((PetscObject)mapping)->type_name) ? ((PetscObject)mapping)->type_name : defaulttype,type,256,&flg);
596:   if (flg) {
597:     ISLocalToGlobalMappingSetType(mapping,type);
598:   }
599:   PetscOptionsEnd();
600:   return(0);
601: }

603: /*@
604:    ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
605:    ordering and a global parallel ordering.

607:    Not Collective

609:    Input Parameters:
610: .  mapping - mapping data structure

612:    Level: advanced

614: .seealso: ISLocalToGlobalMappingCreate()
615: @*/
616: PetscErrorCode  ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
617: {

621:   if (!*mapping) return(0);
623:   if (--((PetscObject)(*mapping))->refct > 0) {*mapping = NULL;return(0);}
624:   PetscFree((*mapping)->indices);
625:   PetscFree((*mapping)->info_procs);
626:   PetscFree((*mapping)->info_numprocs);
627:   if ((*mapping)->info_indices) {
628:     PetscInt i;

630:     PetscFree(((*mapping)->info_indices)[0]);
631:     for (i=1; i<(*mapping)->info_nproc; i++) {
632:       PetscFree(((*mapping)->info_indices)[i]);
633:     }
634:     PetscFree((*mapping)->info_indices);
635:   }
636:   if ((*mapping)->info_nodei) {
637:     PetscFree(((*mapping)->info_nodei)[0]);
638:   }
639:   PetscFree2((*mapping)->info_nodec,(*mapping)->info_nodei);
640:   if ((*mapping)->ops->destroy) {
641:     (*(*mapping)->ops->destroy)(*mapping);
642:   }
643:   PetscHeaderDestroy(mapping);
644:   *mapping = NULL;
645:   return(0);
646: }

648: /*@
649:     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
650:     a new index set using the global numbering defined in an ISLocalToGlobalMapping
651:     context.

653:     Collective on is

655:     Input Parameters:
656: +   mapping - mapping between local and global numbering
657: -   is - index set in local numbering

659:     Output Parameters:
660: .   newis - index set in global numbering

662:     Notes:
663:     The output IS will have the same communicator as the input IS.

665:     Level: advanced

667: .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
668:           ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
669: @*/
670: PetscErrorCode  ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
671: {
673:   PetscInt       n,*idxout;
674:   const PetscInt *idxin;


681:   ISGetLocalSize(is,&n);
682:   ISGetIndices(is,&idxin);
683:   PetscMalloc1(n,&idxout);
684:   ISLocalToGlobalMappingApply(mapping,n,idxin,idxout);
685:   ISRestoreIndices(is,&idxin);
686:   ISCreateGeneral(PetscObjectComm((PetscObject)is),n,idxout,PETSC_OWN_POINTER,newis);
687:   return(0);
688: }

690: /*@
691:    ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
692:    and converts them to the global numbering.

694:    Not collective

696:    Input Parameters:
697: +  mapping - the local to global mapping context
698: .  N - number of integers
699: -  in - input indices in local numbering

701:    Output Parameter:
702: .  out - indices in global numbering

704:    Notes:
705:    The in and out array parameters may be identical.

707:    Level: advanced

709: .seealso: ISLocalToGlobalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
710:           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
711:           AOPetscToApplication(), ISGlobalToLocalMappingApply()

713: @*/
714: PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
715: {
716:   PetscInt i,bs,Nmax;

720:   bs   = mapping->bs;
721:   Nmax = bs*mapping->n;
722:   if (bs == 1) {
723:     const PetscInt *idx = mapping->indices;
724:     for (i=0; i<N; i++) {
725:       if (in[i] < 0) {
726:         out[i] = in[i];
727:         continue;
728:       }
729:       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
730:       out[i] = idx[in[i]];
731:     }
732:   } else {
733:     const PetscInt *idx = mapping->indices;
734:     for (i=0; i<N; i++) {
735:       if (in[i] < 0) {
736:         out[i] = in[i];
737:         continue;
738:       }
739:       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
740:       out[i] = idx[in[i]/bs]*bs + (in[i] % bs);
741:     }
742:   }
743:   return(0);
744: }
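
/*
   Illustrative usage sketch (not part of the original source): negative input indices pass
   through unchanged, all others are translated to the global point numbering. The mapping is
   assumed to have block size 2 and block indices {0,3}, as in the sketch after
   ISLocalToGlobalMappingCreate() above.

     PetscInt in[4] = {0,1,-1,3},out[4];

     ierr = ISLocalToGlobalMappingApply(map,4,in,out);CHKERRQ(ierr);  // out = {0,1,-1,7}
*/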

746: /*@
747:    ISLocalToGlobalMappingApplyBlock - Takes a list of integers in a local block numbering and converts them to the global block numbering

749:    Not collective

751:    Input Parameters:
752: +  mapping - the local to global mapping context
753: .  N - number of integers
754: -  in - input indices in local block numbering

756:    Output Parameter:
757: .  out - indices in global block numbering

759:    Notes:
760:    The in and out array parameters may be identical.

762:    Example:
763:      If the index values are {0,1,6,7} set with a call to ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,2,2,{0,3}) then the mapping applied to 0
764:      (the first block) would produce 0 and the mapping applied to 1 (the second block) would produce 3.

766:    Level: advanced

768: .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
769:           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
770:           AOPetscToApplication(), ISGlobalToLocalMappingApply()

772: @*/
773: PetscErrorCode ISLocalToGlobalMappingApplyBlock(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
774: {

778:   {
779:     PetscInt i,Nmax = mapping->n;
780:     const PetscInt *idx = mapping->indices;

782:     for (i=0; i<N; i++) {
783:       if (in[i] < 0) {
784:         out[i] = in[i];
785:         continue;
786:       }
787:       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local block index %D too large %D (max) at %D",in[i],Nmax-1,i);
788:       out[i] = idx[in[i]];
789:     }
790:   }
791:   return(0);
792: }
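
/*
   Illustrative sketch of the example in the manual page above (not part of the original source):

     ISLocalToGlobalMapping map;
     PetscInt               blockidx[2] = {0,3},bin[2] = {0,1},bout[2];

     ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,2,2,blockidx,PETSC_COPY_VALUES,&map);CHKERRQ(ierr);
     ierr = ISLocalToGlobalMappingApplyBlock(map,2,bin,bout);CHKERRQ(ierr);  // bout = {0,3}
     ierr = ISLocalToGlobalMappingDestroy(&map);CHKERRQ(ierr);
*/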

794: /*@
795:     ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
796:     specified with a global numbering.

798:     Not collective

800:     Input Parameters:
801: +   mapping - mapping between local and global numbering
802: .   type - IS_GTOLM_MASK - maps global indices with no local value to -1 in the output list (i.e., mask them)
803:            IS_GTOLM_DROP - drops the indices with no local value from the output list
804: .   n - number of global indices to map
805: -   idx - global indices to map

807:     Output Parameters:
808: +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
809: -   idxout - local index of each global index, one must pass in an array long enough
810:              to hold all the indices. You can call ISGlobalToLocalMappingApply() with
811:              idxout == NULL to determine the required length (returned in nout)
812:              and then allocate the required space and call ISGlobalToLocalMappingApply()
813:              a second time to set the values.

815:     Notes:
816:     Either nout or idxout may be NULL. idx and idxout may be identical.

818:     For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
819:     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used; this is scalable.
820:     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.

822:     Level: advanced

824:     Developer Note: The manual page states that idx and idxout may be identical but the calling
825:        sequence declares idx as const so it cannot be the same as idxout.

827: .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),
828:           ISLocalToGlobalMappingDestroy()
829: @*/
830: PetscErrorCode  ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
831: {

836:   if (!mapping->data) {
837:     ISGlobalToLocalMappingSetUp(mapping);
838:   }
839:   (*mapping->ops->globaltolocalmappingapply)(mapping,type,n,idx,nout,idxout);
840:   return(0);
841: }
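
/*
   Illustrative usage sketch (not part of the original source): with IS_GTOLM_DROP the routine
   can be called twice, first with idxout == NULL to learn the required length, as described in
   the manual page above; nglobal and globals are placeholders for the caller's data.

     PetscInt nout,*local;

     ierr = ISGlobalToLocalMappingApply(mapping,IS_GTOLM_DROP,nglobal,globals,&nout,NULL);CHKERRQ(ierr);
     ierr = PetscMalloc1(nout,&local);CHKERRQ(ierr);
     ierr = ISGlobalToLocalMappingApply(mapping,IS_GTOLM_DROP,nglobal,globals,&nout,local);CHKERRQ(ierr);
     // ... use local[0..nout-1] ...
     ierr = PetscFree(local);CHKERRQ(ierr);
*/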

843: /*@
844:     ISGlobalToLocalMappingApplyIS - Creates from an IS in the global numbering
845:     a new index set using the local numbering defined in an ISLocalToGlobalMapping
846:     context.

848:     Not collective

850:     Input Parameters:
851: +   mapping - mapping between local and global numbering
852: .   type - IS_GTOLM_MASK - maps global indices with no local value to -1 in the output list (i.e., mask them)
853:            IS_GTOLM_DROP - drops the indices with no local value from the output list
854: -   is - index set in global numbering

856:     Output Parameters:
857: .   newis - index set in local numbering

859:     Notes:
860:     The output IS will be sequential, as it encodes a purely local operation

862:     Level: advanced

864: .seealso: ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
865:           ISLocalToGlobalMappingDestroy()
866: @*/
867: PetscErrorCode  ISGlobalToLocalMappingApplyIS(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,IS is,IS *newis)
868: {
870:   PetscInt       n,nout,*idxout;
871:   const PetscInt *idxin;


878:   ISGetLocalSize(is,&n);
879:   ISGetIndices(is,&idxin);
880:   if (type == IS_GTOLM_MASK) {
881:     PetscMalloc1(n,&idxout);
882:   } else {
883:     ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,NULL);
884:     PetscMalloc1(nout,&idxout);
885:   }
886:   ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,idxout);
887:   ISRestoreIndices(is,&idxin);
888:   ISCreateGeneral(PETSC_COMM_SELF,nout,idxout,PETSC_OWN_POINTER,newis);
889:   return(0);
890: }

892: /*@
893:     ISGlobalToLocalMappingApplyBlock - Provides the local block numbering for a list of integers
894:     specified with a block global numbering.

896:     Not collective

898:     Input Parameters:
899: +   mapping - mapping between local and global numbering
900: .   type - IS_GTOLM_MASK - maps global indices with no local value to -1 in the output list (i.e., mask them)
901:            IS_GTOLM_DROP - drops the indices with no local value from the output list
902: .   n - number of global indices to map
903: -   idx - global indices to map

905:     Output Parameters:
906: +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
907: -   idxout - local index of each global index, one must pass in an array long enough
908:              to hold all the indices. You can call ISGlobalToLocalMappingApplyBlock() with
909:              idxout == NULL to determine the required length (returned in nout)
910:              and then allocate the required space and call ISGlobalToLocalMappingApplyBlock()
911:              a second time to set the values.

913:     Notes:
914:     Either nout or idxout may be NULL. idx and idxout may be identical.

916:     For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
917:     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used; this is scalable.
918:     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.

920:     Level: advanced

922:     Developer Note: The manual page states that idx and idxout may be identical but the calling
923:        sequence declares idx as const so it cannot be the same as idxout.

925: .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
926:           ISLocalToGlobalMappingDestroy()
927: @*/
928: PetscErrorCode  ISGlobalToLocalMappingApplyBlock(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,
929:                                                  PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
930: {

935:   if (!mapping->data) {
936:     ISGlobalToLocalMappingSetUp(mapping);
937:   }
938:   (*mapping->ops->globaltolocalmappingapplyblock)(mapping,type,n,idx,nout,idxout);
939:   return(0);
940: }


943: /*@C
944:     ISLocalToGlobalMappingGetBlockInfo - Gets the neighbor information for each processor and
945:      each index shared by more than one processor

947:     Collective on ISLocalToGlobalMapping

949:     Input Parameters:
950: .   mapping - the mapping from local to global indexing

952:     Output Parameters:
953: +   nproc - number of processors that are connected to this one
954: .   proc - neighboring processors
955: .   numproc - number of indices for each subdomain (processor)
956: -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)

958:     Level: advanced

960:     Fortran Usage:
961: $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
962: $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
963:           PetscInt indices[nproc][numprocmax],ierr)
964:         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
965:         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.


968: .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
969:           ISLocalToGlobalMappingRestoreInfo()
970: @*/
971: PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
972: {

977:   if (mapping->info_cached) {
978:     *nproc    = mapping->info_nproc;
979:     *procs    = mapping->info_procs;
980:     *numprocs = mapping->info_numprocs;
981:     *indices  = mapping->info_indices;
982:   } else {
983:     ISLocalToGlobalMappingGetBlockInfo_Private(mapping,nproc,procs,numprocs,indices);
984:   }
985:   return(0);
986: }

988: static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
989: {
991:   PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
992:   PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
993:   PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
994:   PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
995:   PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
996:   PetscInt       first_procs,first_numprocs,*first_indices;
997:   MPI_Request    *recv_waits,*send_waits;
998:   MPI_Status     recv_status,*send_status,*recv_statuses;
999:   MPI_Comm       comm;
1000:   PetscBool      debug = PETSC_FALSE;

1003:   PetscObjectGetComm((PetscObject)mapping,&comm);
1004:   MPI_Comm_size(comm,&size);
1005:   MPI_Comm_rank(comm,&rank);
1006:   if (size == 1) {
1007:     *nproc         = 0;
1008:     *procs         = NULL;
1009:     PetscNew(numprocs);
1010:     (*numprocs)[0] = 0;
1011:     PetscNew(indices);
1012:     (*indices)[0]  = NULL;
1013:     /* save info for reuse */
1014:     mapping->info_nproc = *nproc;
1015:     mapping->info_procs = *procs;
1016:     mapping->info_numprocs = *numprocs;
1017:     mapping->info_indices = *indices;
1018:     mapping->info_cached = PETSC_TRUE;
1019:     return(0);
1020:   }

1022:   PetscOptionsGetBool(((PetscObject)mapping)->options,NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,NULL);

1024:   /*
1025:     Notes on ISLocalToGlobalMappingGetBlockInfo

1027:     globally owned node - the nodes that have been assigned to this processor in global
1028:            numbering, just for this routine.

1030:     nontrivial globally owned node - node assigned to this processor that is on a subdomain
1031:            boundary (i.e. it has more than one local owner)

1033:     locally owned node - node that exists on this processor's subdomain

1035:     nontrivial locally owned node - node that is not in the interior (i.e. is shared by more than one
1036:            local subdomain)
1037:   */
1038:   PetscObjectGetNewTag((PetscObject)mapping,&tag1);
1039:   PetscObjectGetNewTag((PetscObject)mapping,&tag2);
1040:   PetscObjectGetNewTag((PetscObject)mapping,&tag3);

1042:   for (i=0; i<n; i++) {
1043:     if (lindices[i] > max) max = lindices[i];
1044:   }
1045:   MPIU_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);
1046:   Ng++;
1047:   MPI_Comm_size(comm,&size);
1048:   MPI_Comm_rank(comm,&rank);
1049:   scale  = Ng/size + 1;
1050:   ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
1051:   rstart = scale*rank;

1053:   /* determine ownership ranges of global indices */
1054:   PetscMalloc1(2*size,&nprocs);
1055:   PetscArrayzero(nprocs,2*size);

1057:   /* determine owners of each local node  */
1058:   PetscMalloc1(n,&owner);
1059:   for (i=0; i<n; i++) {
1060:     proc             = lindices[i]/scale; /* processor that globally owns this index */
1061:     nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
1062:     owner[i]         = proc;
1063:     nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
1064:   }
1065:   nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
1066:   PetscInfo1(mapping,"Number of global owners for my local data %D\n",nsends);

1068:   /* inform other processors of number of messages and max length*/
1069:   PetscMaxSum(comm,nprocs,&nmax,&nrecvs);
1070:   PetscInfo1(mapping,"Number of local owners for my global data %D\n",nrecvs);

1072:   /* post receives for owned rows */
1073:   PetscMalloc1((2*nrecvs+1)*(nmax+1),&recvs);
1074:   PetscMalloc1(nrecvs+1,&recv_waits);
1075:   for (i=0; i<nrecvs; i++) {
1076:     MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);
1077:   }

1079:   /* pack messages containing lists of local nodes to owners */
1080:   PetscMalloc1(2*n+1,&sends);
1081:   PetscMalloc1(size+1,&starts);
1082:   starts[0] = 0;
1083:   for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
1084:   for (i=0; i<n; i++) {
1085:     sends[starts[owner[i]]++] = lindices[i];
1086:     sends[starts[owner[i]]++] = i;
1087:   }
1088:   PetscFree(owner);
1089:   starts[0] = 0;
1090:   for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];

1092:   /* send the messages */
1093:   PetscMalloc1(nsends+1,&send_waits);
1094:   PetscMalloc1(nsends+1,&dest);
1095:   cnt = 0;
1096:   for (i=0; i<size; i++) {
1097:     if (nprocs[2*i]) {
1098:       MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);
1099:       dest[cnt] = i;
1100:       cnt++;
1101:     }
1102:   }
1103:   PetscFree(starts);

1105:   /* wait on receives */
1106:   PetscMalloc1(nrecvs+1,&source);
1107:   PetscMalloc1(nrecvs+1,&len);
1108:   cnt  = nrecvs;
1109:   PetscCalloc1(ng+1,&nownedsenders);
1110:   while (cnt) {
1111:     MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);
1112:     /* unpack receives into our local space */
1113:     MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);
1114:     source[imdex] = recv_status.MPI_SOURCE;
1115:     len[imdex]    = len[imdex]/2;
1116:     /* count how many local owners for each of my global owned indices */
1117:     for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
1118:     cnt--;
1119:   }
1120:   PetscFree(recv_waits);

1122:   /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
1123:   nowned  = 0;
1124:   nownedm = 0;
1125:   for (i=0; i<ng; i++) {
1126:     if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
1127:   }

1129:   /* create single array to contain rank of all local owners of each globally owned index */
1130:   PetscMalloc1(nownedm+1,&ownedsenders);
1131:   PetscMalloc1(ng+1,&starts);
1132:   starts[0] = 0;
1133:   for (i=1; i<ng; i++) {
1134:     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
1135:     else starts[i] = starts[i-1];
1136:   }

1138:   /* for each nontrivial globally owned node list all arriving processors */
1139:   for (i=0; i<nrecvs; i++) {
1140:     for (j=0; j<len[i]; j++) {
1141:       node = recvs[2*i*nmax+2*j]-rstart;
1142:       if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
1143:     }
1144:   }

1146:   if (debug) { /* -----------------------------------  */
1147:     starts[0] = 0;
1148:     for (i=1; i<ng; i++) {
1149:       if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
1150:       else starts[i] = starts[i-1];
1151:     }
1152:     for (i=0; i<ng; i++) {
1153:       if (nownedsenders[i] > 1) {
1154:         PetscSynchronizedPrintf(comm,"[%d] global node %D local owner processors: ",rank,i+rstart);
1155:         for (j=0; j<nownedsenders[i]; j++) {
1156:           PetscSynchronizedPrintf(comm,"%D ",ownedsenders[starts[i]+j]);
1157:         }
1158:         PetscSynchronizedPrintf(comm,"\n");
1159:       }
1160:     }
1161:     PetscSynchronizedFlush(comm,PETSC_STDOUT);
1162:   } /* -----------------------------------  */

1164:   /* wait on original sends */
1165:   if (nsends) {
1166:     PetscMalloc1(nsends,&send_status);
1167:     MPI_Waitall(nsends,send_waits,send_status);
1168:     PetscFree(send_status);
1169:   }
1170:   PetscFree(send_waits);
1171:   PetscFree(sends);
1172:   PetscFree(nprocs);

1174:   /* pack messages to send back to local owners */
1175:   starts[0] = 0;
1176:   for (i=1; i<ng; i++) {
1177:     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
1178:     else starts[i] = starts[i-1];
1179:   }
1180:   nsends2 = nrecvs;
1181:   PetscMalloc1(nsends2+1,&nprocs); /* length of each message */
1182:   for (i=0; i<nrecvs; i++) {
1183:     nprocs[i] = 1;
1184:     for (j=0; j<len[i]; j++) {
1185:       node = recvs[2*i*nmax+2*j]-rstart;
1186:       if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
1187:     }
1188:   }
1189:   nt = 0;
1190:   for (i=0; i<nsends2; i++) nt += nprocs[i];

1192:   PetscMalloc1(nt+1,&sends2);
1193:   PetscMalloc1(nsends2+1,&starts2);

1195:   starts2[0] = 0;
1196:   for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
1197:   /*
1198:      Each message is 1 + nprocs[i] long, and consists of
1199:        (0) the number of nodes being sent back
1200:        (1) the local node number,
1201:        (2) the number of processors sharing it,
1202:        (3) the processors sharing it
1203:   */
1204:   for (i=0; i<nsends2; i++) {
1205:     cnt = 1;
1206:     sends2[starts2[i]] = 0;
1207:     for (j=0; j<len[i]; j++) {
1208:       node = recvs[2*i*nmax+2*j]-rstart;
1209:       if (nownedsenders[node] > 1) {
1210:         sends2[starts2[i]]++;
1211:         sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
1212:         sends2[starts2[i]+cnt++] = nownedsenders[node];
1213:         PetscArraycpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]);
1214:         cnt += nownedsenders[node];
1215:       }
1216:     }
1217:   }

1219:   /* receive the message lengths */
1220:   nrecvs2 = nsends;
1221:   PetscMalloc1(nrecvs2+1,&lens2);
1222:   PetscMalloc1(nrecvs2+1,&starts3);
1223:   PetscMalloc1(nrecvs2+1,&recv_waits);
1224:   for (i=0; i<nrecvs2; i++) {
1225:     MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);
1226:   }

1228:   /* send the message lengths */
1229:   for (i=0; i<nsends2; i++) {
1230:     MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);
1231:   }

1233:   /* wait on receives of lens */
1234:   if (nrecvs2) {
1235:     PetscMalloc1(nrecvs2,&recv_statuses);
1236:     MPI_Waitall(nrecvs2,recv_waits,recv_statuses);
1237:     PetscFree(recv_statuses);
1238:   }
1239:   PetscFree(recv_waits);

1241:   starts3[0] = 0;
1242:   nt         = 0;
1243:   for (i=0; i<nrecvs2-1; i++) {
1244:     starts3[i+1] = starts3[i] + lens2[i];
1245:     nt          += lens2[i];
1246:   }
1247:   if (nrecvs2) nt += lens2[nrecvs2-1];

1249:   PetscMalloc1(nt+1,&recvs2);
1250:   PetscMalloc1(nrecvs2+1,&recv_waits);
1251:   for (i=0; i<nrecvs2; i++) {
1252:     MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);
1253:   }

1255:   /* send the messages */
1256:   PetscMalloc1(nsends2+1,&send_waits);
1257:   for (i=0; i<nsends2; i++) {
1258:     MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);
1259:   }

1261:   /* wait on receives */
1262:   if (nrecvs2) {
1263:     PetscMalloc1(nrecvs2,&recv_statuses);
1264:     MPI_Waitall(nrecvs2,recv_waits,recv_statuses);
1265:     PetscFree(recv_statuses);
1266:   }
1267:   PetscFree(recv_waits);
1268:   PetscFree(nprocs);

1270:   if (debug) { /* -----------------------------------  */
1271:     cnt = 0;
1272:     for (i=0; i<nrecvs2; i++) {
1273:       nt = recvs2[cnt++];
1274:       for (j=0; j<nt; j++) {
1275:         PetscSynchronizedPrintf(comm,"[%d] local node %D number of subdomains %D: ",rank,recvs2[cnt],recvs2[cnt+1]);
1276:         for (k=0; k<recvs2[cnt+1]; k++) {
1277:           PetscSynchronizedPrintf(comm,"%D ",recvs2[cnt+2+k]);
1278:         }
1279:         cnt += 2 + recvs2[cnt+1];
1280:         PetscSynchronizedPrintf(comm,"\n");
1281:       }
1282:     }
1283:     PetscSynchronizedFlush(comm,PETSC_STDOUT);
1284:   } /* -----------------------------------  */

1286:   /* count the number of subdomains for each local node */
1287:   PetscCalloc1(size,&nprocs);
1288:   cnt  = 0;
1289:   for (i=0; i<nrecvs2; i++) {
1290:     nt = recvs2[cnt++];
1291:     for (j=0; j<nt; j++) {
1292:       for (k=0; k<recvs2[cnt+1]; k++) nprocs[recvs2[cnt+2+k]]++;
1293:       cnt += 2 + recvs2[cnt+1];
1294:     }
1295:   }
1296:   nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
1297:   *nproc    = nt;
1298:   PetscMalloc1(nt+1,procs);
1299:   PetscMalloc1(nt+1,numprocs);
1300:   PetscMalloc1(nt+1,indices);
1301:   for (i=0;i<nt+1;i++) (*indices)[i]=NULL;
1302:   PetscMalloc1(size,&bprocs);
1303:   cnt  = 0;
1304:   for (i=0; i<size; i++) {
1305:     if (nprocs[i] > 0) {
1306:       bprocs[i]        = cnt;
1307:       (*procs)[cnt]    = i;
1308:       (*numprocs)[cnt] = nprocs[i];
1309:       PetscMalloc1(nprocs[i],&(*indices)[cnt]);
1310:       cnt++;
1311:     }
1312:   }

1314:   /* make the list of subdomains for each nontrivial local node */
1315:   PetscArrayzero(*numprocs,nt);
1316:   cnt  = 0;
1317:   for (i=0; i<nrecvs2; i++) {
1318:     nt = recvs2[cnt++];
1319:     for (j=0; j<nt; j++) {
1320:       for (k=0; k<recvs2[cnt+1]; k++) (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
1321:       cnt += 2 + recvs2[cnt+1];
1322:     }
1323:   }
1324:   PetscFree(bprocs);
1325:   PetscFree(recvs2);

1327:   /* sort the node indices by their global numbers */
1328:   nt = *nproc;
1329:   for (i=0; i<nt; i++) {
1330:     PetscMalloc1((*numprocs)[i],&tmp);
1331:     for (j=0; j<(*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
1332:     PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);
1333:     PetscFree(tmp);
1334:   }

1336:   if (debug) { /* -----------------------------------  */
1337:     nt = *nproc;
1338:     for (i=0; i<nt; i++) {
1339:       PetscSynchronizedPrintf(comm,"[%d] subdomain %D number of indices %D: ",rank,(*procs)[i],(*numprocs)[i]);
1340:       for (j=0; j<(*numprocs)[i]; j++) {
1341:         PetscSynchronizedPrintf(comm,"%D ",(*indices)[i][j]);
1342:       }
1343:       PetscSynchronizedPrintf(comm,"\n");
1344:     }
1345:     PetscSynchronizedFlush(comm,PETSC_STDOUT);
1346:   } /* -----------------------------------  */

1348:   /* wait on sends */
1349:   if (nsends2) {
1350:     PetscMalloc1(nsends2,&send_status);
1351:     MPI_Waitall(nsends2,send_waits,send_status);
1352:     PetscFree(send_status);
1353:   }

1355:   PetscFree(starts3);
1356:   PetscFree(dest);
1357:   PetscFree(send_waits);

1359:   PetscFree(nownedsenders);
1360:   PetscFree(ownedsenders);
1361:   PetscFree(starts);
1362:   PetscFree(starts2);
1363:   PetscFree(lens2);

1365:   PetscFree(source);
1366:   PetscFree(len);
1367:   PetscFree(recvs);
1368:   PetscFree(nprocs);
1369:   PetscFree(sends2);

1371:   /* put the information about myself as the first entry in the list */
1372:   first_procs    = (*procs)[0];
1373:   first_numprocs = (*numprocs)[0];
1374:   first_indices  = (*indices)[0];
1375:   for (i=0; i<*nproc; i++) {
1376:     if ((*procs)[i] == rank) {
1377:       (*procs)[0]    = (*procs)[i];
1378:       (*numprocs)[0] = (*numprocs)[i];
1379:       (*indices)[0]  = (*indices)[i];
1380:       (*procs)[i]    = first_procs;
1381:       (*numprocs)[i] = first_numprocs;
1382:       (*indices)[i]  = first_indices;
1383:       break;
1384:     }
1385:   }

1387:   /* save info for reuse */
1388:   mapping->info_nproc = *nproc;
1389:   mapping->info_procs = *procs;
1390:   mapping->info_numprocs = *numprocs;
1391:   mapping->info_indices = *indices;
1392:   mapping->info_cached = PETSC_TRUE;
1393:   return(0);
1394: }

1396: /*@C
1397:     ISLocalToGlobalMappingRestoreBlockInfo - Frees the memory allocated by ISLocalToGlobalMappingGetBlockInfo()

1399:     Collective on ISLocalToGlobalMapping

1401:     Input Parameters:
1402: .   mapping - the mapping from local to global indexing

1404:     Output Parameters:
1405: +   nproc - number of processors that are connected to this one
1406: .   proc - neighboring processors
1407: .   numproc - number of indices for each processor
1408: -   indices - indices of local nodes shared with neighbor (sorted by global numbering)

1410:     Level: advanced

1412: .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1413:           ISLocalToGlobalMappingGetInfo()
1414: @*/
1415: PetscErrorCode  ISLocalToGlobalMappingRestoreBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1416: {

1421:   if (mapping->info_free) {
1422:     PetscFree(*numprocs);
1423:     if (*indices) {
1424:       PetscInt i;

1426:       PetscFree((*indices)[0]);
1427:       for (i=1; i<*nproc; i++) {
1428:         PetscFree((*indices)[i]);
1429:       }
1430:       PetscFree(*indices);
1431:     }
1432:   }
1433:   *nproc    = 0;
1434:   *procs    = NULL;
1435:   *numprocs = NULL;
1436:   *indices  = NULL;
1437:   return(0);
1438: }

1440: /*@C
1441:     ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
1442:      each index shared by more than one processor

1444:     Collective on ISLocalToGlobalMapping

1446:     Input Parameters:
1447: .   mapping - the mapping from local to global indexing

1449:     Output Parameters:
1450: +   nproc - number of processors that are connected to this one
1451: .   proc - neighboring processors
1452: .   numproc - number of indices for each subdomain (processor)
1453: -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)

1455:     Level: advanced

1457:     Notes: The user needs to call ISLocalToGlobalMappingRestoreInfo() when the data is no longer needed.

1459:     Fortran Usage:
1460: $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
1461: $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
1462:           PetscInt indices[nproc][numprocmax],ierr)
1463:         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
1464:         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.


1467: .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1468:           ISLocalToGlobalMappingRestoreInfo()
1469: @*/
1470: PetscErrorCode  ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1471: {
1473:   PetscInt       **bindices = NULL,*bnumprocs = NULL,bs = mapping->bs,i,j,k;

1477:   ISLocalToGlobalMappingGetBlockInfo(mapping,nproc,procs,&bnumprocs,&bindices);
1478:   if (bs > 1) { /* we need to expand the cached info */
1479:     PetscCalloc1(*nproc,&*indices);
1480:     PetscCalloc1(*nproc,&*numprocs);
1481:     for (i=0; i<*nproc; i++) {
1482:       PetscMalloc1(bs*bnumprocs[i],&(*indices)[i]);
1483:       for (j=0; j<bnumprocs[i]; j++) {
1484:         for (k=0; k<bs; k++) {
1485:           (*indices)[i][j*bs+k] = bs*bindices[i][j] + k;
1486:         }
1487:       }
1488:       (*numprocs)[i] = bnumprocs[i]*bs;
1489:     }
1490:     mapping->info_free = PETSC_TRUE;
1491:   } else {
1492:     *numprocs = bnumprocs;
1493:     *indices  = bindices;
1494:   }
1495:   return(0);
1496: }
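
/*
   Illustrative usage sketch (not part of the original source): list, for each neighboring
   process, the local indices shared with it, then release the data with the matching Restore
   call.

     PetscInt n_neigh,*neigh,*n_shared,**shared,i,j;

     ierr = ISLocalToGlobalMappingGetInfo(mapping,&n_neigh,&neigh,&n_shared,&shared);CHKERRQ(ierr);
     for (i=0; i<n_neigh; i++) {
       ierr = PetscPrintf(PETSC_COMM_SELF,"neighbor %D shares %D indices\n",neigh[i],n_shared[i]);CHKERRQ(ierr);
       for (j=0; j<n_shared[i]; j++) {
         // shared[i][j] is a local index shared with process neigh[i]
       }
     }
     ierr = ISLocalToGlobalMappingRestoreInfo(mapping,&n_neigh,&neigh,&n_shared,&shared);CHKERRQ(ierr);
*/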

1498: /*@C
1499:     ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()

1501:     Collective on ISLocalToGlobalMapping

1503:     Input Parameters:
1504: .   mapping - the mapping from local to global indexing

1506:     Output Parameters:
1507: +   nproc - number of processors that are connected to this one
1508: .   proc - neighboring processors
1509: .   numproc - number of indices for each processor
1510: -   indices - indices of local nodes shared with neighbor (sorted by global numbering)

1512:     Level: advanced

1514: .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1515:           ISLocalToGlobalMappingGetInfo()
1516: @*/
1517: PetscErrorCode  ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1518: {

1522:   ISLocalToGlobalMappingRestoreBlockInfo(mapping,nproc,procs,numprocs,indices);
1523:   return(0);
1524: }

1526: /*@C
1527:     ISLocalToGlobalMappingGetNodeInfo - Gets the neighbor information for each node

1529:     Collective on ISLocalToGlobalMapping

1531:     Input Parameters:
1532: .   mapping - the mapping from local to global indexing

1534:     Output Parameters:
1535: +   nnodes - number of local nodes (same as ISLocalToGlobalMappingGetSize())
1536: .   count - number of neighboring processors per node
1537: -   indices - indices of processes sharing the node (sorted)

1539:     Level: advanced

1541:     Notes: The user needs to call ISLocalToGlobalMappingRestoreNodeInfo() when the data is no longer needed.

1543: .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1544:           ISLocalToGlobalMappingGetInfo(), ISLocalToGlobalMappingRestoreNodeInfo()
1545: @*/
1546: PetscErrorCode  ISLocalToGlobalMappingGetNodeInfo(ISLocalToGlobalMapping mapping,PetscInt *nnodes,PetscInt *count[],PetscInt **indices[])
1547: {
1548:   PetscInt       n;

1553:   ISLocalToGlobalMappingGetSize(mapping,&n);
1554:   if (!mapping->info_nodec) {
1555:     PetscInt i,m,n_neigh,*neigh,*n_shared,**shared;

1557:     PetscMalloc2(n+1,&mapping->info_nodec,n,&mapping->info_nodei);
1558:     ISLocalToGlobalMappingGetInfo(mapping,&n_neigh,&neigh,&n_shared,&shared);
1559:     for (i=0;i<n;i++) { mapping->info_nodec[i] = 1;}
1560:     m = n;
1561:     mapping->info_nodec[n] = 0;
1562:     for (i=1;i<n_neigh;i++) {
1563:       PetscInt j;

1565:       m += n_shared[i];
1566:       for (j=0;j<n_shared[i];j++) mapping->info_nodec[shared[i][j]] += 1;
1567:     }
1568:     if (n) { PetscMalloc1(m,&mapping->info_nodei[0]); }
1569:     for (i=1;i<n;i++) mapping->info_nodei[i] = mapping->info_nodei[i-1] + mapping->info_nodec[i-1];
1570:     PetscArrayzero(mapping->info_nodec,n);
1571:     for (i=0;i<n;i++) { mapping->info_nodec[i] = 1; mapping->info_nodei[i][0] = neigh[0]; }
1572:     for (i=1;i<n_neigh;i++) {
1573:       PetscInt j;

1575:       for (j=0;j<n_shared[i];j++) {
1576:         PetscInt k = shared[i][j];

1578:         mapping->info_nodei[k][mapping->info_nodec[k]] = neigh[i];
1579:         mapping->info_nodec[k] += 1;
1580:       }
1581:     }
1582:     for (i=0;i<n;i++) { PetscSortRemoveDupsInt(&mapping->info_nodec[i],mapping->info_nodei[i]); }
1583:     ISLocalToGlobalMappingRestoreInfo(mapping,&n_neigh,&neigh,&n_shared,&shared);
1584:   }
1585:   if (nnodes)  *nnodes  = n;
1586:   if (count)   *count   = mapping->info_nodec;
1587:   if (indices) *indices = mapping->info_nodei;
1588:   return(0);
1589: }
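/*
   Editorial example (not part of the original isltog.c): a sketch of querying the per-node
   neighbor information with ISLocalToGlobalMappingGetNodeInfo().  The mapping "ltog" is
   assumed to exist; error checking is omitted to match the rest of this listing.
*/
static PetscErrorCode ShowNodeSharing_Example(ISLocalToGlobalMapping ltog)
{
  PetscInt nnodes,*count,**ranks,i,j;

  ISLocalToGlobalMappingGetNodeInfo(ltog,&nnodes,&count,&ranks);
  for (i=0; i<nnodes; i++) {
    for (j=0; j<count[i]; j++) {
      PetscPrintf(PETSC_COMM_SELF,"local node %D is shared with rank %D\n",i,ranks[i][j]);
    }
  }
  ISLocalToGlobalMappingRestoreNodeInfo(ltog,&nnodes,&count,&ranks);
  return(0);
}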

1591: /*@C
1592:     ISLocalToGlobalMappingRestoreNodeInfo - Frees the memory allocated by ISLocalToGlobalMappingGetNodeInfo()

1594:     Collective on ISLocalToGlobalMapping

1596:     Input Parameters:
1597: .   mapping - the mapping from local to global indexing

1599:     Output Parameters:
1600: +   nnodes - number of local nodes
1601: .   count - number of neighboring processors per node
1602: -   indices - indices of processes sharing the node (sorted)

1604:     Level: advanced

1606: .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1607:           ISLocalToGlobalMappingGetInfo()
1608: @*/
1609: PetscErrorCode  ISLocalToGlobalMappingRestoreNodeInfo(ISLocalToGlobalMapping mapping,PetscInt *nnodes,PetscInt *count[],PetscInt **indices[])
1610: {
1613:   if (nnodes)  *nnodes  = 0;
1614:   if (count)   *count   = NULL;
1615:   if (indices) *indices = NULL;
1616:   return(0);
1617: }

1619: /*@C
1620:    ISLocalToGlobalMappingGetIndices - Get global indices for every local point that is mapped

1622:    Not Collective

1624:    Input Arguments:
1625: . ltog - local to global mapping

1627:    Output Arguments:
1628: . array - array of indices, the length of this array may be obtained with ISLocalToGlobalMappingGetSize()

1630:    Level: advanced

1632:    Notes:
1633:     ISLocalToGlobalMappingGetSize() returns the length of this array

1635: .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices(), ISLocalToGlobalMappingGetBlockIndices(), ISLocalToGlobalMappingRestoreBlockIndices()
1636: @*/
1637: PetscErrorCode  ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1638: {
1642:   if (ltog->bs == 1) {
1643:     *array = ltog->indices;
1644:   } else {
1645:     PetscInt       *jj,k,i,j,n = ltog->n, bs = ltog->bs;
1646:     const PetscInt *ii;

1649:     PetscMalloc1(bs*n,&jj);
1650:     *array = jj;
1651:     k    = 0;
1652:     ii   = ltog->indices;
1653:     for (i=0; i<n; i++)
1654:       for (j=0; j<bs; j++)
1655:         jj[k++] = bs*ii[i] + j;
1656:   }
1657:   return(0);
1658: }
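/*
   Editorial example (not part of the original isltog.c): retrieving the expanded (per-point)
   global indices of a mapping and returning them afterwards.  "ltog" is assumed to exist;
   error checking is omitted to match the rest of this listing.
*/
static PetscErrorCode PrintGlobalIndices_Example(ISLocalToGlobalMapping ltog)
{
  const PetscInt *gidx;
  PetscInt        n,i;

  ISLocalToGlobalMappingGetSize(ltog,&n);        /* length of the array returned below */
  ISLocalToGlobalMappingGetIndices(ltog,&gidx);
  for (i=0; i<n; i++) {
    PetscPrintf(PETSC_COMM_SELF,"local %D -> global %D\n",i,gidx[i]);
  }
  ISLocalToGlobalMappingRestoreIndices(ltog,&gidx);
  return(0);
}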

1660: /*@C
1661:    ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingGetIndices()

1663:    Not Collective

1665:    Input Arguments:
1666: + ltog - local to global mapping
1667: - array - array of indices

1669:    Level: advanced

1671: .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1672: @*/
1673: PetscErrorCode  ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1674: {
1678:   if (ltog->bs == 1 && *array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");

1680:   if (ltog->bs > 1) {
1682:     PetscFree(*(void**)array);
1683:   }
1684:   return(0);
1685: }

1687: /*@C
1688:    ISLocalToGlobalMappingGetBlockIndices - Get global indices for every local block

1690:    Not Collective

1692:    Input Arguments:
1693: . ltog - local to global mapping

1695:    Output Arguments:
1696: . array - array of indices

1698:    Level: advanced

1700: .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreBlockIndices()
1701: @*/
1702: PetscErrorCode  ISLocalToGlobalMappingGetBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1703: {
1707:   *array = ltog->indices;
1708:   return(0);
1709: }

1711: /*@C
1712:    ISLocalToGlobalMappingRestoreBlockIndices - Restore indices obtained with ISLocalToGlobalMappingGetBlockIndices()

1714:    Not Collective

1716:    Input Arguments:
1717: + ltog - local to global mapping
1718: - array - array of indices

1720:    Level: advanced

1722: .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1723: @*/
1724: PetscErrorCode  ISLocalToGlobalMappingRestoreBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1725: {
1729:   if (*array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1730:   *array = NULL;
1731:   return(0);
1732: }
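/*
   Editorial example (not part of the original isltog.c): the relation between block and
   expanded indices.  As in ISLocalToGlobalMappingGetIndices() above, the expanded index of
   component j of block b is bs*bidx[b]+j.  "ltog" is assumed to exist; error checking is
   omitted to match the rest of this listing.
*/
static PetscErrorCode CompareBlockAndPointIndices_Example(ISLocalToGlobalMapping ltog)
{
  const PetscInt *bidx,*pidx;
  PetscInt        bs,npoints;

  ISLocalToGlobalMappingGetBlockSize(ltog,&bs);
  ISLocalToGlobalMappingGetSize(ltog,&npoints);          /* bs times the number of blocks */
  ISLocalToGlobalMappingGetBlockIndices(ltog,&bidx);     /* npoints/bs entries            */
  ISLocalToGlobalMappingGetIndices(ltog,&pidx);          /* npoints entries               */
  /* for any block b and component j: pidx[b*bs+j] == bs*bidx[b] + j */
  ISLocalToGlobalMappingRestoreIndices(ltog,&pidx);
  ISLocalToGlobalMappingRestoreBlockIndices(ltog,&bidx);
  return(0);
}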

1734: /*@C
1735:    ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings

1737:    Not Collective

1739:    Input Arguments:
1740: + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
1741: . n - number of mappings to concatenate
1742: - ltogs - local to global mappings

1744:    Output Arguments:
1745: . ltogcat - new mapping

1747:    Note: this currently always returns a mapping with block size of 1

1749:    Developer Note: If all the input mappings have the same block size we could easily handle that as a special case

1751:    Level: advanced

1753: .seealso: ISLocalToGlobalMappingCreate()
1754: @*/
1755: PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat)
1756: {
1757:   PetscInt       i,cnt,m,*idx;

1761:   if (n < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %D",n);
1765:   for (cnt=0,i=0; i<n; i++) {
1766:     ISLocalToGlobalMappingGetSize(ltogs[i],&m);
1767:     cnt += m;
1768:   }
1769:   PetscMalloc1(cnt,&idx);
1770:   for (cnt=0,i=0; i<n; i++) {
1771:     const PetscInt *subidx;
1772:     ISLocalToGlobalMappingGetSize(ltogs[i],&m);
1773:     ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx);
1774:     PetscArraycpy(&idx[cnt],subidx,m);
1775:     ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx);
1776:     cnt += m;
1777:   }
1778:   ISLocalToGlobalMappingCreate(comm,1,cnt,idx,PETSC_OWN_POINTER,ltogcat);
1779:   return(0);
1780: }
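/*
   Editorial example (not part of the original isltog.c): concatenating two existing mappings
   "ltogA" and "ltogB".  The result has block size 1; the indices of ltogA come first,
   followed by those of ltogB.  Error checking is omitted to match the rest of this listing.
*/
static PetscErrorCode ConcatenateTwo_Example(MPI_Comm comm,ISLocalToGlobalMapping ltogA,ISLocalToGlobalMapping ltogB,ISLocalToGlobalMapping *cat)
{
  ISLocalToGlobalMapping pair[2];

  pair[0] = ltogA;
  pair[1] = ltogB;
  ISLocalToGlobalMappingConcatenate(comm,2,pair,cat);
  return(0);
}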

1782: /*MC
1783:       ISLOCALTOGLOBALMAPPINGBASIC - basic implementation of the ISLocalToGlobalMapping object. When ISGlobalToLocalMappingApply() is
1784:                                     used, this is suitable only for small and moderate size problems.

1786:    Options Database Keys:
1787: .   -islocaltoglobalmapping_type basic - select this method

1789:    Level: beginner

1791: .seealso:  ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetType(), ISLOCALTOGLOBALMAPPINGHASH
1792: M*/
1793: PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Basic(ISLocalToGlobalMapping ltog)
1794: {
1796:   ltog->ops->globaltolocalmappingapply      = ISGlobalToLocalMappingApply_Basic;
1797:   ltog->ops->globaltolocalmappingsetup      = ISGlobalToLocalMappingSetUp_Basic;
1798:   ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Basic;
1799:   ltog->ops->destroy                        = ISLocalToGlobalMappingDestroy_Basic;
1800:   return(0);
1801: }

1803: /*MC
1804:       ISLOCALTOGLOBALMAPPINGHASH - hash implementation of the ISLocalToGlobalMapping object. When ISGlobalToLocalMappingApply() is
1805:                                     used, this is suitable for large problems since it avoids storing an array over the entire range of global indices.

1807:    Options Database Keys:
1808: .   -islocaltoglobalmapping_type hash - select this method


1811:    Notes:
1812:     This is selected automatically for large problems if the user does not set the type.

1814:    Level: beginner

1816: .seealso:  ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetType(), ISLOCALTOGLOBALMAPPINGBASIC
1817: M*/
1818: PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Hash(ISLocalToGlobalMapping ltog)
1819: {
1821:   ltog->ops->globaltolocalmappingapply      = ISGlobalToLocalMappingApply_Hash;
1822:   ltog->ops->globaltolocalmappingsetup      = ISGlobalToLocalMappingSetUp_Hash;
1823:   ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Hash;
1824:   ltog->ops->destroy                        = ISLocalToGlobalMappingDestroy_Hash;
1825:   return(0);
1826: }
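/*
   Editorial example (not part of the original isltog.c): explicitly selecting the hash
   implementation instead of relying on the automatic choice made in
   ISGlobalToLocalMappingSetUp(), then mapping two illustrative global indices (the values
   0 and 17 are arbitrary) back to local ones.  The same type selection is available at
   runtime with -islocaltoglobalmapping_type hash once ISLocalToGlobalMappingSetFromOptions()
   has been called.  Error checking is omitted to match the rest of this listing.
*/
static PetscErrorCode ForceHashType_Example(ISLocalToGlobalMapping ltog)
{
  PetscInt gidx[2] = {0,17},lidx[2],nfound;

  ISLocalToGlobalMappingSetType(ltog,ISLOCALTOGLOBALMAPPINGHASH);
  /* map global indices to local ones, dropping those not present in this mapping */
  ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,2,gidx,&nfound,lidx);
  return(0);
}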


1829: /*@C
1830:     ISLocalToGlobalMappingRegister -  Adds a method for applying a global to local mapping with an ISLocalToGlobalMapping

1832:    Not Collective

1834:    Input Parameters:
1835: +  sname - name of a new method
1836: -  function - routine to create the method context

1838:    Notes:
1839:    ISLocalToGlobalMappingRegister() may be called multiple times to add several user-defined mappings.

1841:    Sample usage:
1842: .vb
1843:    ISLocalToGlobalMappingRegister("my_mapper",MyCreate);
1844: .ve

1846:    Then, your mapping can be chosen with the procedural interface via
1847: $     ISLocalToGlobalMappingSetType(ltog,"my_mapper")
1848:    or at runtime via the option
1849: $     -islocaltoglobalmapping_type my_mapper

1851:    Level: advanced

1853: .seealso: ISLocalToGlobalMappingRegisterAll(), ISLocalToGlobalMappingRegisterDestroy(), ISLOCALTOGLOBALMAPPINGBASIC, ISLOCALTOGLOBALMAPPINGHASH

1855: @*/
1856: PetscErrorCode  ISLocalToGlobalMappingRegister(const char sname[],PetscErrorCode (*function)(ISLocalToGlobalMapping))
1857: {

1861:   ISInitializePackage();
1862:   PetscFunctionListAdd(&ISLocalToGlobalMappingList,sname,function);
1863:   return(0);
1864: }
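/*
   Editorial example (not part of the original isltog.c): a hypothetical user-defined type
   "my_mapper".  MyGTLApply, MyGTLSetUp, MyGTLApplyBlock and MyDestroy are placeholders the
   user would supply; the ops fields mirror those set by ISLocalToGlobalMappingCreate_Basic()
   and ISLocalToGlobalMappingCreate_Hash() above and require petsc/private/isimpl.h.
*/
#if 0
static PetscErrorCode ISLocalToGlobalMappingCreate_MyMapper(ISLocalToGlobalMapping ltog)
{
  ltog->ops->globaltolocalmappingapply      = MyGTLApply;
  ltog->ops->globaltolocalmappingsetup      = MyGTLSetUp;
  ltog->ops->globaltolocalmappingapplyblock = MyGTLApplyBlock;
  ltog->ops->destroy                        = MyDestroy;
  return(0);
}

/* in user code, after PetscInitialize(): */
ISLocalToGlobalMappingRegister("my_mapper",ISLocalToGlobalMappingCreate_MyMapper);
ISLocalToGlobalMappingSetType(ltog,"my_mapper");
#endif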

1866: /*@C
1867:    ISLocalToGlobalMappingSetType - Builds ISLocalToGlobalMapping for a particular global to local mapping approach.

1869:    Logically Collective on ISLocalToGlobalMapping

1871:    Input Parameters:
1872: +  ltog - the ISLocalToGlobalMapping object
1873: -  type - a known method

1875:    Options Database Key:
1876: .  -islocaltoglobalmapping_type  <method> - Sets the method; use -help for a list
1877:     of available methods (for instance, basic or hash)

1879:    Notes:
1880:    See "petsc/include/petscis.h" for available methods

1882:   Normally, it is best to use the ISLocalToGlobalMappingSetFromOptions() command and
1883:   then set the ISLocalToGlobalMapping type from the options database rather than by using
1884:   this routine.

1886:   Level: intermediate

1888:   Developer Note: ISLocalToGlobalMappingRegister() is used to add new types to ISLocalToGlobalMappingList from which they
1889:   are accessed by ISLocalToGlobalMappingSetType().

1891: .seealso: PCSetType(), ISLocalToGlobalMappingType, ISLocalToGlobalMappingRegister(), ISLocalToGlobalMappingCreate()

1893: @*/
1894: PetscErrorCode  ISLocalToGlobalMappingSetType(ISLocalToGlobalMapping ltog, ISLocalToGlobalMappingType type)
1895: {
1896:   PetscErrorCode ierr,(*r)(ISLocalToGlobalMapping);
1897:   PetscBool      match;


1903:   PetscObjectTypeCompare((PetscObject)ltog,type,&match);
1904:   if (match) return(0);

1906:   PetscFunctionListFind(ISLocalToGlobalMappingList,type,&r);
1907:   if (!r) SETERRQ1(PetscObjectComm((PetscObject)ltog),PETSC_ERR_ARG_UNKNOWN_TYPE,"Unable to find requested ISLocalToGlobalMapping type %s",type);
1908:   /* Destroy the previous private LTOG context */
1909:   if (ltog->ops->destroy) {
1910:     (*ltog->ops->destroy)(ltog);
1911:     ltog->ops->destroy = NULL;
1912:   }
1913:   PetscObjectChangeTypeName((PetscObject)ltog,type);
1914:   (*r)(ltog);
1915:   return(0);
1916: }

1918: PetscBool ISLocalToGlobalMappingRegisterAllCalled = PETSC_FALSE;

1920: /*@C
1921:   ISLocalToGlobalMappingRegisterAll - Registers all of the local to global mapping components in the IS package.

1923:   Not Collective

1925:   Level: advanced

1927: .seealso:  ISRegister(), ISLocalToGlobalMappingRegister()
1928: @*/
1929: PetscErrorCode  ISLocalToGlobalMappingRegisterAll(void)
1930: {

1934:   if (ISLocalToGlobalMappingRegisterAllCalled) return(0);
1935:   ISLocalToGlobalMappingRegisterAllCalled = PETSC_TRUE;

1937:   ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGBASIC, ISLocalToGlobalMappingCreate_Basic);
1938:   ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGHASH, ISLocalToGlobalMappingCreate_Hash);
1939:   return(0);
1940: }