Actual source code: isltog.c


  2: #include <petsc/private/isimpl.h>
  3: #include <petsc/private/hashmapi.h>
  4: #include <petscsf.h>
  5: #include <petscviewer.h>

  7: PetscClassId IS_LTOGM_CLASSID;
  8: static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping,PetscInt*,PetscInt**,PetscInt**,PetscInt***);

 10: typedef struct {
 11:   PetscInt *globals;
 12: } ISLocalToGlobalMapping_Basic;

 14: typedef struct {
 15:   PetscHMapI globalht;
 16: } ISLocalToGlobalMapping_Hash;

 18: /*@C
 19:   ISGetPointRange - Returns a description of the points in an IS suitable for traversal

 21:   Not collective

 23:   Input Parameter:
 24: . pointIS - The IS object

 26:   Output Parameters:
 27: + pStart - The first index, see notes
 28: . pEnd   - One past the last index, see notes
 29: - points - The indices, see notes

 31:   Notes:
 32:   If the IS contains contiguous indices in an ISSTRIDE, then the indices are contained in [pStart, pEnd) and points = NULL. Otherwise, pStart = 0, pEnd = numIndices, and points is an array of the indices. This supports the following pattern
 33: $ ISGetPointRange(is, &pStart, &pEnd, &points);
 34: $ for (p = pStart; p < pEnd; ++p) {
 35: $   const PetscInt point = points ? points[p] : p;
 36: $ }
 37: $ ISRestorePointRange(is, &pStart, &pEnd, &points);

 39:   Level: intermediate

 41: .seealso: ISRestorePointRange(), ISGetPointSubrange(), ISGetIndices(), ISCreateStride()
 42: @*/
 43: PetscErrorCode ISGetPointRange(IS pointIS, PetscInt *pStart, PetscInt *pEnd, const PetscInt **points)
 44: {
 45:   PetscInt       numCells, step = 1;
 46:   PetscBool      isStride;

 50:   *pStart = 0;
 51:   *points = NULL;
 52:   ISGetLocalSize(pointIS, &numCells);
 53:   PetscObjectTypeCompare((PetscObject) pointIS, ISSTRIDE, &isStride);
 54:   if (isStride) {ISStrideGetInfo(pointIS, pStart, &step);}
 55:   *pEnd   = *pStart + numCells;
 56:   if (!isStride || step != 1) {ISGetIndices(pointIS, points);}
 57:   return(0);
 58: }

 60: /*@C
 61:   ISRestorePointRange - Destroys the traversal description

 63:   Not collective

 65:   Input Parameters:
 66: + pointIS - The IS object
 67: . pStart  - The first index, from ISGetPointRange()
 68: . pEnd    - One past the last index, from ISGetPointRange()
 69: - points  - The indices, from ISGetPointRange()

 71:   Notes:
 72:   If the IS contains contiguous indices in an ISSTRIDE, then the indices are contained in [pStart, pEnd) and points = NULL. Otherwise, pStart = 0, pEnd = numIndices, and points is an array of the indices. This supports the following pattern
 73: $ ISGetPointRange(is, &pStart, &pEnd, &points);
 74: $ for (p = pStart; p < pEnd; ++p) {
 75: $   const PetscInt point = points ? points[p] : p;
 76: $ }
 77: $ ISRestorePointRange(is, &pStart, &pEnd, &points);

 79:   Level: intermediate

 81: .seealso: ISGetPointRange(), ISGetPointSubrange(), ISGetIndices(), ISCreateStride()
 82: @*/
 83: PetscErrorCode ISRestorePointRange(IS pointIS, PetscInt *pStart, PetscInt *pEnd, const PetscInt **points)
 84: {
 85:   PetscInt       step = 1;
 86:   PetscBool      isStride;

 90:   PetscObjectTypeCompare((PetscObject) pointIS, ISSTRIDE, &isStride);
 91:   if (isStride) {ISStrideGetInfo(pointIS, pStart, &step);}
 92:   if (!isStride || step != 1) {ISRestoreIndices(pointIS, points);}
 93:   return(0);
 94: }
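
/* Editor's note: an illustrative usage sketch, not part of isltog.c. It packages the traversal
   pattern documented above as a self-contained helper; the function name is hypothetical and
   error handling follows the usual CHKERRQ style. */
static PetscErrorCode ExampleTraversePoints(IS pointIS)
{
  PetscInt       pStart, pEnd, p;
  const PetscInt *points;
  PetscErrorCode ierr;

  ierr = ISGetPointRange(pointIS, &pStart, &pEnd, &points);CHKERRQ(ierr);
  for (p = pStart; p < pEnd; ++p) {
    const PetscInt point = points ? points[p] : p; /* contiguous stride case: point == p */
    (void)point;                                   /* ... use point here ... */
  }
  ierr = ISRestorePointRange(pointIS, &pStart, &pEnd, &points);CHKERRQ(ierr);
  return 0;
}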

 96: /*@C
 97:   ISGetPointSubrange - Configures the input IS to be a subrange for the traversal information given

 99:   Not collective

101:   Input Parameters:
102: + subpointIS - The IS object to be configured
103: . pStart     - The first index of the subrange
104: . pEnd       - One past the last index for the subrange
105: - points     - The indices for the entire range, from ISGetPointRange()

107:   Output Parameter:
108: . subpointIS - The IS object now configured to be a subrange

110:   Notes:
111:   The input IS will now respond properly to calls to ISGetPointRange() and return the subrange.

113:   Level: intermediate

115: .seealso: ISGetPointRange(), ISRestorePointRange(), ISGetIndices(), ISCreateStride()
116: @*/
117: PetscErrorCode ISGetPointSubrange(IS subpointIS, PetscInt pStart, PetscInt pEnd, const PetscInt *points)
118: {

122:   if (points) {
123:     ISSetType(subpointIS, ISGENERAL);
124:     ISGeneralSetIndices(subpointIS, pEnd-pStart, &points[pStart], PETSC_USE_POINTER);
125:   } else {
126:     ISSetType(subpointIS, ISSTRIDE);
127:     ISStrideSetStride(subpointIS, pEnd-pStart, pStart, 1);
128:   }
129:   return(0);
130: }
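
/* Editor's note: an illustrative sketch, not part of isltog.c. It restricts a traversal
   obtained from ISGetPointRange() to [subStart, subEnd) by reconfiguring an existing IS;
   the names are hypothetical. Because the general case uses PETSC_USE_POINTER above, the
   configured subrange must not be used after the points array has been restored. */
static PetscErrorCode ExampleConfigureSubrange(IS pointIS, IS subpointIS, PetscInt subStart, PetscInt subEnd)
{
  PetscInt       pStart, pEnd;
  const PetscInt *points;
  PetscErrorCode ierr;

  ierr = ISGetPointRange(pointIS, &pStart, &pEnd, &points);CHKERRQ(ierr);
  ierr = ISGetPointSubrange(subpointIS, subStart, subEnd, points);CHKERRQ(ierr);
  /* ... traverse subpointIS with ISGetPointRange()/ISRestorePointRange() ... */
  ierr = ISRestorePointRange(pointIS, &pStart, &pEnd, &points);CHKERRQ(ierr);
  return 0;
}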

132: /* -----------------------------------------------------------------------------------------*/

134: /*
135:     Creates the global mapping information in the ISLocalToGlobalMapping structure

137:     If the user has not selected how to handle the global to local mapping then use HASH for "large" problems
138: */
139: static PetscErrorCode ISGlobalToLocalMappingSetUp(ISLocalToGlobalMapping mapping)
140: {
141:   PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start;

145:   if (mapping->data) return(0);
146:   end   = 0;
147:   start = PETSC_MAX_INT;

149:   for (i=0; i<n; i++) {
150:     if (idx[i] < 0) continue;
151:     if (idx[i] < start) start = idx[i];
152:     if (idx[i] > end)   end   = idx[i];
153:   }
154:   if (start > end) {start = 0; end = -1;}
155:   mapping->globalstart = start;
156:   mapping->globalend   = end;
157:   if (!((PetscObject)mapping)->type_name) {
158:     if ((end - start) > PetscMax(4*n,1000000)) {
159:       ISLocalToGlobalMappingSetType(mapping,ISLOCALTOGLOBALMAPPINGHASH);
160:     } else {
161:       ISLocalToGlobalMappingSetType(mapping,ISLOCALTOGLOBALMAPPINGBASIC);
162:     }
163:   }
164:   (*mapping->ops->globaltolocalmappingsetup)(mapping);
165:   return(0);
166: }
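
/* Editor's note: an illustrative sketch, not part of isltog.c. The automatic choice above
   between the basic (dense table) and hash backends can be overridden before the first
   global-to-local call; the helper name is hypothetical. */
static PetscErrorCode ExampleForceHashBackend(ISLocalToGlobalMapping mapping)
{
  PetscErrorCode ierr;

  ierr = ISLocalToGlobalMappingSetType(mapping, ISLOCALTOGLOBALMAPPINGHASH);CHKERRQ(ierr);
  /* equivalently, call ISLocalToGlobalMappingSetFromOptions(mapping) and pass
     -islocaltoglobalmapping_type hash on the command line */
  return 0;
}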

168: static PetscErrorCode ISGlobalToLocalMappingSetUp_Basic(ISLocalToGlobalMapping mapping)
169: {
170:   PetscErrorCode              ierr;
171:   PetscInt                    i,*idx = mapping->indices,n = mapping->n,end,start,*globals;
172:   ISLocalToGlobalMapping_Basic *map;

175:   start            = mapping->globalstart;
176:   end              = mapping->globalend;
177:   PetscNew(&map);
178:   PetscMalloc1(end-start+2,&globals);
179:   map->globals     = globals;
180:   for (i=0; i<end-start+1; i++) globals[i] = -1;
181:   for (i=0; i<n; i++) {
182:     if (idx[i] < 0) continue;
183:     globals[idx[i] - start] = i;
184:   }
185:   mapping->data = (void*)map;
186:   PetscLogObjectMemory((PetscObject)mapping,(end-start+1)*sizeof(PetscInt));
187:   return(0);
188: }

190: static PetscErrorCode ISGlobalToLocalMappingSetUp_Hash(ISLocalToGlobalMapping mapping)
191: {
192:   PetscErrorCode              ierr;
193:   PetscInt                    i,*idx = mapping->indices,n = mapping->n;
194:   ISLocalToGlobalMapping_Hash *map;

197:   PetscNew(&map);
198:   PetscHMapICreate(&map->globalht);
199:   for (i=0; i<n; i++) {
200:     if (idx[i] < 0) continue;
201:     PetscHMapISet(map->globalht,idx[i],i);
202:   }
203:   mapping->data = (void*)map;
204:   PetscLogObjectMemory((PetscObject)mapping,2*n*sizeof(PetscInt));
205:   return(0);
206: }

208: static PetscErrorCode ISLocalToGlobalMappingDestroy_Basic(ISLocalToGlobalMapping mapping)
209: {
210:   PetscErrorCode              ierr;
211:   ISLocalToGlobalMapping_Basic *map  = (ISLocalToGlobalMapping_Basic *)mapping->data;

214:   if (!map) return(0);
215:   PetscFree(map->globals);
216:   PetscFree(mapping->data);
217:   return(0);
218: }

220: static PetscErrorCode ISLocalToGlobalMappingDestroy_Hash(ISLocalToGlobalMapping mapping)
221: {
222:   PetscErrorCode              ierr;
223:   ISLocalToGlobalMapping_Hash *map  = (ISLocalToGlobalMapping_Hash*)mapping->data;

226:   if (!map) return(0);
227:   PetscHMapIDestroy(&map->globalht);
228:   PetscFree(mapping->data);
229:   return(0);
230: }

232: #define GTOLTYPE _Basic
233: #define GTOLNAME _Basic
234: #define GTOLBS mapping->bs
235: #define GTOL(g, local) do {                  \
236:     local = map->globals[g/bs - start];      \
237:     if (local >= 0) local = bs*local + (g % bs); \
238:   } while (0)

240: #include <../src/vec/is/utils/isltog.h>

242: #define GTOLTYPE _Basic
243: #define GTOLNAME Block_Basic
244: #define GTOLBS 1
245: #define GTOL(g, local) do {                  \
246:     local = map->globals[g - start];         \
247:   } while (0)
248: #include <../src/vec/is/utils/isltog.h>

250: #define GTOLTYPE _Hash
251: #define GTOLNAME _Hash
252: #define GTOLBS mapping->bs
253: #define GTOL(g, local) do {                         \
254:     (void)PetscHMapIGet(map->globalht,g/bs,&local); \
255:     if (local >= 0) local = bs*local + (g % bs);   \
256:    } while (0)
257: #include <../src/vec/is/utils/isltog.h>

259: #define GTOLTYPE _Hash
260: #define GTOLNAME Block_Hash
261: #define GTOLBS 1
262: #define GTOL(g, local) do {                         \
263:     (void)PetscHMapIGet(map->globalht,g,&local);    \
264:   } while (0)
265: #include <../src/vec/is/utils/isltog.h>
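
/* Editor's note: an illustrative sketch, not part of isltog.c. Each GTOLTYPE/GTOLNAME/GTOLBS/GTOL
   block above, combined with re-including isltog.h, instantiates one variant of the
   global-to-local apply routines. For the Block_Basic instantiation the per-index lookup is
   roughly the loop below (bounds checking and the IS_GTOLM_DROP/IS_GTOLM_MASK handling are
   omitted); the function is hypothetical and only meant to illustrate the GTOL macro. */
static void ExampleBlockBasicLookup(const ISLocalToGlobalMapping_Basic *map, PetscInt start, PetscInt n, const PetscInt idx[], PetscInt idxout[])
{
  PetscInt i;
  for (i = 0; i < n; i++) idxout[i] = map->globals[idx[i] - start]; /* GTOL(g,local) with GTOLBS == 1 */
}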

267: /*@
268:     ISLocalToGlobalMappingDuplicate - Duplicates the local to global mapping object

270:     Not Collective

272:     Input Parameter:
273: .   ltog - local to global mapping

275:     Output Parameter:
276: .   nltog - the duplicated local to global mapping

278:     Level: advanced

280: .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
281: @*/
282: PetscErrorCode  ISLocalToGlobalMappingDuplicate(ISLocalToGlobalMapping ltog,ISLocalToGlobalMapping* nltog)
283: {

288:   ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)ltog),ltog->bs,ltog->n,ltog->indices,PETSC_COPY_VALUES,nltog);
289:   return(0);
290: }

292: /*@
293:     ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping

295:     Not Collective

297:     Input Parameter:
298: .   ltog - local to global mapping

300:     Output Parameter:
301: .   n - the number of entries in the local mapping, ISLocalToGlobalMappingGetIndices() returns an array of this length

303:     Level: advanced

305: .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
306: @*/
307: PetscErrorCode  ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
308: {
312:   *n = mapping->bs*mapping->n;
313:   return(0);
314: }

316: /*@C
317:    ISLocalToGlobalMappingViewFromOptions - View from Options

319:    Collective on ISLocalToGlobalMapping

321:    Input Parameters:
322: +  A - the local to global mapping object
323: .  obj - Optional object
324: -  name - command line option

326:    Level: intermediate

327: .seealso:  ISLocalToGlobalMapping, ISLocalToGlobalMappingView(), PetscObjectViewFromOptions(), ISLocalToGlobalMappingCreate()
328: @*/
329: PetscErrorCode  ISLocalToGlobalMappingViewFromOptions(ISLocalToGlobalMapping A,PetscObject obj,const char name[])
330: {

335:   PetscObjectViewFromOptions((PetscObject)A,obj,name);
336:   return(0);
337: }

339: /*@C
340:     ISLocalToGlobalMappingView - View a local to global mapping

342:     Not Collective

344:     Input Parameters:
345: +   ltog - local to global mapping
346: -   viewer - viewer

348:     Level: advanced

350: .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
351: @*/
352: PetscErrorCode  ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
353: {
354:   PetscInt       i;
355:   PetscMPIInt    rank;
356:   PetscBool      iascii;

361:   if (!viewer) {
362:     PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping),&viewer);
363:   }

366:   MPI_Comm_rank(PetscObjectComm((PetscObject)mapping),&rank);
367:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
368:   if (iascii) {
369:     PetscObjectPrintClassNamePrefixType((PetscObject)mapping,viewer);
370:     PetscViewerASCIIPushSynchronized(viewer);
371:     for (i=0; i<mapping->n; i++) {
372:       PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %D %D\n",rank,i,mapping->indices[i]);
373:     }
374:     PetscViewerFlush(viewer);
375:     PetscViewerASCIIPopSynchronized(viewer);
376:   }
377:   return(0);
378: }

380: /*@
381:     ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
382:     ordering and a global parallel ordering.

384:     Not collective

386:     Input Parameter:
387: .   is - index set containing the global numbers for each local number

389:     Output Parameter:
390: .   mapping - new mapping data structure

392:     Notes:
393:     The block size of the IS determines the block size of the mapping.

394:     Level: advanced

396: .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetFromOptions()
397: @*/
398: PetscErrorCode  ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
399: {
401:   PetscInt       n,bs;
402:   const PetscInt *indices;
403:   MPI_Comm       comm;
404:   PetscBool      isblock;


410:   PetscObjectGetComm((PetscObject)is,&comm);
411:   ISGetLocalSize(is,&n);
412:   PetscObjectTypeCompare((PetscObject)is,ISBLOCK,&isblock);
413:   if (!isblock) {
414:     ISGetIndices(is,&indices);
415:     ISLocalToGlobalMappingCreate(comm,1,n,indices,PETSC_COPY_VALUES,mapping);
416:     ISRestoreIndices(is,&indices);
417:   } else {
418:     ISGetBlockSize(is,&bs);
419:     ISBlockGetIndices(is,&indices);
420:     ISLocalToGlobalMappingCreate(comm,bs,n/bs,indices,PETSC_COPY_VALUES,mapping);
421:     ISBlockRestoreIndices(is,&indices);
422:   }
423:   return(0);
424: }
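
/* Editor's note: an illustrative sketch, not part of isltog.c. It builds a mapping from an
   index set listing the global number of each local entry; the IS block size (1 here) becomes
   the block size of the mapping. The index values are made up for illustration. */
static PetscErrorCode ExampleCreateFromIS(MPI_Comm comm, ISLocalToGlobalMapping *ltog)
{
  const PetscInt globals[] = {10, 11, 25, 26}; /* hypothetical global numbers of local entries 0..3 */
  IS             is;
  PetscErrorCode ierr;

  ierr = ISCreateGeneral(comm, 4, globals, PETSC_COPY_VALUES, &is);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingCreateIS(is, ltog);CHKERRQ(ierr);
  ierr = ISDestroy(&is);CHKERRQ(ierr);
  return 0;
}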

426: /*@C
427:     ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
428:     ordering and a global parallel ordering.

430:     Collective

432:     Input Parameters:
433: +   sf - star forest mapping contiguous local indices to (rank, offset)
434: -   start - first global index on this process

436:     Output Parameter:
437: .   mapping - new mapping data structure

439:     Level: advanced

441: .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingSetFromOptions()
442: @*/
443: PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf,PetscInt start,ISLocalToGlobalMapping *mapping)
444: {
446:   PetscInt       i,maxlocal,nroots,nleaves,*globals,*ltog;
447:   const PetscInt *ilocal;
448:   MPI_Comm       comm;


454:   PetscObjectGetComm((PetscObject)sf,&comm);
455:   PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,NULL);
456:   if (ilocal) {
457:     for (i=0,maxlocal=0; i<nleaves; i++) maxlocal = PetscMax(maxlocal,ilocal[i]+1);
458:   }
459:   else maxlocal = nleaves;
460:   PetscMalloc1(nroots,&globals);
461:   PetscMalloc1(maxlocal,&ltog);
462:   for (i=0; i<nroots; i++) globals[i] = start + i;
463:   for (i=0; i<maxlocal; i++) ltog[i] = -1;
464:   PetscSFBcastBegin(sf,MPIU_INT,globals,ltog,MPI_REPLACE);
465:   PetscSFBcastEnd(sf,MPIU_INT,globals,ltog,MPI_REPLACE);
466:   ISLocalToGlobalMappingCreate(comm,1,maxlocal,ltog,PETSC_OWN_POINTER,mapping);
467:   PetscFree(globals);
468:   return(0);
469: }
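
/* Editor's note: an illustrative sketch, not part of isltog.c, assuming a two-process
   communicator. Each process owns nroots contiguous global entries starting at 'start' and
   adds one ghost leaf referencing the first owned entry of the other rank;
   ISLocalToGlobalMappingCreateSF() then yields a mapping over the nroots+1 local entries.
   The graph and all names are hypothetical. */
static PetscErrorCode ExampleCreateFromSF(MPI_Comm comm, PetscMPIInt rank, PetscInt nroots, PetscInt start, ISLocalToGlobalMapping *ltog)
{
  PetscSF        sf;
  PetscSFNode    *remote;
  PetscInt       i, nleaves = nroots + 1;
  PetscErrorCode ierr;

  ierr = PetscMalloc1(nleaves, &remote);CHKERRQ(ierr);
  for (i = 0; i < nroots; i++) { remote[i].rank = rank; remote[i].index = i; } /* owned entries reference themselves */
  remote[nroots].rank  = 1 - rank; /* single ghost: first owned entry of the other process */
  remote[nroots].index = 0;
  ierr = PetscSFCreate(comm, &sf);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(sf, nroots, nleaves, NULL, PETSC_COPY_VALUES, remote, PETSC_OWN_POINTER);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingCreateSF(sf, start, ltog);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
  return 0;
}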

471: /*@
472:     ISLocalToGlobalMappingSetBlockSize - Sets the blocksize of the mapping

474:     Not collective

476:     Input Parameters:
477: +   mapping - mapping data structure
478: -   bs - the blocksize

480:     Level: advanced

482: .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
483: @*/
484: PetscErrorCode  ISLocalToGlobalMappingSetBlockSize(ISLocalToGlobalMapping mapping,PetscInt bs)
485: {
486:   PetscInt       *nid;
487:   const PetscInt *oid;
488:   PetscInt       i,cn,on,obs,nn;

493:   if (bs < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid block size %D",bs);
494:   if (bs == mapping->bs) return(0);
495:   on  = mapping->n;
496:   obs = mapping->bs;
497:   oid = mapping->indices;
498:   nn  = (on*obs)/bs;
499:   if ((on*obs)%bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block size %D is inconsistent with block size %D and number of block indices %D",bs,obs,on);

501:   PetscMalloc1(nn,&nid);
502:   ISLocalToGlobalMappingGetIndices(mapping,&oid);
503:   for (i=0;i<nn;i++) {
504:     PetscInt j;
505:     for (j=0,cn=0;j<bs-1;j++) {
506:       if (oid[i*bs+j] < 0) { cn++; continue; }
507:       if (oid[i*bs+j] != oid[i*bs+j+1]-1) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %D and %D are incompatible with the block indices: non consecutive indices %D %D",bs,obs,oid[i*bs+j],oid[i*bs+j+1]);
508:     }
509:     if (oid[i*bs+j] < 0) cn++;
510:     if (cn) {
511:       if (cn != bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %D and %D are incompatible with the block indices: invalid number of negative entries in block %D",bs,obs,cn);
512:       nid[i] = -1;
513:     } else {
514:       nid[i] = oid[i*bs]/bs;
515:     }
516:   }
517:   ISLocalToGlobalMappingRestoreIndices(mapping,&oid);

519:   mapping->n           = nn;
520:   mapping->bs          = bs;
521:   PetscFree(mapping->indices);
522:   mapping->indices     = nid;
523:   mapping->globalstart = 0;
524:   mapping->globalend   = 0;

526:   /* reset the cached information */
527:   PetscFree(mapping->info_procs);
528:   PetscFree(mapping->info_numprocs);
529:   if (mapping->info_indices) {
530:     PetscInt i;

532:     PetscFree((mapping->info_indices)[0]);
533:     for (i=1; i<mapping->info_nproc; i++) {
534:       PetscFree(mapping->info_indices[i]);
535:     }
536:     PetscFree(mapping->info_indices);
537:   }
538:   mapping->info_cached = PETSC_FALSE;

540:   if (mapping->ops->destroy) {
541:     (*mapping->ops->destroy)(mapping);
542:   }
543:   return(0);
544: }
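
/* Editor's note: an illustrative sketch, not part of isltog.c. A block-size-1 mapping whose
   indices {0,1,6,7} form consecutive pairs is compressed to block size 2, after which the
   stored block indices are {0,3} (compare the example in ISLocalToGlobalMappingApplyBlock()). */
static PetscErrorCode ExampleSetBlockSize(void)
{
  const PetscInt         idx[] = {0, 1, 6, 7};
  ISLocalToGlobalMapping ltog;
  PetscErrorCode         ierr;

  ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, 1, 4, idx, PETSC_COPY_VALUES, &ltog);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingSetBlockSize(ltog, 2);CHKERRQ(ierr); /* bs is now 2, block indices {0,3} */
  ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  return 0;
}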

546: /*@
547:     ISLocalToGlobalMappingGetBlockSize - Gets the blocksize of the mapping

550:     Not Collective

552:     Input Parameter:
553: .   mapping - mapping data structure

555:     Output Parameter:
556: .   bs - the blocksize

558:     Level: advanced

560: .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
561: @*/
562: PetscErrorCode  ISLocalToGlobalMappingGetBlockSize(ISLocalToGlobalMapping mapping,PetscInt *bs)
563: {
566:   *bs = mapping->bs;
567:   return(0);
568: }

570: /*@
571:     ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
572:     ordering and a global parallel ordering.

574:     Not Collective, but communicator may have more than one process

576:     Input Parameters:
577: +   comm - MPI communicator
578: .   bs - the block size
579: .   n - the number of local elements divided by the block size, or equivalently the number of block indices
580: .   indices - the global index for each local element, these do not need to be in increasing order (sorted), these values should not be scaled (i.e. multiplied) by the blocksize bs
581: -   mode - see PetscCopyMode

583:     Output Parameter:
584: .   mapping - new mapping data structure

586:     Notes:
587:     There is one integer value in indices per block and it represents the actual indices bs*idx + j, where j=0,..,bs-1

589:     For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
590:     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
591:     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.

593:     Level: advanced

595: .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingSetFromOptions(), ISLOCALTOGLOBALMAPPINGBASIC, ISLOCALTOGLOBALMAPPINGHASH
596:           ISLocalToGlobalMappingSetType(), ISLocalToGlobalMappingType
597: @*/
598: PetscErrorCode  ISLocalToGlobalMappingCreate(MPI_Comm comm,PetscInt bs,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping)
599: {
601:   PetscInt       *in;


607:   *mapping = NULL;
608:   ISInitializePackage();

610:   PetscHeaderCreate(*mapping,IS_LTOGM_CLASSID,"ISLocalToGlobalMapping","Local to global mapping","IS",
611:                            comm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);
612:   (*mapping)->n             = n;
613:   (*mapping)->bs            = bs;
614:   (*mapping)->info_cached   = PETSC_FALSE;
615:   (*mapping)->info_free     = PETSC_FALSE;
616:   (*mapping)->info_procs    = NULL;
617:   (*mapping)->info_numprocs = NULL;
618:   (*mapping)->info_indices  = NULL;
619:   (*mapping)->info_nodec    = NULL;
620:   (*mapping)->info_nodei    = NULL;

622:   (*mapping)->ops->globaltolocalmappingapply      = NULL;
623:   (*mapping)->ops->globaltolocalmappingapplyblock = NULL;
624:   (*mapping)->ops->destroy                        = NULL;
625:   if (mode == PETSC_COPY_VALUES) {
626:     PetscMalloc1(n,&in);
627:     PetscArraycpy(in,indices,n);
628:     (*mapping)->indices = in;
629:     PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));
630:   } else if (mode == PETSC_OWN_POINTER) {
631:     (*mapping)->indices = (PetscInt*)indices;
632:     PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));
633:   }
634:   else SETERRQ(comm,PETSC_ERR_SUP,"Cannot currently use PETSC_USE_POINTER");
635:   return(0);
636: }
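
/* Editor's note: an illustrative sketch, not part of isltog.c. It creates the serial block
   mapping used in the ISLocalToGlobalMappingApplyBlock() example: bs = 2 with block indices
   {0,3}, so local indices 0..3 correspond to global indices {0,1,6,7}. */
static PetscErrorCode ExampleCreateBlockMapping(ISLocalToGlobalMapping *ltog)
{
  const PetscInt blockIdx[] = {0, 3}; /* one index per block, not scaled by bs */
  PetscErrorCode ierr;

  ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, 2, 2, blockIdx, PETSC_COPY_VALUES, ltog);CHKERRQ(ierr);
  return 0;
}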

638: PetscFunctionList ISLocalToGlobalMappingList = NULL;


641: /*@
642:    ISLocalToGlobalMappingSetFromOptions - Set mapping options from the options database.

644:    Not collective

646:    Input Parameter:
647: .  mapping - mapping data structure

649:    Level: advanced

651: @*/
652: PetscErrorCode ISLocalToGlobalMappingSetFromOptions(ISLocalToGlobalMapping mapping)
653: {
654:   PetscErrorCode             ierr;
655:   char                       type[256];
656:   ISLocalToGlobalMappingType defaulttype = "Not set";
657:   PetscBool                  flg;

661:   ISLocalToGlobalMappingRegisterAll();
662:   PetscObjectOptionsBegin((PetscObject)mapping);
663:   PetscOptionsFList("-islocaltoglobalmapping_type","ISLocalToGlobalMapping method","ISLocalToGlobalMappingSetType",ISLocalToGlobalMappingList,(char*)(((PetscObject)mapping)->type_name) ? ((PetscObject)mapping)->type_name : defaulttype,type,256,&flg);
664:   if (flg) {
665:     ISLocalToGlobalMappingSetType(mapping,type);
666:   }
667:   PetscOptionsEnd();
668:   return(0);
669: }

671: /*@
672:    ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
673:    ordering and a global parallel ordering.

675:    Not Collective

677:    Input Parameter:
678: .  mapping - mapping data structure

680:    Level: advanced

682: .seealso: ISLocalToGlobalMappingCreate()
683: @*/
684: PetscErrorCode  ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
685: {

689:   if (!*mapping) return(0);
691:   if (--((PetscObject)(*mapping))->refct > 0) {*mapping = NULL;return(0);}
692:   PetscFree((*mapping)->indices);
693:   PetscFree((*mapping)->info_procs);
694:   PetscFree((*mapping)->info_numprocs);
695:   if ((*mapping)->info_indices) {
696:     PetscInt i;

698:     PetscFree(((*mapping)->info_indices)[0]);
699:     for (i=1; i<(*mapping)->info_nproc; i++) {
700:       PetscFree(((*mapping)->info_indices)[i]);
701:     }
702:     PetscFree((*mapping)->info_indices);
703:   }
704:   if ((*mapping)->info_nodei) {
705:     PetscFree(((*mapping)->info_nodei)[0]);
706:   }
707:   PetscFree2((*mapping)->info_nodec,(*mapping)->info_nodei);
708:   if ((*mapping)->ops->destroy) {
709:     (*(*mapping)->ops->destroy)(*mapping);
710:   }
711:   PetscHeaderDestroy(mapping);
712:   *mapping = NULL;
713:   return(0);
714: }

716: /*@
717:     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
718:     a new index set using the global numbering defined in an ISLocalToGlobalMapping
719:     context.

721:     Collective on is

723:     Input Parameters:
724: +   mapping - mapping between local and global numbering
725: -   is - index set in local numbering

727:     Output Parameters:
728: .   newis - index set in global numbering

730:     Notes:
731:     The output IS will have the same communicator as the input IS.

733:     Level: advanced

735: .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
736:           ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
737: @*/
738: PetscErrorCode  ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
739: {
741:   PetscInt       n,*idxout;
742:   const PetscInt *idxin;


749:   ISGetLocalSize(is,&n);
750:   ISGetIndices(is,&idxin);
751:   PetscMalloc1(n,&idxout);
752:   ISLocalToGlobalMappingApply(mapping,n,idxin,idxout);
753:   ISRestoreIndices(is,&idxin);
754:   ISCreateGeneral(PetscObjectComm((PetscObject)is),n,idxout,PETSC_OWN_POINTER,newis);
755:   return(0);
756: }

758: /*@
759:    ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
760:    and converts them to the global numbering.

762:    Not collective

764:    Input Parameters:
765: +  mapping - the local to global mapping context
766: .  N - number of integers
767: -  in - input indices in local numbering

769:    Output Parameter:
770: .  out - indices in global numbering

772:    Notes:
773:    The in and out array parameters may be identical.

775:    Level: advanced

777: .seealso: ISLocalToGlobalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
778:           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
779:           AOPetscToApplication(), ISGlobalToLocalMappingApply()

781: @*/
782: PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
783: {
784:   PetscInt i,bs,Nmax;

788:   bs   = mapping->bs;
789:   Nmax = bs*mapping->n;
790:   if (bs == 1) {
791:     const PetscInt *idx = mapping->indices;
792:     for (i=0; i<N; i++) {
793:       if (in[i] < 0) {
794:         out[i] = in[i];
795:         continue;
796:       }
797:       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
798:       out[i] = idx[in[i]];
799:     }
800:   } else {
801:     const PetscInt *idx = mapping->indices;
802:     for (i=0; i<N; i++) {
803:       if (in[i] < 0) {
804:         out[i] = in[i];
805:         continue;
806:       }
807:       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
808:       out[i] = idx[in[i]/bs]*bs + (in[i] % bs);
809:     }
810:   }
811:   return(0);
812: }
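
/* Editor's note: an illustrative sketch, not part of isltog.c. Applying a bs = 2 mapping with
   block indices {0,3} maps local indices 0..3 to the global indices {0,1,6,7}; negative input
   entries pass through unchanged. */
static PetscErrorCode ExampleApply(ISLocalToGlobalMapping ltog)
{
  const PetscInt lidx[5] = {0, 1, 2, 3, -1};
  PetscInt       gidx[5];
  PetscErrorCode ierr;

  ierr = ISLocalToGlobalMappingApply(ltog, 5, lidx, gidx);CHKERRQ(ierr); /* gidx = {0,1,6,7,-1} */
  (void)gidx;
  return 0;
}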

814: /*@
815:    ISLocalToGlobalMappingApplyBlock - Takes a list of integers in a local block numbering and converts them to the global block numbering

817:    Not collective

819:    Input Parameters:
820: +  mapping - the local to global mapping context
821: .  N - number of integers
822: -  in - input indices in local block numbering

824:    Output Parameter:
825: .  out - indices in global block numbering

827:    Notes:
828:    The in and out array parameters may be identical.

830:    Example:
831:      If the index values are {0,1,6,7} set with a call to ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,2,2,{0,3}) then the mapping applied to 0
832:      (the first block) would produce 0 and the mapping applied to 1 (the second block) would produce 3.

834:    Level: advanced

836: .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
837:           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
838:           AOPetscToApplication(), ISGlobalToLocalMappingApply()

840: @*/
841: PetscErrorCode ISLocalToGlobalMappingApplyBlock(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
842: {

846:   {
847:     PetscInt i,Nmax = mapping->n;
848:     const PetscInt *idx = mapping->indices;

850:     for (i=0; i<N; i++) {
851:       if (in[i] < 0) {
852:         out[i] = in[i];
853:         continue;
854:       }
855:       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local block index %D too large %D (max) at %D",in[i],Nmax-1,i);
856:       out[i] = idx[in[i]];
857:     }
858:   }
859:   return(0);
860: }
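
/* Editor's note: an illustrative sketch, not part of isltog.c. The block variant of the
   example in the manual page above: for a bs = 2 mapping with block indices {0,3}, local
   block 0 maps to global block 0 and local block 1 maps to global block 3. */
static PetscErrorCode ExampleApplyBlock(ISLocalToGlobalMapping ltog)
{
  const PetscInt lblock[2] = {0, 1};
  PetscInt       gblock[2];
  PetscErrorCode ierr;

  ierr = ISLocalToGlobalMappingApplyBlock(ltog, 2, lblock, gblock);CHKERRQ(ierr); /* gblock = {0,3} */
  (void)gblock;
  return 0;
}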

862: /*@
863:     ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
864:     specified with a global numbering.

866:     Not collective

868:     Input Parameters:
869: +   mapping - mapping between local and global numbering
870: .   type - IS_GTOLM_MASK - maps global indices with no local value to -1 in the output list (i.e., mask them)
871:            IS_GTOLM_DROP - drops the indices with no local value from the output list
872: .   n - number of global indices to map
873: -   idx - global indices to map

875:     Output Parameters:
876: +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
877: -   idxout - local index of each global index, one must pass in an array long enough
878:              to hold all the indices. You can call ISGlobalToLocalMappingApply() with
879:              idxout == NULL to determine the required length (returned in nout)
880:              and then allocate the required space and call ISGlobalToLocalMappingApply()
881:              a second time to set the values.

883:     Notes:
884:     Either nout or idxout may be NULL. idx and idxout may be identical.

886:     For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
887:     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
888:     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.

890:     Level: advanced

892:     Developer Note: The manual page states that idx and idxout may be identical but the calling
893:        sequence declares idx as const so it cannot be the same as idxout.

895: .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),
896:           ISLocalToGlobalMappingDestroy()
897: @*/
898: PetscErrorCode  ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
899: {

904:   if (!mapping->data) {
905:     ISGlobalToLocalMappingSetUp(mapping);
906:   }
907:   (*mapping->ops->globaltolocalmappingapply)(mapping,type,n,idx,nout,idxout);
908:   return(0);
909: }
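
/* Editor's note: an illustrative sketch, not part of isltog.c. It follows the two-call
   pattern described in the notes above: with IS_GTOLM_DROP, a first call with idxout == NULL
   returns the required length in nout, after which the caller allocates and calls again. */
static PetscErrorCode ExampleGlobalToLocal(ISLocalToGlobalMapping mapping, PetscInt n, const PetscInt gidx[])
{
  PetscInt       nout, *lidx;
  PetscErrorCode ierr;

  ierr = ISGlobalToLocalMappingApply(mapping, IS_GTOLM_DROP, n, gidx, &nout, NULL);CHKERRQ(ierr);
  ierr = PetscMalloc1(nout, &lidx);CHKERRQ(ierr);
  ierr = ISGlobalToLocalMappingApply(mapping, IS_GTOLM_DROP, n, gidx, &nout, lidx);CHKERRQ(ierr);
  /* ... use the nout local indices in lidx ... */
  ierr = PetscFree(lidx);CHKERRQ(ierr);
  return 0;
}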

911: /*@
912:     ISGlobalToLocalMappingApplyIS - Creates from an IS in the global numbering
913:     a new index set using the local numbering defined in an ISLocalToGlobalMapping
914:     context.

916:     Not collective

918:     Input Parameters:
919: +   mapping - mapping between local and global numbering
920: .   type - IS_GTOLM_MASK - maps global indices with no local value to -1 in the output list (i.e., mask them)
921:            IS_GTOLM_DROP - drops the indices with no local value from the output list
922: -   is - index set in global numbering

924:     Output Parameter:
925: .   newis - index set in local numbering

927:     Notes:
928:     The output IS will be sequential, as it encodes a purely local operation

930:     Level: advanced

932: .seealso: ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
933:           ISLocalToGlobalMappingDestroy()
934: @*/
935: PetscErrorCode  ISGlobalToLocalMappingApplyIS(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,IS is,IS *newis)
936: {
938:   PetscInt       n,nout,*idxout;
939:   const PetscInt *idxin;


946:   ISGetLocalSize(is,&n);
947:   ISGetIndices(is,&idxin);
948:   if (type == IS_GTOLM_MASK) {
949:     PetscMalloc1(n,&idxout);
950:   } else {
951:     ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,NULL);
952:     PetscMalloc1(nout,&idxout);
953:   }
954:   ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,idxout);
955:   ISRestoreIndices(is,&idxin);
956:   ISCreateGeneral(PETSC_COMM_SELF,nout,idxout,PETSC_OWN_POINTER,newis);
957:   return(0);
958: }

960: /*@
961:     ISGlobalToLocalMappingApplyBlock - Provides the local block numbering for a list of integers
962:     specified with a block global numbering.

964:     Not collective

966:     Input Parameters:
967: +   mapping - mapping between local and global numbering
968: .   type - IS_GTOLM_MASK - maps global indices with no local value to -1 in the output list (i.e., mask them)
969:            IS_GTOLM_DROP - drops the indices with no local value from the output list
970: .   n - number of global indices to map
971: -   idx - global indices to map

973:     Output Parameters:
974: +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
975: -   idxout - local index of each global index, one must pass in an array long enough
976:              to hold all the indices. You can call ISGlobalToLocalMappingApplyBlock() with
977:              idxout == NULL to determine the required length (returned in nout)
978:              and then allocate the required space and call ISGlobalToLocalMappingApplyBlock()
979:              a second time to set the values.

981:     Notes:
982:     Either nout or idxout may be NULL. idx and idxout may be identical.

984:     For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
985:     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
986:     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.

988:     Level: advanced

990:     Developer Note: The manual page states that idx and idxout may be identical but the calling
991:        sequence declares idx as const so it cannot be the same as idxout.

993: .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
994:           ISLocalToGlobalMappingDestroy()
995: @*/
996: PetscErrorCode  ISGlobalToLocalMappingApplyBlock(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,
997:                                                  PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
998: {

1003:   if (!mapping->data) {
1004:     ISGlobalToLocalMappingSetUp(mapping);
1005:   }
1006:   (*mapping->ops->globaltolocalmappingapplyblock)(mapping,type,n,idx,nout,idxout);
1007:   return(0);
1008: }


1011: /*@C
1012:     ISLocalToGlobalMappingGetBlockInfo - Gets the neighbor information for each processor and
1013:      each index shared by more than one processor

1015:     Collective on ISLocalToGlobalMapping

1017:     Input Parameter:
1018: .   mapping - the mapping from local to global indexing

1020:     Output Parameters:
1021: +   nproc - number of processors that are connected to this one
1022: .   proc - neighboring processors
1023: .   numproc - number of indices for each subdomain (processor)
1024: -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)

1026:     Level: advanced

1028:     Fortran Usage:
1029: $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
1030: $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
1031:           PetscInt indices[nproc][numprocmax],ierr)
1032:         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
1033:         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.


1036: .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1037:           ISLocalToGlobalMappingRestoreInfo()
1038: @*/
1039: PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1040: {

1045:   if (mapping->info_cached) {
1046:     *nproc    = mapping->info_nproc;
1047:     *procs    = mapping->info_procs;
1048:     *numprocs = mapping->info_numprocs;
1049:     *indices  = mapping->info_indices;
1050:   } else {
1051:     ISLocalToGlobalMappingGetBlockInfo_Private(mapping,nproc,procs,numprocs,indices);
1052:   }
1053:   return(0);
1054: }

1056: static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1057: {
1059:   PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
1060:   PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
1061:   PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
1062:   PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
1063:   PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
1064:   PetscInt       first_procs,first_numprocs,*first_indices;
1065:   MPI_Request    *recv_waits,*send_waits;
1066:   MPI_Status     recv_status,*send_status,*recv_statuses;
1067:   MPI_Comm       comm;
1068:   PetscBool      debug = PETSC_FALSE;

1071:   PetscObjectGetComm((PetscObject)mapping,&comm);
1072:   MPI_Comm_size(comm,&size);
1073:   MPI_Comm_rank(comm,&rank);
1074:   if (size == 1) {
1075:     *nproc         = 0;
1076:     *procs         = NULL;
1077:     PetscNew(numprocs);
1078:     (*numprocs)[0] = 0;
1079:     PetscNew(indices);
1080:     (*indices)[0]  = NULL;
1081:     /* save info for reuse */
1082:     mapping->info_nproc = *nproc;
1083:     mapping->info_procs = *procs;
1084:     mapping->info_numprocs = *numprocs;
1085:     mapping->info_indices = *indices;
1086:     mapping->info_cached = PETSC_TRUE;
1087:     return(0);
1088:   }

1090:   PetscOptionsGetBool(((PetscObject)mapping)->options,NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,NULL);

1092:   /*
1093:     Notes on ISLocalToGlobalMappingGetBlockInfo

1095:     globally owned node - the nodes that have been assigned to this processor in global
1096:            numbering, just for this routine.

1098:     nontrivial globally owned node - node assigned to this processor that is on a subdomain
1099:            boundary (i.e. it has more than one local owner)

1101:     locally owned node - node that exists on this processor's subdomain

1103:     nontrivial locally owned node - node that is not in the interior (i.e. has more than one
1104:            local subdomain)
1105:   */
1106:   PetscObjectGetNewTag((PetscObject)mapping,&tag1);
1107:   PetscObjectGetNewTag((PetscObject)mapping,&tag2);
1108:   PetscObjectGetNewTag((PetscObject)mapping,&tag3);

1110:   for (i=0; i<n; i++) {
1111:     if (lindices[i] > max) max = lindices[i];
1112:   }
1113:   MPIU_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);
1114:   Ng++;
1115:   MPI_Comm_size(comm,&size);
1116:   MPI_Comm_rank(comm,&rank);
1117:   scale  = Ng/size + 1;
1118:   ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
1119:   rstart = scale*rank;

1121:   /* determine ownership ranges of global indices */
1122:   PetscMalloc1(2*size,&nprocs);
1123:   PetscArrayzero(nprocs,2*size);

1125:   /* determine owners of each local node  */
1126:   PetscMalloc1(n,&owner);
1127:   for (i=0; i<n; i++) {
1128:     proc             = lindices[i]/scale; /* processor that globally owns this index */
1129:     nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
1130:     owner[i]         = proc;
1131:     nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
1132:   }
1133:   nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
1134:   PetscInfo1(mapping,"Number of global owners for my local data %D\n",nsends);

1136:   /* inform other processors of number of messages and max length*/
1137:   PetscMaxSum(comm,nprocs,&nmax,&nrecvs);
1138:   PetscInfo1(mapping,"Number of local owners for my global data %D\n",nrecvs);

1140:   /* post receives for owned rows */
1141:   PetscMalloc1((2*nrecvs+1)*(nmax+1),&recvs);
1142:   PetscMalloc1(nrecvs+1,&recv_waits);
1143:   for (i=0; i<nrecvs; i++) {
1144:     MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);
1145:   }

1147:   /* pack messages containing lists of local nodes to owners */
1148:   PetscMalloc1(2*n+1,&sends);
1149:   PetscMalloc1(size+1,&starts);
1150:   starts[0] = 0;
1151:   for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
1152:   for (i=0; i<n; i++) {
1153:     sends[starts[owner[i]]++] = lindices[i];
1154:     sends[starts[owner[i]]++] = i;
1155:   }
1156:   PetscFree(owner);
1157:   starts[0] = 0;
1158:   for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];

1160:   /* send the messages */
1161:   PetscMalloc1(nsends+1,&send_waits);
1162:   PetscMalloc1(nsends+1,&dest);
1163:   cnt = 0;
1164:   for (i=0; i<size; i++) {
1165:     if (nprocs[2*i]) {
1166:       MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);
1167:       dest[cnt] = i;
1168:       cnt++;
1169:     }
1170:   }
1171:   PetscFree(starts);

1173:   /* wait on receives */
1174:   PetscMalloc1(nrecvs+1,&source);
1175:   PetscMalloc1(nrecvs+1,&len);
1176:   cnt  = nrecvs;
1177:   PetscCalloc1(ng+1,&nownedsenders);
1178:   while (cnt) {
1179:     MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);
1180:     /* unpack receives into our local space */
1181:     MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);
1182:     source[imdex] = recv_status.MPI_SOURCE;
1183:     len[imdex]    = len[imdex]/2;
1184:     /* count how many local owners for each of my global owned indices */
1185:     for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
1186:     cnt--;
1187:   }
1188:   PetscFree(recv_waits);

1190:   /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
1191:   nowned  = 0;
1192:   nownedm = 0;
1193:   for (i=0; i<ng; i++) {
1194:     if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
1195:   }

1197:   /* create single array to contain rank of all local owners of each globally owned index */
1198:   PetscMalloc1(nownedm+1,&ownedsenders);
1199:   PetscMalloc1(ng+1,&starts);
1200:   starts[0] = 0;
1201:   for (i=1; i<ng; i++) {
1202:     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
1203:     else starts[i] = starts[i-1];
1204:   }

1206:   /* for each nontrivial globally owned node list all arriving processors */
1207:   for (i=0; i<nrecvs; i++) {
1208:     for (j=0; j<len[i]; j++) {
1209:       node = recvs[2*i*nmax+2*j]-rstart;
1210:       if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
1211:     }
1212:   }

1214:   if (debug) { /* -----------------------------------  */
1215:     starts[0] = 0;
1216:     for (i=1; i<ng; i++) {
1217:       if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
1218:       else starts[i] = starts[i-1];
1219:     }
1220:     for (i=0; i<ng; i++) {
1221:       if (nownedsenders[i] > 1) {
1222:         PetscSynchronizedPrintf(comm,"[%d] global node %D local owner processors: ",rank,i+rstart);
1223:         for (j=0; j<nownedsenders[i]; j++) {
1224:           PetscSynchronizedPrintf(comm,"%D ",ownedsenders[starts[i]+j]);
1225:         }
1226:         PetscSynchronizedPrintf(comm,"\n");
1227:       }
1228:     }
1229:     PetscSynchronizedFlush(comm,PETSC_STDOUT);
1230:   } /* -----------------------------------  */

1232:   /* wait on original sends */
1233:   if (nsends) {
1234:     PetscMalloc1(nsends,&send_status);
1235:     MPI_Waitall(nsends,send_waits,send_status);
1236:     PetscFree(send_status);
1237:   }
1238:   PetscFree(send_waits);
1239:   PetscFree(sends);
1240:   PetscFree(nprocs);

1242:   /* pack messages to send back to local owners */
1243:   starts[0] = 0;
1244:   for (i=1; i<ng; i++) {
1245:     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
1246:     else starts[i] = starts[i-1];
1247:   }
1248:   nsends2 = nrecvs;
1249:   PetscMalloc1(nsends2+1,&nprocs); /* length of each message */
1250:   for (i=0; i<nrecvs; i++) {
1251:     nprocs[i] = 1;
1252:     for (j=0; j<len[i]; j++) {
1253:       node = recvs[2*i*nmax+2*j]-rstart;
1254:       if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
1255:     }
1256:   }
1257:   nt = 0;
1258:   for (i=0; i<nsends2; i++) nt += nprocs[i];

1260:   PetscMalloc1(nt+1,&sends2);
1261:   PetscMalloc1(nsends2+1,&starts2);

1263:   starts2[0] = 0;
1264:   for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
1265:   /*
1266:      Each message is 1 + nprocs[i] long, and consists of
1267:        (0) the number of nodes being sent back
1268:        (1) the local node number,
1269:        (2) the number of processors sharing it,
1270:        (3) the processors sharing it
1271:   */
1272:   for (i=0; i<nsends2; i++) {
1273:     cnt = 1;
1274:     sends2[starts2[i]] = 0;
1275:     for (j=0; j<len[i]; j++) {
1276:       node = recvs[2*i*nmax+2*j]-rstart;
1277:       if (nownedsenders[node] > 1) {
1278:         sends2[starts2[i]]++;
1279:         sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
1280:         sends2[starts2[i]+cnt++] = nownedsenders[node];
1281:         PetscArraycpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]);
1282:         cnt += nownedsenders[node];
1283:       }
1284:     }
1285:   }

1287:   /* receive the message lengths */
1288:   nrecvs2 = nsends;
1289:   PetscMalloc1(nrecvs2+1,&lens2);
1290:   PetscMalloc1(nrecvs2+1,&starts3);
1291:   PetscMalloc1(nrecvs2+1,&recv_waits);
1292:   for (i=0; i<nrecvs2; i++) {
1293:     MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);
1294:   }

1296:   /* send the message lengths */
1297:   for (i=0; i<nsends2; i++) {
1298:     MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);
1299:   }

1301:   /* wait on receives of lens */
1302:   if (nrecvs2) {
1303:     PetscMalloc1(nrecvs2,&recv_statuses);
1304:     MPI_Waitall(nrecvs2,recv_waits,recv_statuses);
1305:     PetscFree(recv_statuses);
1306:   }
1307:   PetscFree(recv_waits);

1309:   starts3[0] = 0;
1310:   nt         = 0;
1311:   for (i=0; i<nrecvs2-1; i++) {
1312:     starts3[i+1] = starts3[i] + lens2[i];
1313:     nt          += lens2[i];
1314:   }
1315:   if (nrecvs2) nt += lens2[nrecvs2-1];

1317:   PetscMalloc1(nt+1,&recvs2);
1318:   PetscMalloc1(nrecvs2+1,&recv_waits);
1319:   for (i=0; i<nrecvs2; i++) {
1320:     MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);
1321:   }

1323:   /* send the messages */
1324:   PetscMalloc1(nsends2+1,&send_waits);
1325:   for (i=0; i<nsends2; i++) {
1326:     MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);
1327:   }

1329:   /* wait on receives */
1330:   if (nrecvs2) {
1331:     PetscMalloc1(nrecvs2,&recv_statuses);
1332:     MPI_Waitall(nrecvs2,recv_waits,recv_statuses);
1333:     PetscFree(recv_statuses);
1334:   }
1335:   PetscFree(recv_waits);
1336:   PetscFree(nprocs);

1338:   if (debug) { /* -----------------------------------  */
1339:     cnt = 0;
1340:     for (i=0; i<nrecvs2; i++) {
1341:       nt = recvs2[cnt++];
1342:       for (j=0; j<nt; j++) {
1343:         PetscSynchronizedPrintf(comm,"[%d] local node %D number of subdomains %D: ",rank,recvs2[cnt],recvs2[cnt+1]);
1344:         for (k=0; k<recvs2[cnt+1]; k++) {
1345:           PetscSynchronizedPrintf(comm,"%D ",recvs2[cnt+2+k]);
1346:         }
1347:         cnt += 2 + recvs2[cnt+1];
1348:         PetscSynchronizedPrintf(comm,"\n");
1349:       }
1350:     }
1351:     PetscSynchronizedFlush(comm,PETSC_STDOUT);
1352:   } /* -----------------------------------  */

1354:   /* count the number of subdomains for each local node */
1355:   PetscCalloc1(size,&nprocs);
1356:   cnt  = 0;
1357:   for (i=0; i<nrecvs2; i++) {
1358:     nt = recvs2[cnt++];
1359:     for (j=0; j<nt; j++) {
1360:       for (k=0; k<recvs2[cnt+1]; k++) nprocs[recvs2[cnt+2+k]]++;
1361:       cnt += 2 + recvs2[cnt+1];
1362:     }
1363:   }
1364:   nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
1365:   *nproc    = nt;
1366:   PetscMalloc1(nt+1,procs);
1367:   PetscMalloc1(nt+1,numprocs);
1368:   PetscMalloc1(nt+1,indices);
1369:   for (i=0;i<nt+1;i++) (*indices)[i]=NULL;
1370:   PetscMalloc1(size,&bprocs);
1371:   cnt  = 0;
1372:   for (i=0; i<size; i++) {
1373:     if (nprocs[i] > 0) {
1374:       bprocs[i]        = cnt;
1375:       (*procs)[cnt]    = i;
1376:       (*numprocs)[cnt] = nprocs[i];
1377:       PetscMalloc1(nprocs[i],&(*indices)[cnt]);
1378:       cnt++;
1379:     }
1380:   }

1382:   /* make the list of subdomains for each nontrivial local node */
1383:   PetscArrayzero(*numprocs,nt);
1384:   cnt  = 0;
1385:   for (i=0; i<nrecvs2; i++) {
1386:     nt = recvs2[cnt++];
1387:     for (j=0; j<nt; j++) {
1388:       for (k=0; k<recvs2[cnt+1]; k++) (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
1389:       cnt += 2 + recvs2[cnt+1];
1390:     }
1391:   }
1392:   PetscFree(bprocs);
1393:   PetscFree(recvs2);

1395:   /* sort the node indexing by their global numbers */
1396:   nt = *nproc;
1397:   for (i=0; i<nt; i++) {
1398:     PetscMalloc1((*numprocs)[i],&tmp);
1399:     for (j=0; j<(*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
1400:     PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);
1401:     PetscFree(tmp);
1402:   }

1404:   if (debug) { /* -----------------------------------  */
1405:     nt = *nproc;
1406:     for (i=0; i<nt; i++) {
1407:       PetscSynchronizedPrintf(comm,"[%d] subdomain %D number of indices %D: ",rank,(*procs)[i],(*numprocs)[i]);
1408:       for (j=0; j<(*numprocs)[i]; j++) {
1409:         PetscSynchronizedPrintf(comm,"%D ",(*indices)[i][j]);
1410:       }
1411:       PetscSynchronizedPrintf(comm,"\n");
1412:     }
1413:     PetscSynchronizedFlush(comm,PETSC_STDOUT);
1414:   } /* -----------------------------------  */

1416:   /* wait on sends */
1417:   if (nsends2) {
1418:     PetscMalloc1(nsends2,&send_status);
1419:     MPI_Waitall(nsends2,send_waits,send_status);
1420:     PetscFree(send_status);
1421:   }

1423:   PetscFree(starts3);
1424:   PetscFree(dest);
1425:   PetscFree(send_waits);

1427:   PetscFree(nownedsenders);
1428:   PetscFree(ownedsenders);
1429:   PetscFree(starts);
1430:   PetscFree(starts2);
1431:   PetscFree(lens2);

1433:   PetscFree(source);
1434:   PetscFree(len);
1435:   PetscFree(recvs);
1436:   PetscFree(nprocs);
1437:   PetscFree(sends2);

1439:   /* put the information about myself as the first entry in the list */
1440:   first_procs    = (*procs)[0];
1441:   first_numprocs = (*numprocs)[0];
1442:   first_indices  = (*indices)[0];
1443:   for (i=0; i<*nproc; i++) {
1444:     if ((*procs)[i] == rank) {
1445:       (*procs)[0]    = (*procs)[i];
1446:       (*numprocs)[0] = (*numprocs)[i];
1447:       (*indices)[0]  = (*indices)[i];
1448:       (*procs)[i]    = first_procs;
1449:       (*numprocs)[i] = first_numprocs;
1450:       (*indices)[i]  = first_indices;
1451:       break;
1452:     }
1453:   }

1455:   /* save info for reuse */
1456:   mapping->info_nproc = *nproc;
1457:   mapping->info_procs = *procs;
1458:   mapping->info_numprocs = *numprocs;
1459:   mapping->info_indices = *indices;
1460:   mapping->info_cached = PETSC_TRUE;
1461:   return(0);
1462: }

1464: /*@C
1465:     ISLocalToGlobalMappingRestoreBlockInfo - Frees the memory allocated by ISLocalToGlobalMappingGetBlockInfo()

1467:     Collective on ISLocalToGlobalMapping

1469:     Input Parameter:
1470: .   mapping - the mapping from local to global indexing

1472:     Output Parameters:
1473: +   nproc - number of processors that are connected to this one
1474: .   proc - neighboring processors
1475: .   numproc - number of indices for each processor
1476: -   indices - indices of local nodes shared with neighbor (sorted by global numbering)

1478:     Level: advanced

1480: .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1481:           ISLocalToGlobalMappingGetInfo()
1482: @*/
1483: PetscErrorCode  ISLocalToGlobalMappingRestoreBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1484: {

1489:   if (mapping->info_free) {
1490:     PetscFree(*numprocs);
1491:     if (*indices) {
1492:       PetscInt i;

1494:       PetscFree((*indices)[0]);
1495:       for (i=1; i<*nproc; i++) {
1496:         PetscFree((*indices)[i]);
1497:       }
1498:       PetscFree(*indices);
1499:     }
1500:   }
1501:   *nproc    = 0;
1502:   *procs    = NULL;
1503:   *numprocs = NULL;
1504:   *indices  = NULL;
1505:   return(0);
1506: }

1508: /*@C
1509:     ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
1510:      each index shared by more than one processor

1512:     Collective on ISLocalToGlobalMapping

1514:     Input Parameter:
1515: .   mapping - the mapping from local to global indexing

1517:     Output Parameters:
1518: +   nproc - number of processors that are connected to this one
1519: .   proc - neighboring processors
1520: .   numproc - number of indices for each subdomain (processor)
1521: -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)

1523:     Level: advanced

1525:     Notes: The user needs to call ISLocalToGlobalMappingRestoreInfo() when the data is no longer needed.

1527:     Fortran Usage:
1528: $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
1529: $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
1530:           PetscInt indices[nproc][numprocmax],ierr)
1531:         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
1532:         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.


1535: .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1536:           ISLocalToGlobalMappingRestoreInfo()
1537: @*/
1538: PetscErrorCode  ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1539: {
1541:   PetscInt       **bindices = NULL,*bnumprocs = NULL,bs = mapping->bs,i,j,k;

1545:   ISLocalToGlobalMappingGetBlockInfo(mapping,nproc,procs,&bnumprocs,&bindices);
1546:   if (bs > 1) { /* we need to expand the cached info */
1547:     PetscCalloc1(*nproc,&*indices);
1548:     PetscCalloc1(*nproc,&*numprocs);
1549:     for (i=0; i<*nproc; i++) {
1550:       PetscMalloc1(bs*bnumprocs[i],&(*indices)[i]);
1551:       for (j=0; j<bnumprocs[i]; j++) {
1552:         for (k=0; k<bs; k++) {
1553:           (*indices)[i][j*bs+k] = bs*bindices[i][j] + k;
1554:         }
1555:       }
1556:       (*numprocs)[i] = bnumprocs[i]*bs;
1557:     }
1558:     mapping->info_free = PETSC_TRUE;
1559:   } else {
1560:     *numprocs = bnumprocs;
1561:     *indices  = bindices;
1562:   }
1563:   return(0);
1564: }
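
/* Editor's note: an illustrative sketch, not part of isltog.c. It queries and releases the
   neighbor information; as arranged in ISLocalToGlobalMappingGetBlockInfo_Private(), entry 0
   of the returned arrays describes the calling process itself when it appears in the list. */
static PetscErrorCode ExampleGetInfo(ISLocalToGlobalMapping mapping)
{
  PetscInt       nproc, *procs, *numprocs, **indices, i;
  PetscErrorCode ierr;

  ierr = ISLocalToGlobalMappingGetInfo(mapping, &nproc, &procs, &numprocs, &indices);CHKERRQ(ierr);
  for (i = 0; i < nproc; i++) {
    /* process procs[i] shares numprocs[i] local indices with this one, listed in indices[i][] */
  }
  ierr = ISLocalToGlobalMappingRestoreInfo(mapping, &nproc, &procs, &numprocs, &indices);CHKERRQ(ierr);
  return 0;
}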

1566: /*@C
1567:     ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()

1569:     Collective on ISLocalToGlobalMapping

1571:     Input Parameter:
1572: .   mapping - the mapping from local to global indexing

1574:     Output Parameters:
1575: +   nproc - number of processors that are connected to this one
1576: .   proc - neighboring processors
1577: .   numproc - number of indices for each processor
1578: -   indices - indices of local nodes shared with neighbor (sorted by global numbering)

1580:     Level: advanced

1582: .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1583:           ISLocalToGlobalMappingGetInfo()
1584: @*/
1585: PetscErrorCode  ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1586: {

1590:   ISLocalToGlobalMappingRestoreBlockInfo(mapping,nproc,procs,numprocs,indices);
1591:   return(0);
1592: }

1594: /*@C
1595:     ISLocalToGlobalMappingGetNodeInfo - Gets the neighbor information for each node

1597:     Collective on ISLocalToGlobalMapping

1599:     Input Parameter:
1600: .   mapping - the mapping from local to global indexing

1602:     Output Parameters:
1603: +   nnodes - number of local nodes (the same value returned by ISLocalToGlobalMappingGetSize())
1604: .   count - number of neighboring processors per node
1605: -   indices - indices of processes sharing the node (sorted)

1607:     Level: advanced

1609:     Notes: The user needs to call ISLocalToGlobalMappingRestoreNodeInfo() when the data is no longer needed.
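
    A minimal usage sketch in C (variable names are illustrative; error checking omitted):
$       PetscInt nnodes, *count, **ranks;
$       ISLocalToGlobalMappingGetNodeInfo(mapping, &nnodes, &count, &ranks);
$       for (i = 0; i < nnodes; i++) {
$         /* local node i is shared by count[i] processes, whose ranks are ranks[i][0..count[i]-1] */
$       }
$       ISLocalToGlobalMappingRestoreNodeInfo(mapping, &nnodes, &count, &ranks);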

1611: .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1612:           ISLocalToGlobalMappingGetInfo(), ISLocalToGlobalMappingRestoreNodeInfo()
1613: @*/
1614: PetscErrorCode  ISLocalToGlobalMappingGetNodeInfo(ISLocalToGlobalMapping mapping,PetscInt *nnodes,PetscInt *count[],PetscInt **indices[])
1615: {
1616:   PetscInt       n;

1621:   ISLocalToGlobalMappingGetSize(mapping,&n);
1622:   if (!mapping->info_nodec) {
1623:     PetscInt i,m,n_neigh,*neigh,*n_shared,**shared;

1625:     PetscMalloc2(n+1,&mapping->info_nodec,n,&mapping->info_nodei);
1626:     ISLocalToGlobalMappingGetInfo(mapping,&n_neigh,&neigh,&n_shared,&shared);
1627:     for (i=0;i<n;i++) { mapping->info_nodec[i] = 1;}
1628:     m = n;
1629:     mapping->info_nodec[n] = 0;
1630:     for (i=1;i<n_neigh;i++) {
1631:       PetscInt j;

1633:       m += n_shared[i];
1634:       for (j=0;j<n_shared[i];j++) mapping->info_nodec[shared[i][j]] += 1;
1635:     }
1636:     if (n) { PetscMalloc1(m,&mapping->info_nodei[0]); }
1637:     for (i=1;i<n;i++) mapping->info_nodei[i] = mapping->info_nodei[i-1] + mapping->info_nodec[i-1];
1638:     PetscArrayzero(mapping->info_nodec,n);
1639:     for (i=0;i<n;i++) { mapping->info_nodec[i] = 1; mapping->info_nodei[i][0] = neigh[0]; }
1640:     for (i=1;i<n_neigh;i++) {
1641:       PetscInt j;

1643:       for (j=0;j<n_shared[i];j++) {
1644:         PetscInt k = shared[i][j];

1646:         mapping->info_nodei[k][mapping->info_nodec[k]] = neigh[i];
1647:         mapping->info_nodec[k] += 1;
1648:       }
1649:     }
1650:     for (i=0;i<n;i++) { PetscSortRemoveDupsInt(&mapping->info_nodec[i],mapping->info_nodei[i]); }
1651:     ISLocalToGlobalMappingRestoreInfo(mapping,&n_neigh,&neigh,&n_shared,&shared);
1652:   }
1653:   if (nnodes)  *nnodes  = n;
1654:   if (count)   *count   = mapping->info_nodec;
1655:   if (indices) *indices = mapping->info_nodei;
1656:   return(0);
1657: }

1659: /*@C
1660:     ISLocalToGlobalMappingRestoreNodeInfo - Frees the memory allocated by ISLocalToGlobalMappingGetNodeInfo()

1662:     Collective on ISLocalToGlobalMapping

1664:     Input Parameter:
1665: .   mapping - the mapping from local to global indexing

1667:     Output Parameters:
1668: +   nnodes - number of local nodes
1669: .   count - number of neighboring processors per node
1670: -   indices - indices of processes sharing the node (sorted)

1672:     Level: advanced

1674: .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1675:           ISLocalToGlobalMappingGetInfo(), ISLocalToGlobalMappingGetNodeInfo()
1676: @*/
1677: PetscErrorCode  ISLocalToGlobalMappingRestoreNodeInfo(ISLocalToGlobalMapping mapping,PetscInt *nnodes,PetscInt *count[],PetscInt **indices[])
1678: {
1681:   if (nnodes)  *nnodes  = 0;
1682:   if (count)   *count   = NULL;
1683:   if (indices) *indices = NULL;
1684:   return(0);
1685: }

1687: /*@C
1688:    ISLocalToGlobalMappingGetIndices - Get global indices for every local point that is mapped

1690:    Not Collective

1692:    Input Arguments:
1693: . ltog - local to global mapping

1695:    Output Arguments:
1696: . array - array of indices; the length of this array may be obtained with ISLocalToGlobalMappingGetSize()

1698:    Level: advanced

1700:    Notes:
1701:     ISLocalToGlobalMappingGetSize() returns the length of this array
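
    A minimal usage sketch (error checking omitted):
$      PetscInt       n;
$      const PetscInt *globals;
$      ISLocalToGlobalMappingGetSize(ltog, &n);
$      ISLocalToGlobalMappingGetIndices(ltog, &globals);
$      /* globals[i] is the global index of local point i, for 0 <= i < n */
$      ISLocalToGlobalMappingRestoreIndices(ltog, &globals);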

1703: .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices(), ISLocalToGlobalMappingGetBlockIndices(), ISLocalToGlobalMappingRestoreBlockIndices()
1704: @*/
1705: PetscErrorCode  ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1706: {
1710:   if (ltog->bs == 1) {
1711:     *array = ltog->indices;
1712:   } else {
1713:     PetscInt       *jj,k,i,j,n = ltog->n, bs = ltog->bs;
1714:     const PetscInt *ii;

1717:     PetscMalloc1(bs*n,&jj);
1718:     *array = jj;
1719:     k    = 0;
1720:     ii   = ltog->indices;
1721:     for (i=0; i<n; i++)
1722:       for (j=0; j<bs; j++)
1723:         jj[k++] = bs*ii[i] + j;
1724:   }
1725:   return(0);
1726: }

1728: /*@C
1729:    ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingGetIndices()

1731:    Not Collective

1733:    Input Arguments:
1734: + ltog - local to global mapping
1735: - array - array of indices

1737:    Level: advanced

1739: .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1740: @*/
1741: PetscErrorCode  ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1742: {
1746:   if (ltog->bs == 1 && *array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");

1748:   if (ltog->bs > 1) {
1750:     PetscFree(*(void**)array);
1751:   }
1752:   return(0);
1753: }

1755: /*@C
1756:    ISLocalToGlobalMappingGetBlockIndices - Get global indices for every local block

1758:    Not Collective

1760:    Input Arguments:
1761: . ltog - local to global mapping

1763:    Output Arguments:
1764: . array - array of indices
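
   Notes:
   The array has one entry per block, so its length is ISLocalToGlobalMappingGetSize() divided by the block size.

   A minimal usage sketch (error checking omitted):
$      PetscInt       n, bs;
$      const PetscInt *bidx;
$      ISLocalToGlobalMappingGetSize(ltog, &n);
$      ISLocalToGlobalMappingGetBlockSize(ltog, &bs);
$      ISLocalToGlobalMappingGetBlockIndices(ltog, &bidx);
$      /* bidx[i] is the global block index of local block i, for 0 <= i < n/bs */
$      ISLocalToGlobalMappingRestoreBlockIndices(ltog, &bidx);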

1766:    Level: advanced

1768: .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreBlockIndices()
1769: @*/
1770: PetscErrorCode  ISLocalToGlobalMappingGetBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1771: {
1775:   *array = ltog->indices;
1776:   return(0);
1777: }

1779: /*@C
1780:    ISLocalToGlobalMappingRestoreBlockIndices - Restore indices obtained with ISLocalToGlobalMappingGetBlockIndices()

1782:    Not Collective

1784:    Input Arguments:
1785: + ltog - local to global mapping
1786: - array - array of indices

1788:    Level: advanced

1790: .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1791: @*/
1792: PetscErrorCode  ISLocalToGlobalMappingRestoreBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1793: {
1797:   if (*array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1798:   *array = NULL;
1799:   return(0);
1800: }

1802: /*@C
1803:    ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings

1805:    Not Collective

1807:    Input Arguments:
1808: + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
1809: . n - number of mappings to concatenate
1810: - ltogs - local to global mappings

1812:    Output Arguments:
1813: . ltogcat - new mapping

1815:    Note: This currently always returns a mapping with a block size of 1
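
   A minimal usage sketch concatenating two existing mappings (error checking omitted):
$      ISLocalToGlobalMapping ltogs[2], cat;
$      /* ... create ltogs[0] and ltogs[1] ... */
$      ISLocalToGlobalMappingConcatenate(PETSC_COMM_WORLD, 2, ltogs, &cat);
$      /* local index i of ltogs[1] becomes local index (local size of ltogs[0]) + i of cat */
$      ISLocalToGlobalMappingDestroy(&cat);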

1817:    Developer Note: If all the input mappings have the same block size, we could easily handle that as a special case

1819:    Level: advanced

1821: .seealso: ISLocalToGlobalMappingCreate()
1822: @*/
1823: PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat)
1824: {
1825:   PetscInt       i,cnt,m,*idx;

1829:   if (n < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %D",n);
1833:   for (cnt=0,i=0; i<n; i++) {
1834:     ISLocalToGlobalMappingGetSize(ltogs[i],&m);
1835:     cnt += m;
1836:   }
1837:   PetscMalloc1(cnt,&idx);
1838:   for (cnt=0,i=0; i<n; i++) {
1839:     const PetscInt *subidx;
1840:     ISLocalToGlobalMappingGetSize(ltogs[i],&m);
1841:     ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx);
1842:     PetscArraycpy(&idx[cnt],subidx,m);
1843:     ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx);
1844:     cnt += m;
1845:   }
1846:   ISLocalToGlobalMappingCreate(comm,1,cnt,idx,PETSC_OWN_POINTER,ltogcat);
1847:   return(0);
1848: }

1850: /*MC
1851:       ISLOCALTOGLOBALMAPPINGBASIC - basic implementation of the ISLocalToGlobalMapping object. When ISGlobalToLocalMappingApply() is
1852:                                     used, this is appropriate only for small and moderate size problems.

1854:    Options Database Keys:
1855: .   -islocaltoglobalmapping_type basic - select this method

1857:    Level: beginner

1859: .seealso:  ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetType(), ISLOCALTOGLOBALMAPPINGHASH
1860: M*/
1861: PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Basic(ISLocalToGlobalMapping ltog)
1862: {
1864:   ltog->ops->globaltolocalmappingapply      = ISGlobalToLocalMappingApply_Basic;
1865:   ltog->ops->globaltolocalmappingsetup      = ISGlobalToLocalMappingSetUp_Basic;
1866:   ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Basic;
1867:   ltog->ops->destroy                        = ISLocalToGlobalMappingDestroy_Basic;
1868:   return(0);
1869: }

1871: /*MC
1872:       ISLOCALTOGLOBALMAPPINGHASH - hash implementation of the ISLocalToGlobalMapping object. When ISGlobalToLocalMappingApply() is
1873:                                     used, this is appropriate for large problems where memory usage is a concern.

1875:    Options Database Keys:
1876: .   -islocaltoglobalmapping_type hash - select this method


1879:    Notes:
1880:     This is selected automatically for large problems if the user does not set the type.

1882:    Level: beginner

1884: .seealso:  ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetType(), ISLOCALTOGLOBALMAPPINGBASIC
1885: M*/
1886: PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Hash(ISLocalToGlobalMapping ltog)
1887: {
1889:   ltog->ops->globaltolocalmappingapply      = ISGlobalToLocalMappingApply_Hash;
1890:   ltog->ops->globaltolocalmappingsetup      = ISGlobalToLocalMappingSetUp_Hash;
1891:   ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Hash;
1892:   ltog->ops->destroy                        = ISLocalToGlobalMappingDestroy_Hash;
1893:   return(0);
1894: }


1897: /*@C
1898:     ISLocalToGlobalMappingRegister - Adds a method for applying a global to local mapping with an ISLocalToGlobalMapping

1900:    Not Collective

1902:    Input Parameters:
1903: +  sname - name of a new method
1904: -  routine_create - routine to create method context

1906:    Notes:
1907:    ISLocalToGlobalMappingRegister() may be called multiple times to add several user-defined mappings.

1909:    Sample usage:
1910: .vb
1911:    ISLocalToGlobalMappingRegister("my_mapper",MyCreate);
1912: .ve

1914:    Then, your mapping can be chosen with the procedural interface via
1915: $     ISLocalToGlobalMappingSetType(ltog,"my_mapper")
1916:    or at runtime via the option
1917: $     -islocaltoglobalmapping_type my_mapper
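
   The creation routine passed to ISLocalToGlobalMappingRegister() normally just fills in the function table of the
   mapping, following the pattern of ISLocalToGlobalMappingCreate_Basic() in this file. A minimal sketch, where MyCreate,
   MyGlobalToLocalApply and MyDestroy are hypothetical user routines:
$     PetscErrorCode MyCreate(ISLocalToGlobalMapping ltog)
$     {
$       ltog->ops->globaltolocalmappingapply = MyGlobalToLocalApply; /* called by ISGlobalToLocalMappingApply() */
$       ltog->ops->destroy                   = MyDestroy;            /* frees any private data */
$       return(0);
$     }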

1919:    Level: advanced

1921: .seealso: ISLocalToGlobalMappingRegisterAll(), ISLocalToGlobalMappingRegisterDestroy(), ISLOCALTOGLOBALMAPPINGBASIC, ISLOCALTOGLOBALMAPPINGHASH

1923: @*/
1924: PetscErrorCode  ISLocalToGlobalMappingRegister(const char sname[],PetscErrorCode (*function)(ISLocalToGlobalMapping))
1925: {

1929:   ISInitializePackage();
1930:   PetscFunctionListAdd(&ISLocalToGlobalMappingList,sname,function);
1931:   return(0);
1932: }

1934: /*@C
1935:    ISLocalToGlobalMappingSetType - Builds ISLocalToGlobalMapping for a particular global to local mapping approach.

1937:    Logically Collective on ISLocalToGlobalMapping

1939:    Input Parameters:
1940: +  ltog - the ISLocalToGlobalMapping object
1941: -  type - a known method

1943:    Options Database Key:
1944: .  -islocaltoglobalmapping_type  <method> - Sets the method; use -help for a list
1945:     of available methods (for instance, basic or hash)

1947:    Notes:
1948:    See "petsc/include/petscis.h" for available methods

1950:   Normally, it is best to use the ISLocalToGlobalMappingSetFromOptions() command and
1951:   then set the ISLocalToGlobalMapping type from the options database rather than by using
1952:   this routine.
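
  For example, to select the hash implementation explicitly (a minimal sketch):
$     ISLocalToGlobalMappingSetType(ltog, ISLOCALTOGLOBALMAPPINGHASH);
  which is equivalent to giving the runtime option -islocaltoglobalmapping_type hash.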

1954:   Level: intermediate

1956:   Developer Note: ISLocalToGlobalMappingRegister() is used to add new types to ISLocalToGlobalMappingList from which they
1957:   are accessed by ISLocalToGlobalMappingSetType().

1959: .seealso: PCSetType(), ISLocalToGlobalMappingType, ISLocalToGlobalMappingRegister(), ISLocalToGlobalMappingCreate()

1961: @*/
1962: PetscErrorCode  ISLocalToGlobalMappingSetType(ISLocalToGlobalMapping ltog, ISLocalToGlobalMappingType type)
1963: {
1964:   PetscErrorCode ierr,(*r)(ISLocalToGlobalMapping);
1965:   PetscBool      match;


1971:   PetscObjectTypeCompare((PetscObject)ltog,type,&match);
1972:   if (match) return(0);

1974:   PetscFunctionListFind(ISLocalToGlobalMappingList,type,&r);
1975:   if (!r) SETERRQ1(PetscObjectComm((PetscObject)ltog),PETSC_ERR_ARG_UNKNOWN_TYPE,"Unable to find requested ISLocalToGlobalMapping type %s",type);
1976:   /* Destroy the previous private LTOG context */
1977:   if (ltog->ops->destroy) {
1978:     (*ltog->ops->destroy)(ltog);
1979:     ltog->ops->destroy = NULL;
1980:   }
1981:   PetscObjectChangeTypeName((PetscObject)ltog,type);
1982:   (*r)(ltog);
1983:   return(0);
1984: }

1986: PetscBool ISLocalToGlobalMappingRegisterAllCalled = PETSC_FALSE;

1988: /*@C
1989:   ISLocalToGlobalMappingRegisterAll - Registers all of the local to global mapping components in the IS package.

1991:   Not Collective

1993:   Level: advanced

1995: .seealso:  ISRegister(), ISLocalToGlobalMappingRegister()
1996: @*/
1997: PetscErrorCode  ISLocalToGlobalMappingRegisterAll(void)
1998: {

2002:   if (ISLocalToGlobalMappingRegisterAllCalled) return(0);
2003:   ISLocalToGlobalMappingRegisterAllCalled = PETSC_TRUE;

2005:   ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGBASIC, ISLocalToGlobalMappingCreate_Basic);
2006:   ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGHASH, ISLocalToGlobalMappingCreate_Hash);
2007:   return(0);
2008: }