Actual source code: isltog.c


  2: #include <petsc/private/isimpl.h>
  3: #include <petsc/private/hashmapi.h>
  4: #include <petscsf.h>
  5: #include <petscviewer.h>

  7: PetscClassId IS_LTOGM_CLASSID;
  8: static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping,PetscInt*,PetscInt**,PetscInt**,PetscInt***);

 10: typedef struct {
 11:   PetscInt *globals;
 12: } ISLocalToGlobalMapping_Basic;

 14: typedef struct {
 15:   PetscHMapI globalht;
 16: } ISLocalToGlobalMapping_Hash;

 18: /*@C
 19:   ISGetPointRange - Returns a description of the points in an IS suitable for traversal

 21:   Not collective

 23:   Input Parameter:
 24: . pointIS - The IS object

 26:   Output Parameters:
 27: + pStart - The first index, see notes
 28: . pEnd   - One past the last index, see notes
 29: - points - The indices, see notes

 31:   Notes:
 32:   If the IS contains contiguous indices in an ISSTRIDE, then the indices are contained in [pStart, pEnd) and points = NULL. Otherwise, pStart = 0, pEnd = numIndices, and points is an array of the indices. This supports the following pattern
 33: $ ISGetPointRange(is, &pStart, &pEnd, &points);
 34: $ for (p = pStart; p < pEnd; ++p) {
 35: $   const PetscInt point = points ? points[p] : p;
 36: $ }
 37: $ ISRestorePointRange(is, &pStart, &pEnd, &points);

 39:   Level: intermediate

 41: .seealso: ISRestorePointRange(), ISGetPointSubrange(), ISGetIndices(), ISCreateStride()
 42: @*/
 43: PetscErrorCode ISGetPointRange(IS pointIS, PetscInt *pStart, PetscInt *pEnd, const PetscInt **points)
 44: {
 45:   PetscInt       numCells, step = 1;
 46:   PetscBool      isStride;

 50:   *pStart = 0;
 51:   *points = NULL;
 52:   ISGetLocalSize(pointIS, &numCells);
 53:   PetscObjectTypeCompare((PetscObject) pointIS, ISSTRIDE, &isStride);
 54:   if (isStride) {ISStrideGetInfo(pointIS, pStart, &step);}
 55:   *pEnd   = *pStart + numCells;
 56:   if (!isStride || step != 1) {ISGetIndices(pointIS, points);}
 57:   return(0);
 58: }

 60: /*@C
 61:   ISRestorePointRange - Destroys the traversal description

 63:   Not collective

 65:   Input Parameters:
 66: + pointIS - The IS object
 67: . pStart  - The first index, from ISGetPointRange()
 68: . pEnd    - One past the last index, from ISGetPointRange()
 69: - points  - The indices, from ISGetPointRange()

 71:   Notes:
 72:   If the IS contains contiguous indices in an ISSTRIDE, then the indices are contained in [pStart, pEnd) and points = NULL. Otherwise, pStart = 0, pEnd = numIndices, and points is an array of the indices. This supports the following pattern
 73: $ ISGetPointRange(is, &pStart, &pEnd, &points);
 74: $ for (p = pStart; p < pEnd; ++p) {
 75: $   const PetscInt point = points ? points[p] : p;
 76: $ }
 77: $ ISRestorePointRange(is, &pStart, &pEnd, &points);

 79:   Level: intermediate

 81: .seealso: ISGetPointRange(), ISGetPointSubrange(), ISGetIndices(), ISCreateStride()
 82: @*/
 83: PetscErrorCode ISRestorePointRange(IS pointIS, PetscInt *pStart, PetscInt *pEnd, const PetscInt **points)
 84: {
 85:   PetscInt       step = 1;
 86:   PetscBool      isStride;

 90:   PetscObjectTypeCompare((PetscObject) pointIS, ISSTRIDE, &isStride);
 91:   if (isStride) {ISStrideGetInfo(pointIS, pStart, &step);}
 92:   if (!isStride || step != 1) {ISRestoreIndices(pointIS, points);}
 93:   return(0);
 94: }
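/*
   A minimal usage sketch (illustrative, not part of the original isltog.c): traverse an
   existing IS 'pointIS' (an assumed name) with the ISGetPointRange()/ISRestorePointRange()
   pair; the per-point work is a placeholder.

     PetscInt       pStart, pEnd, p;
     const PetscInt *points;
     PetscErrorCode ierr;

     ierr = ISGetPointRange(pointIS, &pStart, &pEnd, &points);CHKERRQ(ierr);
     for (p = pStart; p < pEnd; ++p) {
       const PetscInt point = points ? points[p] : p;   // points == NULL for a contiguous stride
       // ... work on 'point' ...
     }
     ierr = ISRestorePointRange(pointIS, &pStart, &pEnd, &points);CHKERRQ(ierr);
*/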

 96: /*@C
 97:   ISGetPointSubrange - Configures the input IS to be a subrange for the traversal information given

 99:   Not collective

101:   Input Parameters:
102: + subpointIS - The IS object to be configured
103: . pStart     - The first index of the subrange
104: . pEnd       - One past the last index for the subrange
105: - points     - The indices for the entire range, from ISGetPointRange()

107:   Output Parameter:
108: . subpointIS - The IS object now configured to be a subrange

110:   Notes:
111:   The input IS will now respond properly to calls to ISGetPointRange() and return the subrange.

113:   Level: intermediate

115: .seealso: ISGetPointRange(), ISRestorePointRange(), ISGetIndices(), ISCreateStride()
116: @*/
117: PetscErrorCode ISGetPointSubrange(IS subpointIS, PetscInt pStart, PetscInt pEnd, const PetscInt *points)
118: {

122:   if (points) {
123:     ISSetType(subpointIS, ISGENERAL);
124:     ISGeneralSetIndices(subpointIS, pEnd-pStart, &points[pStart], PETSC_USE_POINTER);
125:   } else {
126:     ISSetType(subpointIS, ISSTRIDE);
127:     ISStrideSetStride(subpointIS, pEnd-pStart, pStart, 1);
128:   }
129:   return(0);
130: }

132: /* -----------------------------------------------------------------------------------------*/

134: /*
135:     Creates the global mapping information in the ISLocalToGlobalMapping structure

137:     If the user has not selected how to handle the global to local mapping then use HASH for "large" problems
138: */
139: static PetscErrorCode ISGlobalToLocalMappingSetUp(ISLocalToGlobalMapping mapping)
140: {
141:   PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start;

145:   if (mapping->data) return(0);
146:   end   = 0;
147:   start = PETSC_MAX_INT;

149:   for (i=0; i<n; i++) {
150:     if (idx[i] < 0) continue;
151:     if (idx[i] < start) start = idx[i];
152:     if (idx[i] > end)   end   = idx[i];
153:   }
154:   if (start > end) {start = 0; end = -1;}
155:   mapping->globalstart = start;
156:   mapping->globalend   = end;
157:   if (!((PetscObject)mapping)->type_name) {
158:     if ((end - start) > PetscMax(4*n,1000000)) {
159:       ISLocalToGlobalMappingSetType(mapping,ISLOCALTOGLOBALMAPPINGHASH);
160:     } else {
161:       ISLocalToGlobalMappingSetType(mapping,ISLOCALTOGLOBALMAPPINGBASIC);
162:     }
163:   }
164:   (*mapping->ops->globaltolocalmappingsetup)(mapping);
165:   return(0);
166: }

168: static PetscErrorCode ISGlobalToLocalMappingSetUp_Basic(ISLocalToGlobalMapping mapping)
169: {
170:   PetscErrorCode              ierr;
171:   PetscInt                    i,*idx = mapping->indices,n = mapping->n,end,start,*globals;
172:   ISLocalToGlobalMapping_Basic *map;

175:   start            = mapping->globalstart;
176:   end              = mapping->globalend;
177:   PetscNew(&map);
178:   PetscMalloc1(end-start+2,&globals);
179:   map->globals     = globals;
180:   for (i=0; i<end-start+1; i++) globals[i] = -1;
181:   for (i=0; i<n; i++) {
182:     if (idx[i] < 0) continue;
183:     globals[idx[i] - start] = i;
184:   }
185:   mapping->data = (void*)map;
186:   PetscLogObjectMemory((PetscObject)mapping,(end-start+1)*sizeof(PetscInt));
187:   return(0);
188: }

190: static PetscErrorCode ISGlobalToLocalMappingSetUp_Hash(ISLocalToGlobalMapping mapping)
191: {
192:   PetscErrorCode              ierr;
193:   PetscInt                    i,*idx = mapping->indices,n = mapping->n;
194:   ISLocalToGlobalMapping_Hash *map;

197:   PetscNew(&map);
198:   PetscHMapICreate(&map->globalht);
199:   for (i=0; i<n; i++) {
200:     if (idx[i] < 0) continue;
201:     PetscHMapISet(map->globalht,idx[i],i);
202:   }
203:   mapping->data = (void*)map;
204:   PetscLogObjectMemory((PetscObject)mapping,2*n*sizeof(PetscInt));
205:   return(0);
206: }

208: static PetscErrorCode ISLocalToGlobalMappingDestroy_Basic(ISLocalToGlobalMapping mapping)
209: {
210:   PetscErrorCode              ierr;
211:   ISLocalToGlobalMapping_Basic *map  = (ISLocalToGlobalMapping_Basic *)mapping->data;

214:   if (!map) return(0);
215:   PetscFree(map->globals);
216:   PetscFree(mapping->data);
217:   return(0);
218: }

220: static PetscErrorCode ISLocalToGlobalMappingDestroy_Hash(ISLocalToGlobalMapping mapping)
221: {
222:   PetscErrorCode              ierr;
223:   ISLocalToGlobalMapping_Hash *map  = (ISLocalToGlobalMapping_Hash*)mapping->data;

226:   if (!map) return(0);
227:   PetscHMapIDestroy(&map->globalht);
228:   PetscFree(mapping->data);
229:   return(0);
230: }
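/*
   Note added for clarity: the blocks below generate the concrete global-to-local apply
   routines by including the template header isltog.h once per variant. Each inclusion is
   parameterized by the GTOLTYPE/GTOLNAME/GTOLBS/GTOL macros defined immediately before it,
   selecting either the _Basic (dense array lookup) or _Hash (hash table lookup) data
   structure and either the point-wise or block-wise index flavor.
*/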

232: #define GTOLTYPE _Basic
233: #define GTOLNAME _Basic
234: #define GTOLBS mapping->bs
235: #define GTOL(g, local) do {                  \
236:     local = map->globals[g/bs - start];      \
237:     if (local >= 0) local = bs*local + (g % bs); \
238:   } while (0)

240: #include <../src/vec/is/utils/isltog.h>

242: #define GTOLTYPE _Basic
243: #define GTOLNAME Block_Basic
244: #define GTOLBS 1
245: #define GTOL(g, local) do {                  \
246:     local = map->globals[g - start];         \
247:   } while (0)
248: #include <../src/vec/is/utils/isltog.h>

250: #define GTOLTYPE _Hash
251: #define GTOLNAME _Hash
252: #define GTOLBS mapping->bs
253: #define GTOL(g, local) do {                         \
254:     (void)PetscHMapIGet(map->globalht,g/bs,&local); \
255:     if (local >= 0) local = bs*local + (g % bs);   \
256:    } while (0)
257: #include <../src/vec/is/utils/isltog.h>

259: #define GTOLTYPE _Hash
260: #define GTOLNAME Block_Hash
261: #define GTOLBS 1
262: #define GTOL(g, local) do {                         \
263:     (void)PetscHMapIGet(map->globalht,g,&local);    \
264:   } while (0)
265: #include <../src/vec/is/utils/isltog.h>

267: /*@
268:     ISLocalToGlobalMappingDuplicate - Duplicates the local to global mapping object

270:     Not Collective

272:     Input Parameter:
273: .   ltog - local to global mapping

275:     Output Parameter:
276: .   nltog - the duplicated local to global mapping

278:     Level: advanced

280: .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
281: @*/
282: PetscErrorCode  ISLocalToGlobalMappingDuplicate(ISLocalToGlobalMapping ltog,ISLocalToGlobalMapping* nltog)
283: {

288:   ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)ltog),ltog->bs,ltog->n,ltog->indices,PETSC_COPY_VALUES,nltog);
289:   return(0);
290: }

292: /*@
293:     ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping

295:     Not Collective

297:     Input Parameter:
298: .   ltog - local to global mapping

300:     Output Parameter:
301: .   n - the number of entries in the local mapping, ISLocalToGlobalMappingGetIndices() returns an array of this length

303:     Level: advanced

305: .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
306: @*/
307: PetscErrorCode  ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
308: {
312:   *n = mapping->bs*mapping->n;
313:   return(0);
314: }

316: /*@C
317:    ISLocalToGlobalMappingViewFromOptions - View an ISLocalToGlobalMapping based on values in the options database

319:    Collective on ISLocalToGlobalMapping

321:    Input Parameters:
322: +  A - the local to global mapping object
323: .  obj - Optional object
324: -  name - command line option

326:    Level: intermediate

327: .seealso:  ISLocalToGlobalMapping, ISLocalToGlobalMappingView(), PetscObjectViewFromOptions(), ISLocalToGlobalMappingCreate()
328: @*/
329: PetscErrorCode  ISLocalToGlobalMappingViewFromOptions(ISLocalToGlobalMapping A,PetscObject obj,const char name[])
330: {

335:   PetscObjectViewFromOptions((PetscObject)A,obj,name);
336:   return(0);
337: }

339: /*@C
340:     ISLocalToGlobalMappingView - View a local to global mapping

342:     Not Collective

344:     Input Parameters:
345: +   ltog - local to global mapping
346: -   viewer - viewer

348:     Level: advanced

350: .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
351: @*/
352: PetscErrorCode  ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
353: {
354:   PetscInt       i;
355:   PetscMPIInt    rank;
356:   PetscBool      iascii;

361:   if (!viewer) {
362:     PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping),&viewer);
363:   }

366:   MPI_Comm_rank(PetscObjectComm((PetscObject)mapping),&rank);
367:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
368:   if (iascii) {
369:     PetscObjectPrintClassNamePrefixType((PetscObject)mapping,viewer);
370:     PetscViewerASCIIPushSynchronized(viewer);
371:     for (i=0; i<mapping->n; i++) {
372:       PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %D %D\n",rank,i,mapping->indices[i]);
373:     }
374:     PetscViewerFlush(viewer);
375:     PetscViewerASCIIPopSynchronized(viewer);
376:   }
377:   return(0);
378: }

380: /*@
381:     ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
382:     ordering and a global parallel ordering.

384:     Not collective

386:     Input Parameter:
387: .   is - index set containing the global numbers for each local number

389:     Output Parameter:
390: .   mapping - new mapping data structure

392:     Notes:
393:     The block size of the IS determines the block size of the mapping.

394:     Level: advanced

396: .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetFromOptions()
397: @*/
398: PetscErrorCode  ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
399: {
401:   PetscInt       n,bs;
402:   const PetscInt *indices;
403:   MPI_Comm       comm;
404:   PetscBool      isblock;


410:   PetscObjectGetComm((PetscObject)is,&comm);
411:   ISGetLocalSize(is,&n);
412:   PetscObjectTypeCompare((PetscObject)is,ISBLOCK,&isblock);
413:   if (!isblock) {
414:     ISGetIndices(is,&indices);
415:     ISLocalToGlobalMappingCreate(comm,1,n,indices,PETSC_COPY_VALUES,mapping);
416:     ISRestoreIndices(is,&indices);
417:   } else {
418:     ISGetBlockSize(is,&bs);
419:     ISBlockGetIndices(is,&indices);
420:     ISLocalToGlobalMappingCreate(comm,bs,n/bs,indices,PETSC_COPY_VALUES,mapping);
421:     ISBlockRestoreIndices(is,&indices);
422:   }
423:   return(0);
424: }
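/*
   A minimal usage sketch (illustrative, not part of the original source): create a mapping
   from an IS that lists the global number of each local entry; the indices are placeholders.

     IS                     is;
     ISLocalToGlobalMapping ltog;
     const PetscInt         globals[] = {3, 7, 2, 9};   // global number of local entries 0..3
     PetscErrorCode         ierr;

     ierr = ISCreateGeneral(PETSC_COMM_SELF, 4, globals, PETSC_COPY_VALUES, &is);CHKERRQ(ierr);
     ierr = ISLocalToGlobalMappingCreateIS(is, &ltog);CHKERRQ(ierr);
     ierr = ISDestroy(&is);CHKERRQ(ierr);
     // ... use ltog ...
     ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
*/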

426: /*@C
427:     ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
428:     ordering and a global parallel ordering.

430:     Collective

432:     Input Parameters:
433: +   sf - star forest mapping contiguous local indices to (rank, offset)
434: -   start - first global index on this process

436:     Output Parameter:
437: .   mapping - new mapping data structure

439:     Level: advanced

441: .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingSetFromOptions()
442: @*/
443: PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf,PetscInt start,ISLocalToGlobalMapping *mapping)
444: {
446:   PetscInt       i,maxlocal,nroots,nleaves,*globals,*ltog;
447:   const PetscInt *ilocal;
448:   MPI_Comm       comm;


454:   PetscObjectGetComm((PetscObject)sf,&comm);
455:   PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,NULL);
456:   if (ilocal) {
457:     for (i=0,maxlocal=0; i<nleaves; i++) maxlocal = PetscMax(maxlocal,ilocal[i]+1);
458:   }
459:   else maxlocal = nleaves;
460:   PetscMalloc1(nroots,&globals);
461:   PetscMalloc1(maxlocal,&ltog);
462:   for (i=0; i<nroots; i++) globals[i] = start + i;
463:   for (i=0; i<maxlocal; i++) ltog[i] = -1;
464:   PetscSFBcastBegin(sf,MPIU_INT,globals,ltog,MPI_REPLACE);
465:   PetscSFBcastEnd(sf,MPIU_INT,globals,ltog,MPI_REPLACE);
466:   ISLocalToGlobalMappingCreate(comm,1,maxlocal,ltog,PETSC_OWN_POINTER,mapping);
467:   PetscFree(globals);
468:   return(0);
469: }

471: /*@
472:     ISLocalToGlobalMappingSetBlockSize - Sets the blocksize of the mapping

474:     Not collective

476:     Input Parameters:
477: +   mapping - mapping data structure
478: -   bs - the blocksize

480:     Level: advanced

482: .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
483: @*/
484: PetscErrorCode  ISLocalToGlobalMappingSetBlockSize(ISLocalToGlobalMapping mapping,PetscInt bs)
485: {
486:   PetscInt       *nid;
487:   const PetscInt *oid;
488:   PetscInt       i,cn,on,obs,nn;

493:   if (bs < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid block size %D",bs);
494:   if (bs == mapping->bs) return(0);
495:   on  = mapping->n;
496:   obs = mapping->bs;
497:   oid = mapping->indices;
498:   nn  = (on*obs)/bs;
499:   if ((on*obs)%bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block size %D is inconsistent with block size %D and number of block indices %D",bs,obs,on);

501:   PetscMalloc1(nn,&nid);
502:   ISLocalToGlobalMappingGetIndices(mapping,&oid);
503:   for (i=0;i<nn;i++) {
504:     PetscInt j;
505:     for (j=0,cn=0;j<bs-1;j++) {
506:       if (oid[i*bs+j] < 0) { cn++; continue; }
507:       if (oid[i*bs+j] != oid[i*bs+j+1]-1) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %D and %D are incompatible with the block indices: non consecutive indices %D %D",bs,obs,oid[i*bs+j],oid[i*bs+j+1]);
508:     }
509:     if (oid[i*bs+j] < 0) cn++;
510:     if (cn) {
511:       if (cn != bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %D and %D are incompatible with the block indices: invalid number of negative entries in block %D",bs,obs,cn);
512:       nid[i] = -1;
513:     } else {
514:       nid[i] = oid[i*bs]/bs;
515:     }
516:   }
517:   ISLocalToGlobalMappingRestoreIndices(mapping,&oid);

519:   mapping->n           = nn;
520:   mapping->bs          = bs;
521:   PetscFree(mapping->indices);
522:   mapping->indices     = nid;
523:   mapping->globalstart = 0;
524:   mapping->globalend   = 0;

526:   /* reset the cached information */
527:   PetscFree(mapping->info_procs);
528:   PetscFree(mapping->info_numprocs);
529:   if (mapping->info_indices) {
530:     PetscInt i;

532:     PetscFree((mapping->info_indices)[0]);
533:     for (i=1; i<mapping->info_nproc; i++) {
534:       PetscFree(mapping->info_indices[i]);
535:     }
536:     PetscFree(mapping->info_indices);
537:   }
538:   mapping->info_cached = PETSC_FALSE;

540:   if (mapping->ops->destroy) {
541:     (*mapping->ops->destroy)(mapping);
542:   }
543:   return(0);
544: }
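/*
   A minimal sketch (illustrative): starting from indices {4,5,6,7} with block size 1,
   resetting the block size to 2 collapses each pair of consecutive global indices into one
   block index, leaving the block indices {2,3} (global index divided by bs).

     ISLocalToGlobalMapping ltog;
     const PetscInt         idx[] = {4, 5, 6, 7};
     PetscErrorCode         ierr;

     ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, 1, 4, idx, PETSC_COPY_VALUES, &ltog);CHKERRQ(ierr);
     ierr = ISLocalToGlobalMappingSetBlockSize(ltog, 2);CHKERRQ(ierr);   // ltog now has bs = 2, block indices {2,3}
     ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
*/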

546: /*@
547:     ISLocalToGlobalMappingGetBlockSize - Gets the blocksize of the mapping

550:     Not Collective

552:     Input Parameter:
553: .   mapping - mapping data structure

555:     Output Parameter:
556: .   bs - the blocksize

558:     Level: advanced

560: .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
561: @*/
562: PetscErrorCode  ISLocalToGlobalMappingGetBlockSize(ISLocalToGlobalMapping mapping,PetscInt *bs)
563: {
566:   *bs = mapping->bs;
567:   return(0);
568: }

570: /*@
571:     ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
572:     ordering and a global parallel ordering.

574:     Not Collective, but communicator may have more than one process

576:     Input Parameters:
577: +   comm - MPI communicator
578: .   bs - the block size
579: .   n - the number of local elements divided by the block size, or equivalently the number of block indices
580: .   indices - the global index for each local element, these do not need to be in increasing order (sorted), these values should not be scaled (i.e. multiplied) by the blocksize bs
581: -   mode - see PetscCopyMode

583:     Output Parameter:
584: .   mapping - new mapping data structure

586:     Notes:
587:     There is one integer value in indices per block and it represents the actual indices bs*idx + j, where j=0,..,bs-1

589:     For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
590:     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used; this is scalable.
591:     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.

593:     Level: advanced

595: .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingSetFromOptions(), ISLOCALTOGLOBALMAPPINGBASIC, ISLOCALTOGLOBALMAPPINGHASH
596:           ISLocalToGlobalMappingSetType(), ISLocalToGlobalMappingType
597: @*/
598: PetscErrorCode  ISLocalToGlobalMappingCreate(MPI_Comm comm,PetscInt bs,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping)
599: {
601:   PetscInt       *in;


607:   *mapping = NULL;
608:   ISInitializePackage();

610:   PetscHeaderCreate(*mapping,IS_LTOGM_CLASSID,"ISLocalToGlobalMapping","Local to global mapping","IS",
611:                            comm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);
612:   (*mapping)->n             = n;
613:   (*mapping)->bs            = bs;
614:   (*mapping)->info_cached   = PETSC_FALSE;
615:   (*mapping)->info_free     = PETSC_FALSE;
616:   (*mapping)->info_procs    = NULL;
617:   (*mapping)->info_numprocs = NULL;
618:   (*mapping)->info_indices  = NULL;
619:   (*mapping)->info_nodec    = NULL;
620:   (*mapping)->info_nodei    = NULL;

622:   (*mapping)->ops->globaltolocalmappingapply      = NULL;
623:   (*mapping)->ops->globaltolocalmappingapplyblock = NULL;
624:   (*mapping)->ops->destroy                        = NULL;
625:   if (mode == PETSC_COPY_VALUES) {
626:     PetscMalloc1(n,&in);
627:     PetscArraycpy(in,indices,n);
628:     (*mapping)->indices = in;
629:     PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));
630:   } else if (mode == PETSC_OWN_POINTER) {
631:     (*mapping)->indices = (PetscInt*)indices;
632:     PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));
633:   }
634:   else SETERRQ(comm,PETSC_ERR_SUP,"Cannot currently use PETSC_USE_POINTER");
635:   return(0);
636: }
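/*
   A minimal sketch (illustrative): with bs = 2 and the block indices {0,3}, the mapping
   represents the actual global indices {0,1,6,7} (bs*idx + j for j = 0,...,bs-1).

     ISLocalToGlobalMapping ltog;
     const PetscInt         blockidx[] = {0, 3};
     PetscErrorCode         ierr;

     ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, 2, 2, blockidx, PETSC_COPY_VALUES, &ltog);CHKERRQ(ierr);
     // ... apply it with ISLocalToGlobalMappingApply() or ISLocalToGlobalMappingApplyBlock() ...
     ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
*/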

638: PetscFunctionList ISLocalToGlobalMappingList = NULL;

640: /*@
641:    ISLocalToGlobalMappingSetFromOptions - Set mapping options from the options database.

643:    Not collective

645:    Input Parameter:
646: .  mapping - mapping data structure

648:    Level: advanced

650: @*/
651: PetscErrorCode ISLocalToGlobalMappingSetFromOptions(ISLocalToGlobalMapping mapping)
652: {
653:   PetscErrorCode             ierr;
654:   char                       type[256];
655:   ISLocalToGlobalMappingType defaulttype = "Not set";
656:   PetscBool                  flg;

660:   ISLocalToGlobalMappingRegisterAll();
661:   PetscObjectOptionsBegin((PetscObject)mapping);
662:   PetscOptionsFList("-islocaltoglobalmapping_type","ISLocalToGlobalMapping method","ISLocalToGlobalMappingSetType",ISLocalToGlobalMappingList,(char*)(((PetscObject)mapping)->type_name) ? ((PetscObject)mapping)->type_name : defaulttype,type,256,&flg);
663:   if (flg) {
664:     ISLocalToGlobalMappingSetType(mapping,type);
665:   }
666:   PetscOptionsEnd();
667:   return(0);
668: }
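/*
   A minimal sketch (illustrative): let the options database choose the global-to-local
   backend, e.g. run with -islocaltoglobalmapping_type hash. Here comm, n, indices and ltog
   are placeholders declared elsewhere.

     ierr = ISLocalToGlobalMappingCreate(comm, 1, n, indices, PETSC_COPY_VALUES, &ltog);CHKERRQ(ierr);
     ierr = ISLocalToGlobalMappingSetFromOptions(ltog);CHKERRQ(ierr);
*/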

670: /*@
671:    ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
672:    ordering and a global parallel ordering.

674:    Not Collective

676:    Input Parameter:
677: .  mapping - mapping data structure

679:    Level: advanced

681: .seealso: ISLocalToGlobalMappingCreate()
682: @*/
683: PetscErrorCode  ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
684: {

688:   if (!*mapping) return(0);
690:   if (--((PetscObject)(*mapping))->refct > 0) {*mapping = NULL;return(0);}
691:   PetscFree((*mapping)->indices);
692:   PetscFree((*mapping)->info_procs);
693:   PetscFree((*mapping)->info_numprocs);
694:   if ((*mapping)->info_indices) {
695:     PetscInt i;

697:     PetscFree(((*mapping)->info_indices)[0]);
698:     for (i=1; i<(*mapping)->info_nproc; i++) {
699:       PetscFree(((*mapping)->info_indices)[i]);
700:     }
701:     PetscFree((*mapping)->info_indices);
702:   }
703:   if ((*mapping)->info_nodei) {
704:     PetscFree(((*mapping)->info_nodei)[0]);
705:   }
706:   PetscFree2((*mapping)->info_nodec,(*mapping)->info_nodei);
707:   if ((*mapping)->ops->destroy) {
708:     (*(*mapping)->ops->destroy)(*mapping);
709:   }
710:   PetscHeaderDestroy(mapping);
711:   *mapping = NULL;
712:   return(0);
713: }

715: /*@
716:     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
717:     a new index set using the global numbering defined in an ISLocalToGlobalMapping
718:     context.

720:     Collective on is

722:     Input Parameters:
723: +   mapping - mapping between local and global numbering
724: -   is - index set in local numbering

726:     Output Parameter:
727: .   newis - index set in global numbering

729:     Notes:
730:     The output IS will have the same communicator as the input IS.

732:     Level: advanced

734: .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
735:           ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
736: @*/
737: PetscErrorCode  ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
738: {
740:   PetscInt       n,*idxout;
741:   const PetscInt *idxin;


748:   ISGetLocalSize(is,&n);
749:   ISGetIndices(is,&idxin);
750:   PetscMalloc1(n,&idxout);
751:   ISLocalToGlobalMappingApply(mapping,n,idxin,idxout);
752:   ISRestoreIndices(is,&idxin);
753:   ISCreateGeneral(PetscObjectComm((PetscObject)is),n,idxout,PETSC_OWN_POINTER,newis);
754:   return(0);
755: }

757: /*@
758:    ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
759:    and converts them to the global numbering.

761:    Not collective

763:    Input Parameters:
764: +  mapping - the local to global mapping context
765: .  N - number of integers
766: -  in - input indices in local numbering

768:    Output Parameter:
769: .  out - indices in global numbering

771:    Notes:
772:    The in and out array parameters may be identical.

774:    Level: advanced

776: .seealso: ISLocalToGlobalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
777:           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
778:           AOPetscToApplication(), ISGlobalToLocalMappingApply()

780: @*/
781: PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
782: {
783:   PetscInt i,bs,Nmax;

787:   bs   = mapping->bs;
788:   Nmax = bs*mapping->n;
789:   if (bs == 1) {
790:     const PetscInt *idx = mapping->indices;
791:     for (i=0; i<N; i++) {
792:       if (in[i] < 0) {
793:         out[i] = in[i];
794:         continue;
795:       }
796:       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
797:       out[i] = idx[in[i]];
798:     }
799:   } else {
800:     const PetscInt *idx = mapping->indices;
801:     for (i=0; i<N; i++) {
802:       if (in[i] < 0) {
803:         out[i] = in[i];
804:         continue;
805:       }
806:       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
807:       out[i] = idx[in[i]/bs]*bs + (in[i] % bs);
808:     }
809:   }
810:   return(0);
811: }
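/*
   A minimal sketch (illustrative): for a bs = 2 mapping with block indices {0,3} (as created
   in the sketch after ISLocalToGlobalMappingCreate() above), applying the mapping to the
   local point indices {0,1,2,3} yields {0,1,6,7}.

     const PetscInt lidx[] = {0, 1, 2, 3};
     PetscInt       gidx[4];
     PetscErrorCode ierr;

     ierr = ISLocalToGlobalMappingApply(ltog, 4, lidx, gidx);CHKERRQ(ierr);   // gidx = {0,1,6,7}
*/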

813: /*@
814:    ISLocalToGlobalMappingApplyBlock - Takes a list of integers in a local block numbering and converts them to the global block numbering

816:    Not collective

818:    Input Parameters:
819: +  mapping - the local to global mapping context
820: .  N - number of integers
821: -  in - input indices in local block numbering

823:    Output Parameter:
824: .  out - indices in global block numbering

826:    Notes:
827:    The in and out array parameters may be identical.

829:    Example:
830:      If the index values are {0,1,6,7} set with a call to ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,2,2,{0,3}) then the mapping applied to 0
831:      (the first block) would produce 0 and the mapping applied to 1 (the second block) would produce 3.

833:    Level: advanced

835: .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
836:           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
837:           AOPetscToApplication(), ISGlobalToLocalMappingApply()

839: @*/
840: PetscErrorCode ISLocalToGlobalMappingApplyBlock(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
841: {

845:   {
846:     PetscInt i,Nmax = mapping->n;
847:     const PetscInt *idx = mapping->indices;

849:     for (i=0; i<N; i++) {
850:       if (in[i] < 0) {
851:         out[i] = in[i];
852:         continue;
853:       }
854:       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local block index %D too large %D (max) at %D",in[i],Nmax-1,i);
855:       out[i] = idx[in[i]];
856:     }
857:   }
858:   return(0);
859: }
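/*
   A minimal sketch (illustrative): with the same bs = 2 mapping, the block version maps the
   local block indices {0,1} to the global block indices {0,3}, matching the example in the
   manual page above.

     const PetscInt lblk[] = {0, 1};
     PetscInt       gblk[2];
     PetscErrorCode ierr;

     ierr = ISLocalToGlobalMappingApplyBlock(ltog, 2, lblk, gblk);CHKERRQ(ierr);   // gblk = {0,3}
*/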

861: /*@
862:     ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
863:     specified with a global numbering.

865:     Not collective

867:     Input Parameters:
868: +   mapping - mapping between local and global numbering
869: .   type - IS_GTOLM_MASK - maps global indices with no local value to -1 in the output list (i.e., mask them)
870:            IS_GTOLM_DROP - drops the indices with no local value from the output list
871: .   n - number of global indices to map
872: -   idx - global indices to map

874:     Output Parameters:
875: +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
876: -   idxout - local index of each global index, one must pass in an array long enough
877:              to hold all the indices. You can call ISGlobalToLocalMappingApply() with
878:              idxout == NULL to determine the required length (returned in nout)
879:              and then allocate the required space and call ISGlobalToLocalMappingApply()
880:              a second time to set the values.

882:     Notes:
883:     Either nout or idxout may be NULL. idx and idxout may be identical.

885:     For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
886:     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used; this is scalable.
887:     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.

889:     Level: advanced

891:     Developer Note: The manual page states that idx and idxout may be identical but the calling
892:        sequence declares idx as const so it cannot be the same as idxout.

894: .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),
895:           ISLocalToGlobalMappingDestroy()
896: @*/
897: PetscErrorCode  ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
898: {

903:   if (!mapping->data) {
904:     ISGlobalToLocalMappingSetUp(mapping);
905:   }
906:   (*mapping->ops->globaltolocalmappingapply)(mapping,type,n,idx,nout,idxout);
907:   return(0);
908: }
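/*
   A minimal sketch (illustrative): the common two-pass IS_GTOLM_DROP pattern, first calling
   with idxout == NULL to obtain the required length, then filling the values. For the bs = 2
   mapping covering the global indices {0,1,6,7}, the global indices {6,7,42} map to the local
   indices {2,3} and 42 is dropped.

     const PetscInt gidx[] = {6, 7, 42};
     PetscInt       nout, *lidx;
     PetscErrorCode ierr;

     ierr = ISGlobalToLocalMappingApply(ltog, IS_GTOLM_DROP, 3, gidx, &nout, NULL);CHKERRQ(ierr);
     ierr = PetscMalloc1(nout, &lidx);CHKERRQ(ierr);
     ierr = ISGlobalToLocalMappingApply(ltog, IS_GTOLM_DROP, 3, gidx, &nout, lidx);CHKERRQ(ierr);   // nout = 2, lidx = {2,3}
     ierr = PetscFree(lidx);CHKERRQ(ierr);
*/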

910: /*@
911:     ISGlobalToLocalMappingApplyIS - Creates from an IS in the global numbering
912:     a new index set using the local numbering defined in an ISLocalToGlobalMapping
913:     context.

915:     Not collective

917:     Input Parameters:
918: +   mapping - mapping between local and global numbering
919: .   type - IS_GTOLM_MASK - maps global indices with no local value to -1 in the output list (i.e., mask them)
920:            IS_GTOLM_DROP - drops the indices with no local value from the output list
921: -   is - index set in global numbering

923:     Output Parameter:
924: .   newis - index set in local numbering

926:     Notes:
927:     The output IS will be sequential, as it encodes a purely local operation

929:     Level: advanced

931: .seealso: ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
932:           ISLocalToGlobalMappingDestroy()
933: @*/
934: PetscErrorCode  ISGlobalToLocalMappingApplyIS(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,IS is,IS *newis)
935: {
937:   PetscInt       n,nout,*idxout;
938:   const PetscInt *idxin;


945:   ISGetLocalSize(is,&n);
946:   ISGetIndices(is,&idxin);
947:   if (type == IS_GTOLM_MASK) {
948:     PetscMalloc1(n,&idxout);
949:   } else {
950:     ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,NULL);
951:     PetscMalloc1(nout,&idxout);
952:   }
953:   ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,idxout);
954:   ISRestoreIndices(is,&idxin);
955:   ISCreateGeneral(PETSC_COMM_SELF,nout,idxout,PETSC_OWN_POINTER,newis);
956:   return(0);
957: }

959: /*@
960:     ISGlobalToLocalMappingApplyBlock - Provides the local block numbering for a list of integers
961:     specified with a block global numbering.

963:     Not collective

965:     Input Parameters:
966: +   mapping - mapping between local and global numbering
967: .   type - IS_GTOLM_MASK - maps global indices with no local value to -1 in the output list (i.e., mask them)
968:            IS_GTOLM_DROP - drops the indices with no local value from the output list
969: .   n - number of global indices to map
970: -   idx - global indices to map

972:     Output Parameters:
973: +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
974: -   idxout - local index of each global index, one must pass in an array long enough
975:              to hold all the indices. You can call ISGlobalToLocalMappingApplyBlock() with
976:              idxout == NULL to determine the required length (returned in nout)
977:              and then allocate the required space and call ISGlobalToLocalMappingApplyBlock()
978:              a second time to set the values.

980:     Notes:
981:     Either nout or idxout may be NULL. idx and idxout may be identical.

983:     For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
984:     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used; this is scalable.
985:     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.

987:     Level: advanced

989:     Developer Note: The manual page states that idx and idxout may be identical but the calling
990:        sequence declares idx as const so it cannot be the same as idxout.

992: .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
993:           ISLocalToGlobalMappingDestroy()
994: @*/
995: PetscErrorCode  ISGlobalToLocalMappingApplyBlock(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,
996:                                                  PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
997: {

1002:   if (!mapping->data) {
1003:     ISGlobalToLocalMappingSetUp(mapping);
1004:   }
1005:   (*mapping->ops->globaltolocalmappingapplyblock)(mapping,type,n,idx,nout,idxout);
1006:   return(0);
1007: }

1009: /*@C
1010:     ISLocalToGlobalMappingGetBlockInfo - Gets the neighbor information for each processor and
1011:      each index shared by more than one processor

1013:     Collective on ISLocalToGlobalMapping

1015:     Input Parameter:
1016: .   mapping - the mapping from local to global indexing

1018:     Output Parameters:
1019: +   nproc - number of processors that are connected to this one
1020: .   proc - neighboring processors
1021: .   numproc - number of indices for each subdomain (processor)
1022: -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)

1024:     Level: advanced

1026:     Fortran Usage:
1027: $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
1028: $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
1029:           PetscInt indices[nproc][numprocmax],ierr)
1030:         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
1031:         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.

1033: .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1034:           ISLocalToGlobalMappingRestoreInfo()
1035: @*/
1036: PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1037: {

1042:   if (mapping->info_cached) {
1043:     *nproc    = mapping->info_nproc;
1044:     *procs    = mapping->info_procs;
1045:     *numprocs = mapping->info_numprocs;
1046:     *indices  = mapping->info_indices;
1047:   } else {
1048:     ISLocalToGlobalMappingGetBlockInfo_Private(mapping,nproc,procs,numprocs,indices);
1049:   }
1050:   return(0);
1051: }

1053: static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1054: {
1056:   PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
1057:   PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
1058:   PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
1059:   PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
1060:   PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
1061:   PetscInt       first_procs,first_numprocs,*first_indices;
1062:   MPI_Request    *recv_waits,*send_waits;
1063:   MPI_Status     recv_status,*send_status,*recv_statuses;
1064:   MPI_Comm       comm;
1065:   PetscBool      debug = PETSC_FALSE;

1068:   PetscObjectGetComm((PetscObject)mapping,&comm);
1069:   MPI_Comm_size(comm,&size);
1070:   MPI_Comm_rank(comm,&rank);
1071:   if (size == 1) {
1072:     *nproc         = 0;
1073:     *procs         = NULL;
1074:     PetscNew(numprocs);
1075:     (*numprocs)[0] = 0;
1076:     PetscNew(indices);
1077:     (*indices)[0]  = NULL;
1078:     /* save info for reuse */
1079:     mapping->info_nproc = *nproc;
1080:     mapping->info_procs = *procs;
1081:     mapping->info_numprocs = *numprocs;
1082:     mapping->info_indices = *indices;
1083:     mapping->info_cached = PETSC_TRUE;
1084:     return(0);
1085:   }

1087:   PetscOptionsGetBool(((PetscObject)mapping)->options,NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,NULL);

1089:   /*
1090:     Notes on ISLocalToGlobalMappingGetBlockInfo

1092:     globally owned node - the nodes that have been assigned to this processor in global
1093:            numbering, just for this routine.

1095:     nontrivial globally owned node - node assigned to this processor that is on a subdomain
1096:            boundary (i.e. it has more than one local owner)

1098:     locally owned node - node that exists on this processors subdomain

1100:     nontrivial locally owned node - node that is not in the interior (i.e. has more than one
1101:            local subdomain)
1102:   */
1103:   PetscObjectGetNewTag((PetscObject)mapping,&tag1);
1104:   PetscObjectGetNewTag((PetscObject)mapping,&tag2);
1105:   PetscObjectGetNewTag((PetscObject)mapping,&tag3);

1107:   for (i=0; i<n; i++) {
1108:     if (lindices[i] > max) max = lindices[i];
1109:   }
1110:   MPIU_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);
1111:   Ng++;
1112:   MPI_Comm_size(comm,&size);
1113:   MPI_Comm_rank(comm,&rank);
1114:   scale  = Ng/size + 1;
1115:   ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
1116:   rstart = scale*rank;

1118:   /* determine ownership ranges of global indices */
1119:   PetscMalloc1(2*size,&nprocs);
1120:   PetscArrayzero(nprocs,2*size);

1122:   /* determine owners of each local node  */
1123:   PetscMalloc1(n,&owner);
1124:   for (i=0; i<n; i++) {
1125:     proc             = lindices[i]/scale; /* processor that globally owns this index */
1126:     nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
1127:     owner[i]         = proc;
1128:     nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
1129:   }
1130:   nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
1131:   PetscInfo1(mapping,"Number of global owners for my local data %D\n",nsends);

1133:   /* inform other processors of number of messages and max length*/
1134:   PetscMaxSum(comm,nprocs,&nmax,&nrecvs);
1135:   PetscInfo1(mapping,"Number of local owners for my global data %D\n",nrecvs);

1137:   /* post receives for owned rows */
1138:   PetscMalloc1((2*nrecvs+1)*(nmax+1),&recvs);
1139:   PetscMalloc1(nrecvs+1,&recv_waits);
1140:   for (i=0; i<nrecvs; i++) {
1141:     MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);
1142:   }

1144:   /* pack messages containing lists of local nodes to owners */
1145:   PetscMalloc1(2*n+1,&sends);
1146:   PetscMalloc1(size+1,&starts);
1147:   starts[0] = 0;
1148:   for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
1149:   for (i=0; i<n; i++) {
1150:     sends[starts[owner[i]]++] = lindices[i];
1151:     sends[starts[owner[i]]++] = i;
1152:   }
1153:   PetscFree(owner);
1154:   starts[0] = 0;
1155:   for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];

1157:   /* send the messages */
1158:   PetscMalloc1(nsends+1,&send_waits);
1159:   PetscMalloc1(nsends+1,&dest);
1160:   cnt = 0;
1161:   for (i=0; i<size; i++) {
1162:     if (nprocs[2*i]) {
1163:       MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);
1164:       dest[cnt] = i;
1165:       cnt++;
1166:     }
1167:   }
1168:   PetscFree(starts);

1170:   /* wait on receives */
1171:   PetscMalloc1(nrecvs+1,&source);
1172:   PetscMalloc1(nrecvs+1,&len);
1173:   cnt  = nrecvs;
1174:   PetscCalloc1(ng+1,&nownedsenders);
1175:   while (cnt) {
1176:     MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);
1177:     /* unpack receives into our local space */
1178:     MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);
1179:     source[imdex] = recv_status.MPI_SOURCE;
1180:     len[imdex]    = len[imdex]/2;
1181:     /* count how many local owners for each of my global owned indices */
1182:     for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
1183:     cnt--;
1184:   }
1185:   PetscFree(recv_waits);

1187:   /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
1188:   nowned  = 0;
1189:   nownedm = 0;
1190:   for (i=0; i<ng; i++) {
1191:     if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
1192:   }

1194:   /* create single array to contain rank of all local owners of each globally owned index */
1195:   PetscMalloc1(nownedm+1,&ownedsenders);
1196:   PetscMalloc1(ng+1,&starts);
1197:   starts[0] = 0;
1198:   for (i=1; i<ng; i++) {
1199:     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
1200:     else starts[i] = starts[i-1];
1201:   }

1203:   /* for each nontrivial globally owned node list all arriving processors */
1204:   for (i=0; i<nrecvs; i++) {
1205:     for (j=0; j<len[i]; j++) {
1206:       node = recvs[2*i*nmax+2*j]-rstart;
1207:       if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
1208:     }
1209:   }

1211:   if (debug) { /* -----------------------------------  */
1212:     starts[0] = 0;
1213:     for (i=1; i<ng; i++) {
1214:       if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
1215:       else starts[i] = starts[i-1];
1216:     }
1217:     for (i=0; i<ng; i++) {
1218:       if (nownedsenders[i] > 1) {
1219:         PetscSynchronizedPrintf(comm,"[%d] global node %D local owner processors: ",rank,i+rstart);
1220:         for (j=0; j<nownedsenders[i]; j++) {
1221:           PetscSynchronizedPrintf(comm,"%D ",ownedsenders[starts[i]+j]);
1222:         }
1223:         PetscSynchronizedPrintf(comm,"\n");
1224:       }
1225:     }
1226:     PetscSynchronizedFlush(comm,PETSC_STDOUT);
1227:   } /* -----------------------------------  */

1229:   /* wait on original sends */
1230:   if (nsends) {
1231:     PetscMalloc1(nsends,&send_status);
1232:     MPI_Waitall(nsends,send_waits,send_status);
1233:     PetscFree(send_status);
1234:   }
1235:   PetscFree(send_waits);
1236:   PetscFree(sends);
1237:   PetscFree(nprocs);

1239:   /* pack messages to send back to local owners */
1240:   starts[0] = 0;
1241:   for (i=1; i<ng; i++) {
1242:     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
1243:     else starts[i] = starts[i-1];
1244:   }
1245:   nsends2 = nrecvs;
1246:   PetscMalloc1(nsends2+1,&nprocs); /* length of each message */
1247:   for (i=0; i<nrecvs; i++) {
1248:     nprocs[i] = 1;
1249:     for (j=0; j<len[i]; j++) {
1250:       node = recvs[2*i*nmax+2*j]-rstart;
1251:       if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
1252:     }
1253:   }
1254:   nt = 0;
1255:   for (i=0; i<nsends2; i++) nt += nprocs[i];

1257:   PetscMalloc1(nt+1,&sends2);
1258:   PetscMalloc1(nsends2+1,&starts2);

1260:   starts2[0] = 0;
1261:   for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
1262:   /*
1263:      Each message is 1 + nprocs[i] long, and consists of
1264:        (0) the number of nodes being sent back
1265:        (1) the local node number,
1266:        (2) the number of processors sharing it,
1267:        (3) the processors sharing it
1268:   */
1269:   for (i=0; i<nsends2; i++) {
1270:     cnt = 1;
1271:     sends2[starts2[i]] = 0;
1272:     for (j=0; j<len[i]; j++) {
1273:       node = recvs[2*i*nmax+2*j]-rstart;
1274:       if (nownedsenders[node] > 1) {
1275:         sends2[starts2[i]]++;
1276:         sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
1277:         sends2[starts2[i]+cnt++] = nownedsenders[node];
1278:         PetscArraycpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]);
1279:         cnt += nownedsenders[node];
1280:       }
1281:     }
1282:   }

1284:   /* receive the message lengths */
1285:   nrecvs2 = nsends;
1286:   PetscMalloc1(nrecvs2+1,&lens2);
1287:   PetscMalloc1(nrecvs2+1,&starts3);
1288:   PetscMalloc1(nrecvs2+1,&recv_waits);
1289:   for (i=0; i<nrecvs2; i++) {
1290:     MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);
1291:   }

1293:   /* send the message lengths */
1294:   for (i=0; i<nsends2; i++) {
1295:     MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);
1296:   }

1298:   /* wait on receives of lens */
1299:   if (nrecvs2) {
1300:     PetscMalloc1(nrecvs2,&recv_statuses);
1301:     MPI_Waitall(nrecvs2,recv_waits,recv_statuses);
1302:     PetscFree(recv_statuses);
1303:   }
1304:   PetscFree(recv_waits);

1306:   starts3[0] = 0;
1307:   nt         = 0;
1308:   for (i=0; i<nrecvs2-1; i++) {
1309:     starts3[i+1] = starts3[i] + lens2[i];
1310:     nt          += lens2[i];
1311:   }
1312:   if (nrecvs2) nt += lens2[nrecvs2-1];

1314:   PetscMalloc1(nt+1,&recvs2);
1315:   PetscMalloc1(nrecvs2+1,&recv_waits);
1316:   for (i=0; i<nrecvs2; i++) {
1317:     MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);
1318:   }

1320:   /* send the messages */
1321:   PetscMalloc1(nsends2+1,&send_waits);
1322:   for (i=0; i<nsends2; i++) {
1323:     MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);
1324:   }

1326:   /* wait on receives */
1327:   if (nrecvs2) {
1328:     PetscMalloc1(nrecvs2,&recv_statuses);
1329:     MPI_Waitall(nrecvs2,recv_waits,recv_statuses);
1330:     PetscFree(recv_statuses);
1331:   }
1332:   PetscFree(recv_waits);
1333:   PetscFree(nprocs);

1335:   if (debug) { /* -----------------------------------  */
1336:     cnt = 0;
1337:     for (i=0; i<nrecvs2; i++) {
1338:       nt = recvs2[cnt++];
1339:       for (j=0; j<nt; j++) {
1340:         PetscSynchronizedPrintf(comm,"[%d] local node %D number of subdomains %D: ",rank,recvs2[cnt],recvs2[cnt+1]);
1341:         for (k=0; k<recvs2[cnt+1]; k++) {
1342:           PetscSynchronizedPrintf(comm,"%D ",recvs2[cnt+2+k]);
1343:         }
1344:         cnt += 2 + recvs2[cnt+1];
1345:         PetscSynchronizedPrintf(comm,"\n");
1346:       }
1347:     }
1348:     PetscSynchronizedFlush(comm,PETSC_STDOUT);
1349:   } /* -----------------------------------  */

1351:   /* count the number of subdomains for each local node */
1352:   PetscCalloc1(size,&nprocs);
1353:   cnt  = 0;
1354:   for (i=0; i<nrecvs2; i++) {
1355:     nt = recvs2[cnt++];
1356:     for (j=0; j<nt; j++) {
1357:       for (k=0; k<recvs2[cnt+1]; k++) nprocs[recvs2[cnt+2+k]]++;
1358:       cnt += 2 + recvs2[cnt+1];
1359:     }
1360:   }
1361:   nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
1362:   *nproc    = nt;
1363:   PetscMalloc1(nt+1,procs);
1364:   PetscMalloc1(nt+1,numprocs);
1365:   PetscMalloc1(nt+1,indices);
1366:   for (i=0;i<nt+1;i++) (*indices)[i]=NULL;
1367:   PetscMalloc1(size,&bprocs);
1368:   cnt  = 0;
1369:   for (i=0; i<size; i++) {
1370:     if (nprocs[i] > 0) {
1371:       bprocs[i]        = cnt;
1372:       (*procs)[cnt]    = i;
1373:       (*numprocs)[cnt] = nprocs[i];
1374:       PetscMalloc1(nprocs[i],&(*indices)[cnt]);
1375:       cnt++;
1376:     }
1377:   }

1379:   /* make the list of subdomains for each nontrivial local node */
1380:   PetscArrayzero(*numprocs,nt);
1381:   cnt  = 0;
1382:   for (i=0; i<nrecvs2; i++) {
1383:     nt = recvs2[cnt++];
1384:     for (j=0; j<nt; j++) {
1385:       for (k=0; k<recvs2[cnt+1]; k++) (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
1386:       cnt += 2 + recvs2[cnt+1];
1387:     }
1388:   }
1389:   PetscFree(bprocs);
1390:   PetscFree(recvs2);

1392:   /* sort the node indices by their global numbers */
1393:   nt = *nproc;
1394:   for (i=0; i<nt; i++) {
1395:     PetscMalloc1((*numprocs)[i],&tmp);
1396:     for (j=0; j<(*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
1397:     PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);
1398:     PetscFree(tmp);
1399:   }

1401:   if (debug) { /* -----------------------------------  */
1402:     nt = *nproc;
1403:     for (i=0; i<nt; i++) {
1404:       PetscSynchronizedPrintf(comm,"[%d] subdomain %D number of indices %D: ",rank,(*procs)[i],(*numprocs)[i]);
1405:       for (j=0; j<(*numprocs)[i]; j++) {
1406:         PetscSynchronizedPrintf(comm,"%D ",(*indices)[i][j]);
1407:       }
1408:       PetscSynchronizedPrintf(comm,"\n");
1409:     }
1410:     PetscSynchronizedFlush(comm,PETSC_STDOUT);
1411:   } /* -----------------------------------  */

1413:   /* wait on sends */
1414:   if (nsends2) {
1415:     PetscMalloc1(nsends2,&send_status);
1416:     MPI_Waitall(nsends2,send_waits,send_status);
1417:     PetscFree(send_status);
1418:   }

1420:   PetscFree(starts3);
1421:   PetscFree(dest);
1422:   PetscFree(send_waits);

1424:   PetscFree(nownedsenders);
1425:   PetscFree(ownedsenders);
1426:   PetscFree(starts);
1427:   PetscFree(starts2);
1428:   PetscFree(lens2);

1430:   PetscFree(source);
1431:   PetscFree(len);
1432:   PetscFree(recvs);
1433:   PetscFree(nprocs);
1434:   PetscFree(sends2);

1436:   /* put the information about myself as the first entry in the list */
1437:   first_procs    = (*procs)[0];
1438:   first_numprocs = (*numprocs)[0];
1439:   first_indices  = (*indices)[0];
1440:   for (i=0; i<*nproc; i++) {
1441:     if ((*procs)[i] == rank) {
1442:       (*procs)[0]    = (*procs)[i];
1443:       (*numprocs)[0] = (*numprocs)[i];
1444:       (*indices)[0]  = (*indices)[i];
1445:       (*procs)[i]    = first_procs;
1446:       (*numprocs)[i] = first_numprocs;
1447:       (*indices)[i]  = first_indices;
1448:       break;
1449:     }
1450:   }

1452:   /* save info for reuse */
1453:   mapping->info_nproc = *nproc;
1454:   mapping->info_procs = *procs;
1455:   mapping->info_numprocs = *numprocs;
1456:   mapping->info_indices = *indices;
1457:   mapping->info_cached = PETSC_TRUE;
1458:   return(0);
1459: }

1461: /*@C
1462:     ISLocalToGlobalMappingRestoreBlockInfo - Frees the memory allocated by ISLocalToGlobalMappingGetBlockInfo()

1464:     Collective on ISLocalToGlobalMapping

1466:     Input Parameter:
1467: .   mapping - the mapping from local to global indexing

1469:     Output Parameters:
1470: +   nproc - number of processors that are connected to this one
1471: .   proc - neighboring processors
1472: .   numproc - number of indices for each processor
1473: -   indices - indices of local nodes shared with neighbor (sorted by global numbering)

1475:     Level: advanced

1477: .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1478:           ISLocalToGlobalMappingGetInfo()
1479: @*/
1480: PetscErrorCode  ISLocalToGlobalMappingRestoreBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1481: {

1486:   if (mapping->info_free) {
1487:     PetscFree(*numprocs);
1488:     if (*indices) {
1489:       PetscInt i;

1491:       PetscFree((*indices)[0]);
1492:       for (i=1; i<*nproc; i++) {
1493:         PetscFree((*indices)[i]);
1494:       }
1495:       PetscFree(*indices);
1496:     }
1497:   }
1498:   *nproc    = 0;
1499:   *procs    = NULL;
1500:   *numprocs = NULL;
1501:   *indices  = NULL;
1502:   return(0);
1503: }

1505: /*@C
1506:     ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
1507:      each index shared by more than one processor

1509:     Collective on ISLocalToGlobalMapping

1511:     Input Parameter:
1512: .   mapping - the mapping from local to global indexing

1514:     Output Parameters:
1515: +   nproc - number of processors that are connected to this one
1516: .   proc - neighboring processors
1517: .   numproc - number of indices for each subdomain (processor)
1518: -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)

1520:     Level: advanced

1522:     Notes: The user needs to call ISLocalToGlobalMappingRestoreInfo() when the data is no longer needed.

1524:     Fortran Usage:
1525: $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
1526: $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
1527:           PetscInt indices[nproc][numprocmax],ierr)
1528:         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
1529:         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.

1531: .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1532:           ISLocalToGlobalMappingRestoreInfo()
1533: @*/
1534: PetscErrorCode  ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1535: {
1537:   PetscInt       **bindices = NULL,*bnumprocs = NULL,bs = mapping->bs,i,j,k;

1541:   ISLocalToGlobalMappingGetBlockInfo(mapping,nproc,procs,&bnumprocs,&bindices);
1542:   if (bs > 1) { /* we need to expand the cached info */
1543:     PetscCalloc1(*nproc,&*indices);
1544:     PetscCalloc1(*nproc,&*numprocs);
1545:     for (i=0; i<*nproc; i++) {
1546:       PetscMalloc1(bs*bnumprocs[i],&(*indices)[i]);
1547:       for (j=0; j<bnumprocs[i]; j++) {
1548:         for (k=0; k<bs; k++) {
1549:           (*indices)[i][j*bs+k] = bs*bindices[i][j] + k;
1550:         }
1551:       }
1552:       (*numprocs)[i] = bnumprocs[i]*bs;
1553:     }
1554:     mapping->info_free = PETSC_TRUE;
1555:   } else {
1556:     *numprocs = bnumprocs;
1557:     *indices  = bindices;
1558:   }
1559:   return(0);
1560: }

1562: /*@C
1563:     ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()

1565:     Collective on ISLocalToGlobalMapping

1567:     Input Parameter:
1568: .   mapping - the mapping from local to global indexing

1570:     Output Parameters:
1571: +   nproc - number of processors that are connected to this one
1572: .   proc - neighboring processors
1573: .   numproc - number of indices for each processor
1574: -   indices - indices of local nodes shared with neighbor (sorted by global numbering)

1576:     Level: advanced

1578: .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1579:           ISLocalToGlobalMappingGetInfo()
1580: @*/
1581: PetscErrorCode  ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1582: {

1586:   ISLocalToGlobalMappingRestoreBlockInfo(mapping,nproc,procs,numprocs,indices);
1587:   return(0);
1588: }

1590: /*@C
1591:     ISLocalToGlobalMappingGetNodeInfo - Gets the neighbor information for each node

1593:     Collective on ISLocalToGlobalMapping

1595:     Input Parameter:
1596: .   mapping - the mapping from local to global indexing

1598:     Output Parameters:
1599: +   nnodes - number of local nodes (same as returned by ISLocalToGlobalMappingGetSize())
1600: .   count - number of neighboring processors per node
1601: -   indices - indices of processes sharing the node (sorted)

1603:     Level: advanced

1605:     Notes: The user needs to call ISLocalToGlobalMappingRestoreNodeInfo() when the data is no longer needed.

1607: .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1608:           ISLocalToGlobalMappingGetInfo(), ISLocalToGlobalMappingRestoreNodeInfo()
1609: @*/
1610: PetscErrorCode  ISLocalToGlobalMappingGetNodeInfo(ISLocalToGlobalMapping mapping,PetscInt *nnodes,PetscInt *count[],PetscInt **indices[])
1611: {
1612:   PetscInt       n;

1617:   ISLocalToGlobalMappingGetSize(mapping,&n);
1618:   if (!mapping->info_nodec) {
1619:     PetscInt i,m,n_neigh,*neigh,*n_shared,**shared;

1621:     PetscMalloc2(n+1,&mapping->info_nodec,n,&mapping->info_nodei);
1622:     ISLocalToGlobalMappingGetInfo(mapping,&n_neigh,&neigh,&n_shared,&shared);
1623:     for (i=0;i<n;i++) { mapping->info_nodec[i] = 1;}
1624:     m = n;
1625:     mapping->info_nodec[n] = 0;
1626:     for (i=1;i<n_neigh;i++) {
1627:       PetscInt j;

1629:       m += n_shared[i];
1630:       for (j=0;j<n_shared[i];j++) mapping->info_nodec[shared[i][j]] += 1;
1631:     }
1632:     if (n) { PetscMalloc1(m,&mapping->info_nodei[0]); }
1633:     for (i=1;i<n;i++) mapping->info_nodei[i] = mapping->info_nodei[i-1] + mapping->info_nodec[i-1];
1634:     PetscArrayzero(mapping->info_nodec,n);
1635:     for (i=0;i<n;i++) { mapping->info_nodec[i] = 1; mapping->info_nodei[i][0] = neigh[0]; }
1636:     for (i=1;i<n_neigh;i++) {
1637:       PetscInt j;

1639:       for (j=0;j<n_shared[i];j++) {
1640:         PetscInt k = shared[i][j];

1642:         mapping->info_nodei[k][mapping->info_nodec[k]] = neigh[i];
1643:         mapping->info_nodec[k] += 1;
1644:       }
1645:     }
1646:     for (i=0;i<n;i++) { PetscSortRemoveDupsInt(&mapping->info_nodec[i],mapping->info_nodei[i]); }
1647:     ISLocalToGlobalMappingRestoreInfo(mapping,&n_neigh,&neigh,&n_shared,&shared);
1648:   }
1649:   if (nnodes)  *nnodes  = n;
1650:   if (count)   *count   = mapping->info_nodec;
1651:   if (indices) *indices = mapping->info_nodei;
1652:   return(0);
1653: }
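
/*
   A usage sketch (not part of the original source): for each local node, list the
   ranks that share it.  The helper name PrintNodeSharing is hypothetical.
*/
static PetscErrorCode PrintNodeSharing(ISLocalToGlobalMapping mapping)
{
  PetscInt nnodes,*count,**ranks,i,j;

  ISLocalToGlobalMappingGetNodeInfo(mapping,&nnodes,&count,&ranks);
  for (i=0; i<nnodes; i++) {
    PetscPrintf(PETSC_COMM_SELF,"local node %D is shared by %D process(es):",i,count[i]);
    for (j=0; j<count[i]; j++) PetscPrintf(PETSC_COMM_SELF," %D",ranks[i][j]);
    PetscPrintf(PETSC_COMM_SELF,"\n");
  }
  ISLocalToGlobalMappingRestoreNodeInfo(mapping,&nnodes,&count,&ranks);
  return(0);
}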

1655: /*@C
1656:     ISLocalToGlobalMappingRestoreNodeInfo - Frees the memory allocated by ISLocalToGlobalMappingGetNodeInfo()

1658:     Collective on ISLocalToGlobalMapping

1660:     Input Parameter:
1661: .   mapping - the mapping from local to global indexing

1663:     Output Parameters:
1664: +   nnodes - number of local nodes
1665: .   count - number of neighboring processors per node
1666: -   indices - indices of processes sharing the node (sorted)

1668:     Level: advanced

1670: .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1671:           ISLocalToGlobalMappingGetInfo(), ISLocalToGlobalMappingGetNodeInfo()
1672: @*/
1673: PetscErrorCode  ISLocalToGlobalMappingRestoreNodeInfo(ISLocalToGlobalMapping mapping,PetscInt *nnodes,PetscInt *count[],PetscInt **indices[])
1674: {
1677:   if (nnodes)  *nnodes  = 0;
1678:   if (count)   *count   = NULL;
1679:   if (indices) *indices = NULL;
1680:   return(0);
1681: }

1683: /*@C
1684:    ISLocalToGlobalMappingGetIndices - Get global indices for every local point that is mapped

1686:    Not Collective

1688:    Input Parameter:
1689: . ltog - local to global mapping

1691:    Output Parameter:
1692: . array - array of indices, the length of this array may be obtained with ISLocalToGlobalMappingGetSize()

1694:    Level: advanced

1696:    Notes:
1697:     ISLocalToGlobalMappingGetSize() returns the length of this array

1699: .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices(), ISLocalToGlobalMappingGetBlockIndices(), ISLocalToGlobalMappingRestoreBlockIndices()
1700: @*/
1701: PetscErrorCode  ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1702: {
1706:   if (ltog->bs == 1) {
1707:     *array = ltog->indices;
1708:   } else {
1709:     PetscInt       *jj,k,i,j,n = ltog->n, bs = ltog->bs;
1710:     const PetscInt *ii;

1713:     PetscMalloc1(bs*n,&jj);
1714:     *array = jj;
1715:     k    = 0;
1716:     ii   = ltog->indices;
1717:     for (i=0; i<n; i++)
1718:       for (j=0; j<bs; j++)
1719:         jj[k++] = bs*ii[i] + j;
1720:   }
1721:   return(0);
1722: }
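
/*
   A usage sketch (not part of the original source): copy the expanded (point-wise)
   global indices into caller-owned storage.  The helper name CopyGlobalIndices is
   hypothetical; the caller is responsible for freeing *globals with PetscFree().
*/
static PetscErrorCode CopyGlobalIndices(ISLocalToGlobalMapping ltog,PetscInt *n,PetscInt **globals)
{
  const PetscInt *array;

  ISLocalToGlobalMappingGetSize(ltog,n);        /* length of the expanded index array */
  PetscMalloc1(*n,globals);
  ISLocalToGlobalMappingGetIndices(ltog,&array);
  PetscArraycpy(*globals,array,*n);
  ISLocalToGlobalMappingRestoreIndices(ltog,&array);
  return(0);
}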

1724: /*@C
1725:    ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingGetIndices()

1727:    Not Collective

1729:    Input Parameters:
1730: + ltog - local to global mapping
1731: - array - array of indices

1733:    Level: advanced

1735: .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1736: @*/
1737: PetscErrorCode  ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1738: {
1742:   if (ltog->bs == 1 && *array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");

1744:   if (ltog->bs > 1) {
1746:     PetscFree(*(void**)array);
1747:   }
1748:   return(0);
1749: }

1751: /*@C
1752:    ISLocalToGlobalMappingGetBlockIndices - Get global indices for every local block

1754:    Not Collective

1756:    Input Parameter:
1757: . ltog - local to global mapping

1759:    Output Parameter:
1760: . array - array of indices

1762:    Level: advanced

1764: .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreBlockIndices()
1765: @*/
1766: PetscErrorCode  ISLocalToGlobalMappingGetBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1767: {
1771:   *array = ltog->indices;
1772:   return(0);
1773: }

1775: /*@C
1776:    ISLocalToGlobalMappingRestoreBlockIndices - Restore indices obtained with ISLocalToGlobalMappingGetBlockIndices()

1778:    Not Collective

1780:    Input Parameters:
1781: + ltog - local to global mapping
1782: - array - array of indices

1784:    Level: advanced

1786: .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetBlockIndices()
1787: @*/
1788: PetscErrorCode  ISLocalToGlobalMappingRestoreBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1789: {
1793:   if (*array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1794:   *array = NULL;
1795:   return(0);
1796: }
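
/*
   A sketch (not part of the original source) contrasting block and point indexing:
   local block i maps to global block blocks[i], i.e. global points
   bs*blocks[i] .. bs*blocks[i]+bs-1.  The helper name PrintBlockIndices is hypothetical.
*/
static PetscErrorCode PrintBlockIndices(ISLocalToGlobalMapping ltog)
{
  const PetscInt *blocks;
  PetscInt       n,bs,nblocks,i;

  ISLocalToGlobalMappingGetSize(ltog,&n);       /* point (expanded) size = nblocks*bs */
  ISLocalToGlobalMappingGetBlockSize(ltog,&bs);
  nblocks = n/bs;
  ISLocalToGlobalMappingGetBlockIndices(ltog,&blocks);
  for (i=0; i<nblocks; i++) {
    PetscPrintf(PETSC_COMM_SELF,"local block %D -> global block %D (points %D..%D)\n",i,blocks[i],bs*blocks[i],bs*blocks[i]+bs-1);
  }
  ISLocalToGlobalMappingRestoreBlockIndices(ltog,&blocks);
  return(0);
}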

1798: /*@C
1799:    ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings

1801:    Not Collective

1803:    Input Parameters:
1804: + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
1805: . n - number of mappings to concatenate
1806: - ltogs - local to global mappings

1808:    Output Parameter:
1809: . ltogcat - new mapping

1811:    Note: this currently always returns a mapping with a block size of 1

1813:    Developer Note: If all the input mappings have the same block size, that could easily be handled as a special case

1815:    Level: advanced

1817: .seealso: ISLocalToGlobalMappingCreate()
1818: @*/
1819: PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat)
1820: {
1821:   PetscInt       i,cnt,m,*idx;

1825:   if (n < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %D",n);
1829:   for (cnt=0,i=0; i<n; i++) {
1830:     ISLocalToGlobalMappingGetSize(ltogs[i],&m);
1831:     cnt += m;
1832:   }
1833:   PetscMalloc1(cnt,&idx);
1834:   for (cnt=0,i=0; i<n; i++) {
1835:     const PetscInt *subidx;
1836:     ISLocalToGlobalMappingGetSize(ltogs[i],&m);
1837:     ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx);
1838:     PetscArraycpy(&idx[cnt],subidx,m);
1839:     ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx);
1840:     cnt += m;
1841:   }
1842:   ISLocalToGlobalMappingCreate(comm,1,cnt,idx,PETSC_OWN_POINTER,ltogcat);
1843:   return(0);
1844: }
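
/*
   A usage sketch (not part of the original source): build two small mappings and
   concatenate them.  The resulting mapping has block size 1 and five local points
   mapping to the global indices 0,1,2,10,11.  The helper name ConcatenateExample
   is hypothetical.
*/
static PetscErrorCode ConcatenateExample(MPI_Comm comm,ISLocalToGlobalMapping *cat)
{
  ISLocalToGlobalMapping ltogs[2];
  const PetscInt         idxA[] = {0,1,2},idxB[] = {10,11};

  ISLocalToGlobalMappingCreate(comm,1,3,idxA,PETSC_COPY_VALUES,&ltogs[0]);
  ISLocalToGlobalMappingCreate(comm,1,2,idxB,PETSC_COPY_VALUES,&ltogs[1]);
  ISLocalToGlobalMappingConcatenate(comm,2,ltogs,cat);
  ISLocalToGlobalMappingDestroy(&ltogs[0]);
  ISLocalToGlobalMappingDestroy(&ltogs[1]);
  return(0);
}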

1846: /*MC
1847:       ISLOCALTOGLOBALMAPPINGBASIC - basic implementation of the ISLocalToGlobalMapping object. When ISGlobalToLocalMappingApply() is
1848:                                     used, this is suitable only for small to moderate size problems.

1850:    Options Database Keys:
1851: .   -islocaltoglobalmapping_type basic - select this method

1853:    Level: beginner

1855: .seealso:  ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetType(), ISLOCALTOGLOBALMAPPINGHASH
1856: M*/
1857: PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Basic(ISLocalToGlobalMapping ltog)
1858: {
1860:   ltog->ops->globaltolocalmappingapply      = ISGlobalToLocalMappingApply_Basic;
1861:   ltog->ops->globaltolocalmappingsetup      = ISGlobalToLocalMappingSetUp_Basic;
1862:   ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Basic;
1863:   ltog->ops->destroy                        = ISLocalToGlobalMappingDestroy_Basic;
1864:   return(0);
1865: }

1867: /*MC
1868:       ISLOCALTOGLOBALMAPPINGHASH - hash implementation of the ISLocalToGlobalMapping object. When ISGlobalToLocalMappingApply() is
1869:                                     used, this is suitable for large problems where the basic implementation would require too much memory.

1871:    Options Database Keys:
1872: .   -islocaltoglobalmapping_type hash - select this method

1874:    Notes:
1875:     This is selected automatically for large problems if the user does not set the type.

1877:    Level: beginner

1879: .seealso:  ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetType(), ISLOCALTOGLOBALMAPPINGBASIC
1880: M*/
1881: PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Hash(ISLocalToGlobalMapping ltog)
1882: {
1884:   ltog->ops->globaltolocalmappingapply      = ISGlobalToLocalMappingApply_Hash;
1885:   ltog->ops->globaltolocalmappingsetup      = ISGlobalToLocalMappingSetUp_Hash;
1886:   ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Hash;
1887:   ltog->ops->destroy                        = ISLocalToGlobalMappingDestroy_Hash;
1888:   return(0);
1889: }

1891: /*@C
1892:     ISLocalToGlobalMappingRegister -  Adds a method for applying a global to local mapping with an ISLocalToGlobalMapping

1894:    Not Collective

1896:    Input Parameters:
1897: +  sname - name of a new method
1898: -  function - routine to create the method context

1900:    Notes:
1901:    ISLocalToGlobalMappingRegister() may be called multiple times to add several user-defined mappings.

1903:    Sample usage:
1904: .vb
1905:    ISLocalToGlobalMappingRegister("my_mapper",MyCreate);
1906: .ve

1908:    Then, your mapping can be chosen with the procedural interface via
1909: $     ISLocalToGlobalMappingSetType(ltog,"my_mapper")
1910:    or at runtime via the option
1911: $     -islocaltoglobalmapping_type my_mapper

1913:    Level: advanced

1915: .seealso: ISLocalToGlobalMappingRegisterAll(), ISLocalToGlobalMappingRegisterDestroy(), ISLOCALTOGLOBALMAPPINGBASIC, ISLOCALTOGLOBALMAPPINGHASH

1917: @*/
1918: PetscErrorCode  ISLocalToGlobalMappingRegister(const char sname[],PetscErrorCode (*function)(ISLocalToGlobalMapping))
1919: {

1923:   ISInitializePackage();
1924:   PetscFunctionListAdd(&ISLocalToGlobalMappingList,sname,function);
1925:   return(0);
1926: }
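
/*
   A sketch (not part of the original source) of registering a hypothetical
   implementation named "my_mapper".  A real create routine would fill ltog->ops
   with its own global-to-local functions, as ISLocalToGlobalMappingCreate_Basic()
   does above; this placeholder leaves the operations unset.
*/
static PetscErrorCode MyMapperCreate(ISLocalToGlobalMapping ltog)
{
  /* a real implementation would set ltog->ops->globaltolocalmappingapply etc. here */
  return(0);
}

static PetscErrorCode RegisterMyMapper(void)
{
  ISLocalToGlobalMappingRegister("my_mapper",MyMapperCreate);
  return(0);
}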

1928: /*@C
1929:    ISLocalToGlobalMappingSetType - Builds ISLocalToGlobalMapping for a particular global to local mapping approach.

1931:    Logically Collective on ISLocalToGlobalMapping

1933:    Input Parameters:
1934: +  ltog - the ISLocalToGlobalMapping object
1935: -  type - a known method

1937:    Options Database Key:
1938: .  -islocaltoglobalmapping_type  <method> - Sets the method; use -help for a list
1939:     of available methods (for instance, basic or hash)

1941:    Notes:
1942:    See "petsc/include/petscis.h" for available methods

1944:   Normally, it is best to use the ISLocalToGlobalMappingSetFromOptions() command and
1945:   then set the ISLocalToGlobalMapping type from the options database rather than by using
1946:   this routine.

1948:   Level: intermediate

1950:   Developer Note: ISLocalToGlobalMappingRegister() is used to add new types to ISLocalToGlobalMappingList from which they
1951:   are accessed by ISLocalToGlobalMappingSetType().

1953: .seealso: PCSetType(), ISLocalToGlobalMappingType, ISLocalToGlobalMappingRegister(), ISLocalToGlobalMappingCreate()

1955: @*/
1956: PetscErrorCode  ISLocalToGlobalMappingSetType(ISLocalToGlobalMapping ltog, ISLocalToGlobalMappingType type)
1957: {
1958:   PetscErrorCode ierr,(*r)(ISLocalToGlobalMapping);
1959:   PetscBool      match;


1965:   PetscObjectTypeCompare((PetscObject)ltog,type,&match);
1966:   if (match) return(0);

1968:   PetscFunctionListFind(ISLocalToGlobalMappingList,type,&r);
1969:   if (!r) SETERRQ1(PetscObjectComm((PetscObject)ltog),PETSC_ERR_ARG_UNKNOWN_TYPE,"Unable to find requested ISLocalToGlobalMapping type %s",type);
1970:   /* Destroy the previous private LTOG context */
1971:   if (ltog->ops->destroy) {
1972:     (*ltog->ops->destroy)(ltog);
1973:     ltog->ops->destroy = NULL;
1974:   }
1975:   PetscObjectChangeTypeName((PetscObject)ltog,type);
1976:   (*r)(ltog);
1977:   return(0);
1978: }
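
/*
   A usage sketch (not part of the original source): explicitly select the hash
   implementation for an existing mapping, equivalent to running with
   -islocaltoglobalmapping_type hash.  The helper name UseHashMapping is hypothetical.
*/
static PetscErrorCode UseHashMapping(ISLocalToGlobalMapping ltog)
{
  ISLocalToGlobalMappingSetType(ltog,ISLOCALTOGLOBALMAPPINGHASH);
  return(0);
}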

1980: PetscBool ISLocalToGlobalMappingRegisterAllCalled = PETSC_FALSE;

1982: /*@C
1983:   ISLocalToGlobalMappingRegisterAll - Registers all of the local to global mapping components in the IS package.

1985:   Not Collective

1987:   Level: advanced

1989: .seealso:  ISRegister(), ISLocalToGlobalMappingRegister()
1990: @*/
1991: PetscErrorCode  ISLocalToGlobalMappingRegisterAll(void)
1992: {

1996:   if (ISLocalToGlobalMappingRegisterAllCalled) return(0);
1997:   ISLocalToGlobalMappingRegisterAllCalled = PETSC_TRUE;

1999:   ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGBASIC, ISLocalToGlobalMappingCreate_Basic);
2000:   ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGHASH, ISLocalToGlobalMappingCreate_Hash);
2001:   return(0);
2002: }