Actual source code: isltog.c

petsc-3.8.4 2018-03-24

  2:  #include <petsc/private/isimpl.h>
  3:  #include <petsc/private/hash.h>
  4:  #include <petscsf.h>
  5:  #include <petscviewer.h>

  7: PetscClassId IS_LTOGM_CLASSID;
  8: static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping,PetscInt*,PetscInt**,PetscInt**,PetscInt***);

 10: typedef struct {
 11:   PetscInt    *globals;
 12: } ISLocalToGlobalMapping_Basic;

 14: typedef struct {
 15:   PetscHashI globalht;
 16: } ISLocalToGlobalMapping_Hash;


 19: /* -----------------------------------------------------------------------------------------*/

 21: /*
 22:     Creates the global mapping information in the ISLocalToGlobalMapping structure

 24:     If the user has not selected how to handle the global to local mapping then use HASH for "large" problems
 25: */
 26: static PetscErrorCode ISGlobalToLocalMappingSetUp(ISLocalToGlobalMapping mapping)
 27: {
 28:   PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start;

 32:   if (mapping->data) return(0);
 33:   end   = 0;
 34:   start = PETSC_MAX_INT;

 36:   for (i=0; i<n; i++) {
 37:     if (idx[i] < 0) continue;
 38:     if (idx[i] < start) start = idx[i];
 39:     if (idx[i] > end)   end   = idx[i];
 40:   }
 41:   if (start > end) {start = 0; end = -1;}
 42:   mapping->globalstart = start;
 43:   mapping->globalend   = end;
 44:   if (!((PetscObject)mapping)->type_name) {
 45:     if ((end - start) > PetscMax(4*n,1000000)) {
 46:       ISLocalToGlobalMappingSetType(mapping,ISLOCALTOGLOBALMAPPINGHASH);
 47:     } else {
 48:       ISLocalToGlobalMappingSetType(mapping,ISLOCALTOGLOBALMAPPINGBASIC);
 49:     }
 50:   }
 51:   (*mapping->ops->globaltolocalmappingsetup)(mapping);
 52:   return(0);
 53: }

 55: static PetscErrorCode ISGlobalToLocalMappingSetUp_Basic(ISLocalToGlobalMapping mapping)
 56: {
 57:   PetscErrorCode              ierr;
 58:   PetscInt                    i,*idx = mapping->indices,n = mapping->n,end,start,*globals;
 59:   ISLocalToGlobalMapping_Basic *map;

 62:   start            = mapping->globalstart;
 63:   end              = mapping->globalend;
 64:   PetscNew(&map);
 65:   PetscMalloc1(end-start+2,&globals);
 66:   map->globals     = globals;
 67:   for (i=0; i<end-start+1; i++) globals[i] = -1;
 68:   for (i=0; i<n; i++) {
 69:     if (idx[i] < 0) continue;
 70:     globals[idx[i] - start] = i;
 71:   }
 72:   mapping->data = (void*)map;
 73:   PetscLogObjectMemory((PetscObject)mapping,(end-start+1)*sizeof(PetscInt));
 74:   return(0);
 75: }

 77: static PetscErrorCode ISGlobalToLocalMappingSetUp_Hash(ISLocalToGlobalMapping mapping)
 78: {
 79:   PetscErrorCode              ierr;
 80:   PetscInt                    i,*idx = mapping->indices,n = mapping->n;
 81:   ISLocalToGlobalMapping_Hash *map;

 84:   PetscNew(&map);
 85:   PetscHashICreate(map->globalht);
 86:   for (i=0; i<n; i++ ) {
 87:     if (idx[i] < 0) continue;
 88:     PetscHashIAdd(map->globalht, idx[i], i);
 89:   }
 90:   mapping->data = (void*)map;
 91:   PetscLogObjectMemory((PetscObject)mapping,2*n*sizeof(PetscInt));
 92:   return(0);
 93: }

 95: static PetscErrorCode ISLocalToGlobalMappingDestroy_Basic(ISLocalToGlobalMapping mapping)
 96: {
 97:   PetscErrorCode              ierr;
 98:   ISLocalToGlobalMapping_Basic *map  = (ISLocalToGlobalMapping_Basic *)mapping->data;

101:   if (!map) return(0);
102:   PetscFree(map->globals);
103:   PetscFree(mapping->data);
104:   return(0);
105: }

107: static PetscErrorCode ISLocalToGlobalMappingDestroy_Hash(ISLocalToGlobalMapping mapping)
108: {
109:   PetscErrorCode              ierr;
110:   ISLocalToGlobalMapping_Hash *map  = (ISLocalToGlobalMapping_Hash*)mapping->data;

113:   if (!map) return(0);
114:   PetscHashIDestroy(map->globalht);
115:   PetscFree(mapping->data);
116:   return(0);
117: }
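/*
   Editorial note (added for clarity, not part of the original source): each GTOLTYPE/GTOLNAME/GTOL
   triple defined below, followed by an include of ../src/vec/is/utils/isltog.h, instantiates one of
   the four global-to-local apply routines registered later in this file
   (ISGlobalToLocalMappingApply_Basic, ISGlobalToLocalMappingApplyBlock_Basic,
   ISGlobalToLocalMappingApply_Hash, ISGlobalToLocalMappingApplyBlock_Hash). GTOLTYPE selects the
   private data structure that mapping->data is cast to, while GTOL supplies the lookup of a single
   global index g (point or block) into its local value.
*/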

119: #define GTOLTYPE _Basic
120: #define GTOLNAME _Basic
121: #define GTOL(g, local) do {                  \
122:     local = map->globals[g/bs - start];      \
123:     local = bs*local + (g % bs);             \
124:   } while (0)
125:  #include <../src/vec/is/utils/isltog.h>

127: #define GTOLTYPE _Basic
128: #define GTOLNAME Block_Basic
129: #define GTOL(g, local) do {                  \
130:     local = map->globals[g - start];         \
131:   } while (0)
132:  #include <../src/vec/is/utils/isltog.h>

134: #define GTOLTYPE _Hash
135: #define GTOLNAME _Hash
136: #define GTOL(g, local) do {                  \
137:     PetscHashIMap(map->globalht,g/bs,local); \
138:     local = bs*local + (g % bs);             \
139:    } while (0)
140:  #include <../src/vec/is/utils/isltog.h>

142: #define GTOLTYPE _Hash
143: #define GTOLNAME Block_Hash
144: #define GTOL(g, local) do {                  \
145:     PetscHashIMap(map->globalht,g,local);    \
146:   } while (0)
147:  #include <../src/vec/is/utils/isltog.h>

149: /*@
150:     ISLocalToGlobalMappingDuplicate - Duplicates the local to global mapping object

152:     Not Collective

154:     Input Parameter:
155: .   ltog - local to global mapping

157:     Output Parameter:
158: .   nltog - the duplicated local to global mapping

160:     Level: advanced

162:     Concepts: mapping^local to global

164: .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
165: @*/
166: PetscErrorCode  ISLocalToGlobalMappingDuplicate(ISLocalToGlobalMapping ltog,ISLocalToGlobalMapping* nltog)
167: {

172:   ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)ltog),ltog->bs,ltog->n,ltog->indices,PETSC_COPY_VALUES,nltog);
173:   return(0);
174: }

176: /*@
177:     ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping

179:     Not Collective

181:     Input Parameter:
182: .   ltog - local to global mapping

184:     Output Parameter:
185: .   n - the number of entries in the local mapping, ISLocalToGlobalMappingGetIndices() returns an array of this length

187:     Level: advanced

189:     Concepts: mapping^local to global

191: .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
192: @*/
193: PetscErrorCode  ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
194: {
198:   *n = mapping->bs*mapping->n;
199:   return(0);
200: }

202: /*@C
203:     ISLocalToGlobalMappingView - View a local to global mapping

205:     Not Collective

207:     Input Parameters:
208: +   ltog - local to global mapping
209: -   viewer - viewer

211:     Level: advanced

213:     Concepts: mapping^local to global

215: .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
216: @*/
217: PetscErrorCode  ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
218: {
219:   PetscInt       i;
220:   PetscMPIInt    rank;
221:   PetscBool      iascii;

226:   if (!viewer) {
227:     PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping),&viewer);
228:   }

231:   MPI_Comm_rank(PetscObjectComm((PetscObject)mapping),&rank);
232:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
233:   if (iascii) {
234:     PetscObjectPrintClassNamePrefixType((PetscObject)mapping,viewer);
235:     PetscViewerASCIIPushSynchronized(viewer);
236:     for (i=0; i<mapping->n; i++) {
237:       PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %D %D\n",rank,i,mapping->indices[i]);
238:     }
239:     PetscViewerFlush(viewer);
240:     PetscViewerASCIIPopSynchronized(viewer);
241:   }
242:   return(0);
243: }

245: /*@
246:     ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
247:     ordering and a global parallel ordering.

249:     Not collective

251:     Input Parameter:
252: .   is - index set containing the global numbers for each local number

254:     Output Parameter:
255: .   mapping - new mapping data structure

257:     Notes: the block size of the IS determines the block size of the mapping
258:     Level: advanced

260:     Concepts: mapping^local to global

262: .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetFromOptions()
263: @*/
264: PetscErrorCode  ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
265: {
267:   PetscInt       n,bs;
268:   const PetscInt *indices;
269:   MPI_Comm       comm;
270:   PetscBool      isblock;


276:   PetscObjectGetComm((PetscObject)is,&comm);
277:   ISGetLocalSize(is,&n);
278:   PetscObjectTypeCompare((PetscObject)is,ISBLOCK,&isblock);
279:   if (!isblock) {
280:     ISGetIndices(is,&indices);
281:     ISLocalToGlobalMappingCreate(comm,1,n,indices,PETSC_COPY_VALUES,mapping);
282:     ISRestoreIndices(is,&indices);
283:   } else {
284:     ISGetBlockSize(is,&bs);
285:     ISBlockGetIndices(is,&indices);
286:     ISLocalToGlobalMappingCreate(comm,bs,n/bs,indices,PETSC_COPY_VALUES,mapping);
287:     ISBlockRestoreIndices(is,&indices);
288:   }
289:   return(0);
290: }
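/*
   Usage sketch (editorial addition, not part of the PETSc source): building a mapping from an
   existing IS; the block size of the IS (1 for an ISGENERAL) becomes the block size of the mapping,
   and the IS may be destroyed afterwards since the indices are copied. Index values are illustrative.

      IS                     is;
      PetscInt               globals[3] = {7,2,9};
      ISLocalToGlobalMapping ltog;

      ISCreateGeneral(PETSC_COMM_SELF,3,globals,PETSC_COPY_VALUES,&is);
      ISLocalToGlobalMappingCreateIS(is,&ltog);
      ISDestroy(&is);
      ISLocalToGlobalMappingDestroy(&ltog);
*/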

292: /*@C
293:     ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
294:     ordering and a global parallel ordering.

296:     Collective

298:     Input Parameter:
299: +   sf - star forest mapping contiguous local indices to (rank, offset)
300: -   start - first global index on this process

302:     Output Parameter:
303: .   mapping - new mapping data structure

305:     Level: advanced

307:     Concepts: mapping^local to global

309: .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingSetFromOptions()
310: @*/
311: PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf,PetscInt start,ISLocalToGlobalMapping *mapping)
312: {
314:   PetscInt       i,maxlocal,nroots,nleaves,*globals,*ltog;
315:   const PetscInt *ilocal;
316:   MPI_Comm       comm;


322:   PetscObjectGetComm((PetscObject)sf,&comm);
323:   PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,NULL);
324:   if (ilocal) {
325:     for (i=0,maxlocal=0; i<nleaves; i++) maxlocal = PetscMax(maxlocal,ilocal[i]+1);
326:   }
327:   else maxlocal = nleaves;
328:   PetscMalloc1(nroots,&globals);
329:   PetscMalloc1(maxlocal,&ltog);
330:   for (i=0; i<nroots; i++) globals[i] = start + i;
331:   for (i=0; i<maxlocal; i++) ltog[i] = -1;
332:   PetscSFBcastBegin(sf,MPIU_INT,globals,ltog);
333:   PetscSFBcastEnd(sf,MPIU_INT,globals,ltog);
334:   ISLocalToGlobalMappingCreate(comm,1,maxlocal,ltog,PETSC_OWN_POINTER,mapping);
335:   PetscFree(globals);
336:   return(0);
337: }

339: /*@
340:     ISLocalToGlobalMappingSetBlockSize - Sets the blocksize of the mapping

342:     Not collective

344:     Input Parameters:
345: +   mapping - mapping data structure
346: -   bs - the blocksize

348:     Level: advanced

350:     Concepts: mapping^local to global

352: .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
353: @*/
354: PetscErrorCode  ISLocalToGlobalMappingSetBlockSize(ISLocalToGlobalMapping mapping,PetscInt bs)
355: {
356:   PetscInt       *nid;
357:   const PetscInt *oid;
358:   PetscInt       i,cn,on,obs,nn;

363:   if (bs < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid block size %D",bs);
364:   if (bs == mapping->bs) return(0);
365:   on  = mapping->n;
366:   obs = mapping->bs;
367:   oid = mapping->indices;
368:   nn  = (on*obs)/bs;
369:   if ((on*obs)%bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block size %D is inconsistent with block size %D and number of block indices %D",bs,obs,on);

371:   PetscMalloc1(nn,&nid);
372:   ISLocalToGlobalMappingGetIndices(mapping,&oid);
373:   for (i=0;i<nn;i++) {
374:     PetscInt j;
375:     for (j=0,cn=0;j<bs-1;j++) {
376:       if (oid[i*bs+j] < 0) { cn++; continue; }
377:       if (oid[i*bs+j] != oid[i*bs+j+1]-1) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %D and %D are incompatible with the block indices: non-consecutive indices %D %D",bs,obs,oid[i*bs+j],oid[i*bs+j+1]);
378:     }
379:     if (oid[i*bs+j] < 0) cn++;
380:     if (cn) {
381:       if (cn != bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %D and %D are incompatible with the block indices: invalid number of negative entries in block %D",bs,obs,cn);
382:       nid[i] = -1;
383:     } else {
384:       nid[i] = oid[i*bs]/bs;
385:     }
386:   }
387:   ISLocalToGlobalMappingRestoreIndices(mapping,&oid);

389:   mapping->n           = nn;
390:   mapping->bs          = bs;
391:   PetscFree(mapping->indices);
392:   mapping->indices     = nid;
393:   mapping->globalstart = 0;
394:   mapping->globalend   = 0;
395:   if (mapping->ops->destroy) {
396:     (*mapping->ops->destroy)(mapping);
397:   }
398:   return(0);
399: }
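/*
   Usage sketch (editorial addition, not part of the PETSc source): a block-size-1 mapping whose
   indices come in consecutive pairs can be converted in place to block size 2; here the point
   indices {4,5,10,11} collapse to the block indices {2,5}. Values are illustrative.

      PetscInt               globals[4] = {4,5,10,11};
      ISLocalToGlobalMapping ltog;

      ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,1,4,globals,PETSC_COPY_VALUES,&ltog);
      ISLocalToGlobalMappingSetBlockSize(ltog,2);
      ISLocalToGlobalMappingDestroy(&ltog);
*/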

401: /*@
402:     ISLocalToGlobalMappingGetBlockSize - Gets the blocksize of the mapping

405:     Not Collective

407:     Input Parameters:
408: .   mapping - mapping data structure

410:     Output Parameter:
411: .   bs - the blocksize

413:     Level: advanced

415:     Concepts: mapping^local to global

417: .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
418: @*/
419: PetscErrorCode  ISLocalToGlobalMappingGetBlockSize(ISLocalToGlobalMapping mapping,PetscInt *bs)
420: {
423:   *bs = mapping->bs;
424:   return(0);
425: }

427: /*@
428:     ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
429:     ordering and a global parallel ordering.

431:     Not Collective, but communicator may have more than one process

433:     Input Parameters:
434: +   comm - MPI communicator
435: .   bs - the block size
436: .   n - the number of local elements divided by the block size, or equivalently the number of block indices
437: .   indices - the global index for each local element, these do not need to be in increasing order (sorted), these values should not be scaled (i.e. multiplied) by the blocksize bs
438: -   mode - see PetscCopyMode

440:     Output Parameter:
441: .   mapping - new mapping data structure

443:     Notes: There is one integer value in indices per block and it represents the actual indices bs*idx + j, where j=0,..,bs-1

445:     For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
446:     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
447:     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.

449:     Level: advanced

451:     Concepts: mapping^local to global

453: .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingSetFromOptions(), ISLOCALTOGLOBALMAPPINGBASIC, ISLOCALTOGLOBALMAPPINGHASH
454:           ISLocalToGlobalMappingSetType(), ISLocalToGlobalMappingType
455: @*/
456: PetscErrorCode  ISLocalToGlobalMappingCreate(MPI_Comm comm,PetscInt bs,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping)
457: {
459:   PetscInt       *in;


465:   *mapping = NULL;
466:   ISInitializePackage();

468:   PetscHeaderCreate(*mapping,IS_LTOGM_CLASSID,"ISLocalToGlobalMapping","Local to global mapping","IS",
469:                            comm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);
470:   (*mapping)->n             = n;
471:   (*mapping)->bs            = bs;
472:   (*mapping)->info_cached   = PETSC_FALSE;
473:   (*mapping)->info_free     = PETSC_FALSE;
474:   (*mapping)->info_procs    = NULL;
475:   (*mapping)->info_numprocs = NULL;
476:   (*mapping)->info_indices  = NULL;

478:   (*mapping)->ops->globaltolocalmappingapply      = NULL;
479:   (*mapping)->ops->globaltolocalmappingapplyblock = NULL;
480:   (*mapping)->ops->destroy                        = NULL;
481:   if (mode == PETSC_COPY_VALUES) {
482:     PetscMalloc1(n,&in);
483:     PetscMemcpy(in,indices,n*sizeof(PetscInt));
484:     (*mapping)->indices = in;
485:     PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));
486:   } else if (mode == PETSC_OWN_POINTER) {
487:     (*mapping)->indices = (PetscInt*)indices;
488:     PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));
489:   }
490:   else SETERRQ(comm,PETSC_ERR_SUP,"Cannot currently use PETSC_USE_POINTER");
491:   return(0);
492: }
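/*
   Usage sketch (editorial addition, not part of the PETSc source): a serial block-size-1 mapping
   built from illustrative global numbers; with these values, the local indices 1 and 3 translate
   to the global indices 11 and 30. Error checking is omitted as in the rest of this listing.

      PetscInt               globals[4] = {10,11,25,30};
      PetscInt               locals[2]  = {1,3},out[2];
      ISLocalToGlobalMapping ltog;

      ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,1,4,globals,PETSC_COPY_VALUES,&ltog);
      ISLocalToGlobalMappingApply(ltog,2,locals,out);
      ISLocalToGlobalMappingDestroy(&ltog);
*/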

494: PetscFunctionList ISLocalToGlobalMappingList = NULL;


497: /*@
498:    ISLocalToGlobalMappingSetFromOptions - Set mapping options from the options database.

500:    Not collective

502:    Input Parameters:
503: .  mapping - mapping data structure

505:    Level: advanced

507: @*/
508: PetscErrorCode ISLocalToGlobalMappingSetFromOptions(ISLocalToGlobalMapping mapping)
509: {
510:   PetscErrorCode             ierr;
511:   char                       type[256];
512:   ISLocalToGlobalMappingType defaulttype = "Not set";
513:   PetscBool                  flg;

517:   ISLocalToGlobalMappingRegisterAll();
518:   PetscObjectOptionsBegin((PetscObject)mapping);
519:   PetscOptionsFList("-islocaltoglobalmapping_type","ISLocalToGlobalMapping method","ISLocalToGlobalMappingSetType",ISLocalToGlobalMappingList,(char*)(((PetscObject)mapping)->type_name) ? ((PetscObject)mapping)->type_name : defaulttype,type,256,&flg);
520:   if (flg) {
521:     ISLocalToGlobalMappingSetType(mapping,type);
522:   }
523:   PetscOptionsEnd();
524:   return(0);
525: }
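/*
   Usage sketch (editorial addition, not part of the PETSc source): the global-to-local
   implementation can be fixed in the code or deferred to the options database; ltog is assumed
   to be an existing ISLocalToGlobalMapping.

      ISLocalToGlobalMappingSetType(ltog,ISLOCALTOGLOBALMAPPINGHASH);

   or, to honor a command line option such as -islocaltoglobalmapping_type basic,

      ISLocalToGlobalMappingSetFromOptions(ltog);
*/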

527: /*@
528:    ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
529:    ordering and a global parallel ordering.

531:    Not Collective

533:    Input Parameters:
534: .  mapping - mapping data structure

536:    Level: advanced

538: .seealso: ISLocalToGlobalMappingCreate()
539: @*/
540: PetscErrorCode  ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
541: {

545:   if (!*mapping) return(0);
547:   if (--((PetscObject)(*mapping))->refct > 0) {*mapping = 0;return(0);}
548:   PetscFree((*mapping)->indices);
549:   PetscFree((*mapping)->info_procs);
550:   PetscFree((*mapping)->info_numprocs);
551:   if ((*mapping)->info_indices) {
552:     PetscInt i;

554:     PetscFree(((*mapping)->info_indices)[0]);
555:     for (i=1; i<(*mapping)->info_nproc; i++) {
556:       PetscFree(((*mapping)->info_indices)[i]);
557:     }
558:     PetscFree((*mapping)->info_indices);
559:   }
560:   if ((*mapping)->ops->destroy) {
561:     (*(*mapping)->ops->destroy)(*mapping);
562:   }
563:   PetscHeaderDestroy(mapping);
564:   *mapping = 0;
565:   return(0);
566: }

568: /*@
569:     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
570:     a new index set using the global numbering defined in an ISLocalToGlobalMapping
571:     context.

573:     Not collective

575:     Input Parameters:
576: +   mapping - mapping between local and global numbering
577: -   is - index set in local numbering

579:     Output Parameters:
580: .   newis - index set in global numbering

582:     Level: advanced

584:     Concepts: mapping^local to global

586: .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
587:           ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
588: @*/
589: PetscErrorCode  ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
590: {
592:   PetscInt       n,*idxout;
593:   const PetscInt *idxin;


600:   ISGetLocalSize(is,&n);
601:   ISGetIndices(is,&idxin);
602:   PetscMalloc1(n,&idxout);
603:   ISLocalToGlobalMappingApply(mapping,n,idxin,idxout);
604:   ISRestoreIndices(is,&idxin);
605:   ISCreateGeneral(PetscObjectComm((PetscObject)is),n,idxout,PETSC_OWN_POINTER,newis);
606:   return(0);
607: }
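/*
   Usage sketch (editorial addition, not part of the PETSc source): renumbering an IS given in the
   local numbering into the global numbering; ltog is assumed to be an existing mapping and the
   local indices are illustrative.

      IS       locis,globis;
      PetscInt locals[2] = {0,2};

      ISCreateGeneral(PETSC_COMM_SELF,2,locals,PETSC_COPY_VALUES,&locis);
      ISLocalToGlobalMappingApplyIS(ltog,locis,&globis);
      ISDestroy(&locis);
      ISDestroy(&globis);
*/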

609: /*@
610:    ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
611:    and converts them to the global numbering.

613:    Not collective

615:    Input Parameters:
616: +  mapping - the local to global mapping context
617: .  N - number of integers
618: -  in - input indices in local numbering

620:    Output Parameter:
621: .  out - indices in global numbering

623:    Notes:
624:    The in and out array parameters may be identical.

626:    Level: advanced

628: .seealso: ISLocalToGlobalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
629:           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
630:           AOPetscToApplication(), ISGlobalToLocalMappingApply()

632:     Concepts: mapping^local to global
633: @*/
634: PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
635: {
636:   PetscInt i,bs,Nmax;

640:   bs   = mapping->bs;
641:   Nmax = bs*mapping->n;
642:   if (bs == 1) {
643:     const PetscInt *idx = mapping->indices;
644:     for (i=0; i<N; i++) {
645:       if (in[i] < 0) {
646:         out[i] = in[i];
647:         continue;
648:       }
649:       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
650:       out[i] = idx[in[i]];
651:     }
652:   } else {
653:     const PetscInt *idx = mapping->indices;
654:     for (i=0; i<N; i++) {
655:       if (in[i] < 0) {
656:         out[i] = in[i];
657:         continue;
658:       }
659:       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
660:       out[i] = idx[in[i]/bs]*bs + (in[i] % bs);
661:     }
662:   }
663:   return(0);
664: }
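/*
   Usage sketch (editorial addition, not part of the PETSc source): with block size 2 and block
   indices {0,3} the mapping represents the point indices {0,1,6,7}; applying it to the local point
   indices 0..3 therefore returns exactly those four values in out[].

      PetscInt               blockidx[2] = {0,3},in[4] = {0,1,2,3},out[4];
      ISLocalToGlobalMapping ltog;

      ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,2,2,blockidx,PETSC_COPY_VALUES,&ltog);
      ISLocalToGlobalMappingApply(ltog,4,in,out);
      ISLocalToGlobalMappingDestroy(&ltog);
*/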

666: /*@
667:    ISLocalToGlobalMappingApplyBlock - Takes a list of integers in a local block numbering and converts them to the global block numbering

669:    Not collective

671:    Input Parameters:
672: +  mapping - the local to global mapping context
673: .  N - number of integers
674: -  in - input indices in local block numbering

676:    Output Parameter:
677: .  out - indices in global block numbering

679:    Notes:
680:    The in and out array parameters may be identical.

682:    Example:
683:      If the index values are {0,1,6,7} set with a call to ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,2,2,{0,3}) then the mapping applied to 0
684:      (the first block) would produce 0 and the mapping applied to 1 (the second block) would produce 3.

686:    Level: advanced

688: .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
689:           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
690:           AOPetscToApplication(), ISGlobalToLocalMappingApply()

692:     Concepts: mapping^local to global
693: @*/
694: PetscErrorCode ISLocalToGlobalMappingApplyBlock(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
695: {

699:   {
700:     PetscInt i,Nmax = mapping->n;
701:     const PetscInt *idx = mapping->indices;

703:     for (i=0; i<N; i++) {
704:       if (in[i] < 0) {
705:         out[i] = in[i];
706:         continue;
707:       }
708:       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local block index %D too large %D (max) at %D",in[i],Nmax-1,i);
709:       out[i] = idx[in[i]];
710:     }
711:   }
712:   return(0);
713: }
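/*
   Usage sketch (editorial addition, not part of the PETSc source): the example from the manual page
   above in code form; local block 0 maps to global block 0 and local block 1 maps to global block 3.

      PetscInt               blockidx[2] = {0,3},in[2] = {0,1},out[2];
      ISLocalToGlobalMapping ltog;

      ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,2,2,blockidx,PETSC_COPY_VALUES,&ltog);
      ISLocalToGlobalMappingApplyBlock(ltog,2,in,out);
      ISLocalToGlobalMappingDestroy(&ltog);
*/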

715: /*@
716:     ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
717:     specified with a global numbering.

719:     Not collective

721:     Input Parameters:
722: +   mapping - mapping between local and global numbering
723: .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
724:            IS_GTOLM_DROP - drops the indices with no local value from the output list
725: .   n - number of global indices to map
726: -   idx - global indices to map

728:     Output Parameters:
729: +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
730: -   idxout - local index of each global index, one must pass in an array long enough
731:              to hold all the indices. You can call ISGlobalToLocalMappingApply() with
732:              idxout == NULL to determine the required length (returned in nout)
733:              and then allocate the required space and call ISGlobalToLocalMappingApply()
734:              a second time to set the values.

736:     Notes:
737:     Either nout or idxout may be NULL. idx and idxout may be identical.

739:     For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
740:     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
741:     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.

743:     Level: advanced

745:     Developer Note: The manual page states that idx and idxout may be identical but the calling
746:        sequence declares idx as const so it cannot be the same as idxout.

748:     Concepts: mapping^global to local

750: .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),
751:           ISLocalToGlobalMappingDestroy()
752: @*/
753: PetscErrorCode  ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
754: {

759:   if (!mapping->data) {
760:     ISGlobalToLocalMappingSetUp(mapping);
761:   }
762:   (*mapping->ops->globaltolocalmappingapply)(mapping,type,n,idx,nout,idxout);
763:   return(0);
764: }
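/*
   Usage sketch (editorial addition, not part of the PETSc source): the two-pass pattern described
   above for IS_GTOLM_DROP, first querying the output length and then filling the allocated array;
   ltog is assumed to be an existing mapping and the global indices are illustrative.

      PetscInt nout,*local;
      PetscInt globals[3] = {25,42,10};

      ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,3,globals,&nout,NULL);
      PetscMalloc1(nout,&local);
      ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,3,globals,&nout,local);
      PetscFree(local);
*/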

766: /*@
767:     ISGlobalToLocalMappingApplyIS - Creates from an IS in the global numbering
768:     a new index set using the local numbering defined in an ISLocalToGlobalMapping
769:     context.

771:     Not collective

773:     Input Parameters:
774: +   mapping - mapping between local and global numbering
775: -   is - index set in global numbering

777:     Output Parameters:
778: .   newis - index set in local numbering

780:     Level: advanced

782:     Concepts: mapping^local to global

784: .seealso: ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
785:           ISLocalToGlobalMappingDestroy()
786: @*/
787: PetscErrorCode  ISGlobalToLocalMappingApplyIS(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type, IS is,IS *newis)
788: {
790:   PetscInt       n,nout,*idxout;
791:   const PetscInt *idxin;


798:   ISGetLocalSize(is,&n);
799:   ISGetIndices(is,&idxin);
800:   if (type == IS_GTOLM_MASK) {
801:     PetscMalloc1(n,&idxout);
802:   } else {
803:     ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,NULL);
804:     PetscMalloc1(nout,&idxout);
805:   }
806:   ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,idxout);
807:   ISRestoreIndices(is,&idxin);
808:   ISCreateGeneral(PetscObjectComm((PetscObject)is),nout,idxout,PETSC_OWN_POINTER,newis);
809:   return(0);
810: }

812: /*@
813:     ISGlobalToLocalMappingApplyBlock - Provides the local block numbering for a list of integers
814:     specified with a block global numbering.

816:     Not collective

818:     Input Parameters:
819: +   mapping - mapping between local and global numbering
820: .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
821:            IS_GTOLM_DROP - drops the indices with no local value from the output list
822: .   n - number of global indices to map
823: -   idx - global indices to map

825:     Output Parameters:
826: +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
827: -   idxout - local index of each global index, one must pass in an array long enough
828:              to hold all the indices. You can call ISGlobalToLocalMappingApplyBlock() with
829:              idxout == NULL to determine the required length (returned in nout)
830:              and then allocate the required space and call ISGlobalToLocalMappingApplyBlock()
831:              a second time to set the values.

833:     Notes:
834:     Either nout or idxout may be NULL. idx and idxout may be identical.

836:     For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
837:     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
838:     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.

840:     Level: advanced

842:     Developer Note: The manual page states that idx and idxout may be identical but the calling
843:        sequence declares idx as const so it cannot be the same as idxout.

845:     Concepts: mapping^global to local

847: .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
848:           ISLocalToGlobalMappingDestroy()
849: @*/
850: PetscErrorCode  ISGlobalToLocalMappingApplyBlock(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,
851:                                                  PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
852: {

857:   if (!mapping->data) {
858:     ISGlobalToLocalMappingSetUp(mapping);
859:   }
860:   (*mapping->ops->globaltolocalmappingapplyblock)(mapping,type,n,idx,nout,idxout);
861:   return(0);
862: }


865: /*@C
866:     ISLocalToGlobalMappingGetBlockInfo - Gets the neighbor information for each processor and
867:      each index shared by more than one processor

869:     Collective on ISLocalToGlobalMapping

871:     Input Parameters:
872: .   mapping - the mapping from local to global indexing

874:     Output Parameter:
875: +   nproc - number of processors that are connected to this one
876: .   proc - neighboring processors
877: .   numproc - number of indices for each subdomain (processor)
878: -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)

880:     Level: advanced

882:     Concepts: mapping^local to global

884:     Fortran Usage:
885: $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
886: $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
887:           PetscInt indices[nproc][numprocmax],ierr)
888:         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
889:         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.


892: .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
893:           ISLocalToGlobalMappingRestoreInfo()
894: @*/
895: PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
896: {

901:   if (mapping->info_cached) {
902:     *nproc    = mapping->info_nproc;
903:     *procs    = mapping->info_procs;
904:     *numprocs = mapping->info_numprocs;
905:     *indices  = mapping->info_indices;
906:   } else {
907:     ISLocalToGlobalMappingGetBlockInfo_Private(mapping,nproc,procs,numprocs,indices);
908:   }
909:   return(0);
910: }

912: static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
913: {
915:   PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
916:   PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
917:   PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
918:   PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
919:   PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
920:   PetscInt       first_procs,first_numprocs,*first_indices;
921:   MPI_Request    *recv_waits,*send_waits;
922:   MPI_Status     recv_status,*send_status,*recv_statuses;
923:   MPI_Comm       comm;
924:   PetscBool      debug = PETSC_FALSE;

927:   PetscObjectGetComm((PetscObject)mapping,&comm);
928:   MPI_Comm_size(comm,&size);
929:   MPI_Comm_rank(comm,&rank);
930:   if (size == 1) {
931:     *nproc         = 0;
932:     *procs         = NULL;
933:     PetscNew(numprocs);
934:     (*numprocs)[0] = 0;
935:     PetscNew(indices);
936:     (*indices)[0]  = NULL;
937:     /* save info for reuse */
938:     mapping->info_nproc = *nproc;
939:     mapping->info_procs = *procs;
940:     mapping->info_numprocs = *numprocs;
941:     mapping->info_indices = *indices;
942:     mapping->info_cached = PETSC_TRUE;
943:     return(0);
944:   }

946:   PetscOptionsGetBool(((PetscObject)mapping)->options,NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,NULL);

948:   /*
949:     Notes on ISLocalToGlobalMappingGetBlockInfo

951:     globally owned node - the nodes that have been assigned to this processor in global
952:            numbering, just for this routine.

954:     nontrivial globally owned node - node assigned to this processor that is on a subdomain
955:            boundary (i.e. it has more than one local owner)

957:     locally owned node - node that exists on this processors subdomain

959:     nontrivial locally owned node - node that is not in the interior (i.e. has more than one
960:            local subdomain)
961:   */
962:   PetscObjectGetNewTag((PetscObject)mapping,&tag1);
963:   PetscObjectGetNewTag((PetscObject)mapping,&tag2);
964:   PetscObjectGetNewTag((PetscObject)mapping,&tag3);

966:   for (i=0; i<n; i++) {
967:     if (lindices[i] > max) max = lindices[i];
968:   }
969:   MPIU_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);
970:   Ng++;
971:   MPI_Comm_size(comm,&size);
972:   MPI_Comm_rank(comm,&rank);
973:   scale  = Ng/size + 1;
974:   ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
975:   rstart = scale*rank;

977:   /* determine ownership ranges of global indices */
978:   PetscMalloc1(2*size,&nprocs);
979:   PetscMemzero(nprocs,2*size*sizeof(PetscInt));

981:   /* determine owners of each local node  */
982:   PetscMalloc1(n,&owner);
983:   for (i=0; i<n; i++) {
984:     proc             = lindices[i]/scale; /* processor that globally owns this index */
985:     nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
986:     owner[i]         = proc;
987:     nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
988:   }
989:   nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
990:   PetscInfo1(mapping,"Number of global owners for my local data %D\n",nsends);

992:   /* inform other processors of number of messages and max length*/
993:   PetscMaxSum(comm,nprocs,&nmax,&nrecvs);
994:   PetscInfo1(mapping,"Number of local owners for my global data %D\n",nrecvs);

996:   /* post receives for owned rows */
997:   PetscMalloc1((2*nrecvs+1)*(nmax+1),&recvs);
998:   PetscMalloc1(nrecvs+1,&recv_waits);
999:   for (i=0; i<nrecvs; i++) {
1000:     MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);
1001:   }

1003:   /* pack messages containing lists of local nodes to owners */
1004:   PetscMalloc1(2*n+1,&sends);
1005:   PetscMalloc1(size+1,&starts);
1006:   starts[0] = 0;
1007:   for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
1008:   for (i=0; i<n; i++) {
1009:     sends[starts[owner[i]]++] = lindices[i];
1010:     sends[starts[owner[i]]++] = i;
1011:   }
1012:   PetscFree(owner);
1013:   starts[0] = 0;
1014:   for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];

1016:   /* send the messages */
1017:   PetscMalloc1(nsends+1,&send_waits);
1018:   PetscMalloc1(nsends+1,&dest);
1019:   cnt = 0;
1020:   for (i=0; i<size; i++) {
1021:     if (nprocs[2*i]) {
1022:       MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);
1023:       dest[cnt] = i;
1024:       cnt++;
1025:     }
1026:   }
1027:   PetscFree(starts);

1029:   /* wait on receives */
1030:   PetscMalloc1(nrecvs+1,&source);
1031:   PetscMalloc1(nrecvs+1,&len);
1032:   cnt  = nrecvs;
1033:   PetscMalloc1(ng+1,&nownedsenders);
1034:   PetscMemzero(nownedsenders,ng*sizeof(PetscInt));
1035:   while (cnt) {
1036:     MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);
1037:     /* unpack receives into our local space */
1038:     MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);
1039:     source[imdex] = recv_status.MPI_SOURCE;
1040:     len[imdex]    = len[imdex]/2;
1041:     /* count how many local owners for each of my global owned indices */
1042:     for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
1043:     cnt--;
1044:   }
1045:   PetscFree(recv_waits);

1047:   /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
1048:   nowned  = 0;
1049:   nownedm = 0;
1050:   for (i=0; i<ng; i++) {
1051:     if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
1052:   }

1054:   /* create single array to contain rank of all local owners of each globally owned index */
1055:   PetscMalloc1(nownedm+1,&ownedsenders);
1056:   PetscMalloc1(ng+1,&starts);
1057:   starts[0] = 0;
1058:   for (i=1; i<ng; i++) {
1059:     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
1060:     else starts[i] = starts[i-1];
1061:   }

1063:   /* for each nontrivial globally owned node list all arriving processors */
1064:   for (i=0; i<nrecvs; i++) {
1065:     for (j=0; j<len[i]; j++) {
1066:       node = recvs[2*i*nmax+2*j]-rstart;
1067:       if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
1068:     }
1069:   }

1071:   if (debug) { /* -----------------------------------  */
1072:     starts[0] = 0;
1073:     for (i=1; i<ng; i++) {
1074:       if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
1075:       else starts[i] = starts[i-1];
1076:     }
1077:     for (i=0; i<ng; i++) {
1078:       if (nownedsenders[i] > 1) {
1079:         PetscSynchronizedPrintf(comm,"[%d] global node %D local owner processors: ",rank,i+rstart);
1080:         for (j=0; j<nownedsenders[i]; j++) {
1081:           PetscSynchronizedPrintf(comm,"%D ",ownedsenders[starts[i]+j]);
1082:         }
1083:         PetscSynchronizedPrintf(comm,"\n");
1084:       }
1085:     }
1086:     PetscSynchronizedFlush(comm,PETSC_STDOUT);
1087:   } /* -----------------------------------  */

1089:   /* wait on original sends */
1090:   if (nsends) {
1091:     PetscMalloc1(nsends,&send_status);
1092:     MPI_Waitall(nsends,send_waits,send_status);
1093:     PetscFree(send_status);
1094:   }
1095:   PetscFree(send_waits);
1096:   PetscFree(sends);
1097:   PetscFree(nprocs);

1099:   /* pack messages to send back to local owners */
1100:   starts[0] = 0;
1101:   for (i=1; i<ng; i++) {
1102:     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
1103:     else starts[i] = starts[i-1];
1104:   }
1105:   nsends2 = nrecvs;
1106:   PetscMalloc1(nsends2+1,&nprocs); /* length of each message */
1107:   for (i=0; i<nrecvs; i++) {
1108:     nprocs[i] = 1;
1109:     for (j=0; j<len[i]; j++) {
1110:       node = recvs[2*i*nmax+2*j]-rstart;
1111:       if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
1112:     }
1113:   }
1114:   nt = 0;
1115:   for (i=0; i<nsends2; i++) nt += nprocs[i];

1117:   PetscMalloc1(nt+1,&sends2);
1118:   PetscMalloc1(nsends2+1,&starts2);

1120:   starts2[0] = 0;
1121:   for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
1122:   /*
1123:      Each message is 1 + nprocs[i] long, and consists of
1124:        (0) the number of nodes being sent back
1125:        (1) the local node number,
1126:        (2) the number of processors sharing it,
1127:        (3) the processors sharing it
1128:   */
1129:   for (i=0; i<nsends2; i++) {
1130:     cnt = 1;
1131:     sends2[starts2[i]] = 0;
1132:     for (j=0; j<len[i]; j++) {
1133:       node = recvs[2*i*nmax+2*j]-rstart;
1134:       if (nownedsenders[node] > 1) {
1135:         sends2[starts2[i]]++;
1136:         sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
1137:         sends2[starts2[i]+cnt++] = nownedsenders[node];
1138:         PetscMemcpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]*sizeof(PetscInt));
1139:         cnt += nownedsenders[node];
1140:       }
1141:     }
1142:   }

1144:   /* receive the message lengths */
1145:   nrecvs2 = nsends;
1146:   PetscMalloc1(nrecvs2+1,&lens2);
1147:   PetscMalloc1(nrecvs2+1,&starts3);
1148:   PetscMalloc1(nrecvs2+1,&recv_waits);
1149:   for (i=0; i<nrecvs2; i++) {
1150:     MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);
1151:   }

1153:   /* send the message lengths */
1154:   for (i=0; i<nsends2; i++) {
1155:     MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);
1156:   }

1158:   /* wait on receives of lens */
1159:   if (nrecvs2) {
1160:     PetscMalloc1(nrecvs2,&recv_statuses);
1161:     MPI_Waitall(nrecvs2,recv_waits,recv_statuses);
1162:     PetscFree(recv_statuses);
1163:   }
1164:   PetscFree(recv_waits);

1166:   starts3[0] = 0;
1167:   nt         = 0;
1168:   for (i=0; i<nrecvs2-1; i++) {
1169:     starts3[i+1] = starts3[i] + lens2[i];
1170:     nt          += lens2[i];
1171:   }
1172:   if (nrecvs2) nt += lens2[nrecvs2-1];

1174:   PetscMalloc1(nt+1,&recvs2);
1175:   PetscMalloc1(nrecvs2+1,&recv_waits);
1176:   for (i=0; i<nrecvs2; i++) {
1177:     MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);
1178:   }

1180:   /* send the messages */
1181:   PetscMalloc1(nsends2+1,&send_waits);
1182:   for (i=0; i<nsends2; i++) {
1183:     MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);
1184:   }

1186:   /* wait on receives */
1187:   if (nrecvs2) {
1188:     PetscMalloc1(nrecvs2,&recv_statuses);
1189:     MPI_Waitall(nrecvs2,recv_waits,recv_statuses);
1190:     PetscFree(recv_statuses);
1191:   }
1192:   PetscFree(recv_waits);
1193:   PetscFree(nprocs);

1195:   if (debug) { /* -----------------------------------  */
1196:     cnt = 0;
1197:     for (i=0; i<nrecvs2; i++) {
1198:       nt = recvs2[cnt++];
1199:       for (j=0; j<nt; j++) {
1200:         PetscSynchronizedPrintf(comm,"[%d] local node %D number of subdomains %D: ",rank,recvs2[cnt],recvs2[cnt+1]);
1201:         for (k=0; k<recvs2[cnt+1]; k++) {
1202:           PetscSynchronizedPrintf(comm,"%D ",recvs2[cnt+2+k]);
1203:         }
1204:         cnt += 2 + recvs2[cnt+1];
1205:         PetscSynchronizedPrintf(comm,"\n");
1206:       }
1207:     }
1208:     PetscSynchronizedFlush(comm,PETSC_STDOUT);
1209:   } /* -----------------------------------  */

1211:   /* count the number of subdomains for each local node */
1212:   PetscMalloc1(size,&nprocs);
1213:   PetscMemzero(nprocs,size*sizeof(PetscInt));
1214:   cnt  = 0;
1215:   for (i=0; i<nrecvs2; i++) {
1216:     nt = recvs2[cnt++];
1217:     for (j=0; j<nt; j++) {
1218:       for (k=0; k<recvs2[cnt+1]; k++) nprocs[recvs2[cnt+2+k]]++;
1219:       cnt += 2 + recvs2[cnt+1];
1220:     }
1221:   }
1222:   nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
1223:   *nproc    = nt;
1224:   PetscMalloc1(nt+1,procs);
1225:   PetscMalloc1(nt+1,numprocs);
1226:   PetscMalloc1(nt+1,indices);
1227:   for (i=0;i<nt+1;i++) (*indices)[i]=NULL;
1228:   PetscMalloc1(size,&bprocs);
1229:   cnt  = 0;
1230:   for (i=0; i<size; i++) {
1231:     if (nprocs[i] > 0) {
1232:       bprocs[i]        = cnt;
1233:       (*procs)[cnt]    = i;
1234:       (*numprocs)[cnt] = nprocs[i];
1235:       PetscMalloc1(nprocs[i],&(*indices)[cnt]);
1236:       cnt++;
1237:     }
1238:   }

1240:   /* make the list of subdomains for each nontrivial local node */
1241:   PetscMemzero(*numprocs,nt*sizeof(PetscInt));
1242:   cnt  = 0;
1243:   for (i=0; i<nrecvs2; i++) {
1244:     nt = recvs2[cnt++];
1245:     for (j=0; j<nt; j++) {
1246:       for (k=0; k<recvs2[cnt+1]; k++) (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
1247:       cnt += 2 + recvs2[cnt+1];
1248:     }
1249:   }
1250:   PetscFree(bprocs);
1251:   PetscFree(recvs2);

1253:   /* sort the node indexing by their global numbers */
1254:   nt = *nproc;
1255:   for (i=0; i<nt; i++) {
1256:     PetscMalloc1((*numprocs)[i],&tmp);
1257:     for (j=0; j<(*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
1258:     PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);
1259:     PetscFree(tmp);
1260:   }

1262:   if (debug) { /* -----------------------------------  */
1263:     nt = *nproc;
1264:     for (i=0; i<nt; i++) {
1265:       PetscSynchronizedPrintf(comm,"[%d] subdomain %D number of indices %D: ",rank,(*procs)[i],(*numprocs)[i]);
1266:       for (j=0; j<(*numprocs)[i]; j++) {
1267:         PetscSynchronizedPrintf(comm,"%D ",(*indices)[i][j]);
1268:       }
1269:       PetscSynchronizedPrintf(comm,"\n");
1270:     }
1271:     PetscSynchronizedFlush(comm,PETSC_STDOUT);
1272:   } /* -----------------------------------  */

1274:   /* wait on sends */
1275:   if (nsends2) {
1276:     PetscMalloc1(nsends2,&send_status);
1277:     MPI_Waitall(nsends2,send_waits,send_status);
1278:     PetscFree(send_status);
1279:   }

1281:   PetscFree(starts3);
1282:   PetscFree(dest);
1283:   PetscFree(send_waits);

1285:   PetscFree(nownedsenders);
1286:   PetscFree(ownedsenders);
1287:   PetscFree(starts);
1288:   PetscFree(starts2);
1289:   PetscFree(lens2);

1291:   PetscFree(source);
1292:   PetscFree(len);
1293:   PetscFree(recvs);
1294:   PetscFree(nprocs);
1295:   PetscFree(sends2);

1297:   /* put the information about myself as the first entry in the list */
1298:   first_procs    = (*procs)[0];
1299:   first_numprocs = (*numprocs)[0];
1300:   first_indices  = (*indices)[0];
1301:   for (i=0; i<*nproc; i++) {
1302:     if ((*procs)[i] == rank) {
1303:       (*procs)[0]    = (*procs)[i];
1304:       (*numprocs)[0] = (*numprocs)[i];
1305:       (*indices)[0]  = (*indices)[i];
1306:       (*procs)[i]    = first_procs;
1307:       (*numprocs)[i] = first_numprocs;
1308:       (*indices)[i]  = first_indices;
1309:       break;
1310:     }
1311:   }

1313:   /* save info for reuse */
1314:   mapping->info_nproc = *nproc;
1315:   mapping->info_procs = *procs;
1316:   mapping->info_numprocs = *numprocs;
1317:   mapping->info_indices = *indices;
1318:   mapping->info_cached = PETSC_TRUE;
1319:   return(0);
1320: }

1322: /*@C
1323:     ISLocalToGlobalMappingRestoreBlockInfo - Frees the memory allocated by ISLocalToGlobalMappingGetBlockInfo()

1325:     Collective on ISLocalToGlobalMapping

1327:     Input Parameters:
1328: .   mapping - the mapping from local to global indexing

1330:     Output Parameter:
1331: +   nproc - number of processors that are connected to this one
1332: .   proc - neighboring processors
1333: .   numproc - number of indices for each processor
1334: -   indices - indices of local nodes shared with neighbor (sorted by global numbering)

1336:     Level: advanced

1338: .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1339:           ISLocalToGlobalMappingGetInfo()
1340: @*/
1341: PetscErrorCode  ISLocalToGlobalMappingRestoreBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1342: {

1347:   if (mapping->info_free) {
1348:     PetscFree(*numprocs);
1349:     if (*indices) {
1350:       PetscInt i;

1352:       PetscFree((*indices)[0]);
1353:       for (i=1; i<*nproc; i++) {
1354:         PetscFree((*indices)[i]);
1355:       }
1356:       PetscFree(*indices);
1357:     }
1358:   }
1359:   *nproc    = 0;
1360:   *procs    = NULL;
1361:   *numprocs = NULL;
1362:   *indices  = NULL;
1363:   return(0);
1364: }

1366: /*@C
1367:     ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
1368:      each index shared by more than one processor

1370:     Collective on ISLocalToGlobalMapping

1372:     Input Parameters:
1373: .   mapping - the mapping from local to global indexing

1375:     Output Parameter:
1376: +   nproc - number of processors that are connected to this one
1377: .   proc - neighboring processors
1378: .   numproc - number of indices for each subdomain (processor)
1379: -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)

1381:     Level: advanced

1383:     Concepts: mapping^local to global

1385:     Fortran Usage:
1386: $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
1387: $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
1388:           PetscInt indices[nproc][numprocmax],ierr)
1389:         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
1390:         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.


1393: .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1394:           ISLocalToGlobalMappingRestoreInfo()
1395: @*/
1396: PetscErrorCode  ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1397: {
1399:   PetscInt       **bindices = NULL,*bnumprocs = NULL,bs = mapping->bs,i,j,k;

1403:   ISLocalToGlobalMappingGetBlockInfo(mapping,nproc,procs,&bnumprocs,&bindices);
1404:   if (bs > 1) { /* we need to expand the cached info */
1405:     PetscCalloc1(*nproc,&*indices);
1406:     PetscCalloc1(*nproc,&*numprocs);
1407:     for (i=0; i<*nproc; i++) {
1408:       PetscMalloc1(bs*bnumprocs[i],&(*indices)[i]);
1409:       for (j=0; j<bnumprocs[i]; j++) {
1410:         for (k=0; k<bs; k++) {
1411:           (*indices)[i][j*bs+k] = bs*bindices[i][j] + k;
1412:         }
1413:       }
1414:       (*numprocs)[i] = bnumprocs[i]*bs;
1415:     }
1416:     mapping->info_free = PETSC_TRUE;
1417:   } else {
1418:     *numprocs = bnumprocs;
1419:     *indices  = bindices;
1420:   }
1421:   return(0);
1422: }
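/*
   Usage sketch (editorial addition, not part of the PETSc source): printing, for each neighboring
   process, the local indices shared with it; the call is collective on the mapping's communicator
   and the Get/Restore calls must be paired. ltog is assumed to be an existing mapping.

      MPI_Comm comm;
      PetscInt nproc,*procs,*numprocs,**indices,i,j;

      PetscObjectGetComm((PetscObject)ltog,&comm);
      ISLocalToGlobalMappingGetInfo(ltog,&nproc,&procs,&numprocs,&indices);
      for (i=0; i<nproc; i++) {
        for (j=0; j<numprocs[i]; j++) {
          PetscSynchronizedPrintf(comm,"local index %D shared with rank %D\n",indices[i][j],procs[i]);
        }
      }
      PetscSynchronizedFlush(comm,PETSC_STDOUT);
      ISLocalToGlobalMappingRestoreInfo(ltog,&nproc,&procs,&numprocs,&indices);
*/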

1424: /*@C
1425:     ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()

1427:     Collective on ISLocalToGlobalMapping

1429:     Input Parameters:
1430: .   mapping - the mapping from local to global indexing

1432:     Output Parameter:
1433: +   nproc - number of processors that are connected to this one
1434: .   proc - neighboring processors
1435: .   numproc - number of indices for each processor
1436: -   indices - indices of local nodes shared with neighbor (sorted by global numbering)

1438:     Level: advanced

1440: .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1441:           ISLocalToGlobalMappingGetInfo()
1442: @*/
1443: PetscErrorCode  ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1444: {

1448:   ISLocalToGlobalMappingRestoreBlockInfo(mapping,nproc,procs,numprocs,indices);
1449:   return(0);
1450: }

1452: /*@C
1453:    ISLocalToGlobalMappingGetIndices - Get global indices for every local point that is mapped

1455:    Not Collective

1457:    Input Arguments:
1458: . ltog - local to global mapping

1460:    Output Arguments:
1461: . array - array of indices, the length of this array may be obtained with ISLocalToGlobalMappingGetSize()

1463:    Level: advanced

1465:    Notes: ISLocalToGlobalMappingGetSize() returns the length of this array

1467: .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices(), ISLocalToGlobalMappingGetBlockIndices(), ISLocalToGlobalMappingRestoreBlockIndices()
1468: @*/
1469: PetscErrorCode  ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1470: {
1474:   if (ltog->bs == 1) {
1475:     *array = ltog->indices;
1476:   } else {
1477:     PetscInt       *jj,k,i,j,n = ltog->n, bs = ltog->bs;
1478:     const PetscInt *ii;

1481:     PetscMalloc1(bs*n,&jj);
1482:     *array = jj;
1483:     k    = 0;
1484:     ii   = ltog->indices;
1485:     for (i=0; i<n; i++)
1486:       for (j=0; j<bs; j++)
1487:         jj[k++] = bs*ii[i] + j;
1488:   }
1489:   return(0);
1490: }
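/*
   Usage sketch (editorial addition, not part of the PETSc source): inspecting the expanded
   (point, not block) indices; ISLocalToGlobalMappingGetSize() gives the length of the array, and the
   Get/Restore calls must be paired since a temporary copy is created when the block size exceeds 1.
   ltog is assumed to be an existing mapping.

      const PetscInt *idx;
      PetscInt       i,n;

      ISLocalToGlobalMappingGetSize(ltog,&n);
      ISLocalToGlobalMappingGetIndices(ltog,&idx);
      for (i=0; i<n; i++) {
        PetscPrintf(PETSC_COMM_SELF,"local %D -> global %D\n",i,idx[i]);
      }
      ISLocalToGlobalMappingRestoreIndices(ltog,&idx);
*/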

1492: /*@C
1493:    ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingGetIndices()

1495:    Not Collective

1497:    Input Arguments:
1498: + ltog - local to global mapping
1499: - array - array of indices

1501:    Level: advanced

1503: .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1504: @*/
1505: PetscErrorCode  ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1506: {
1510:   if (ltog->bs == 1 && *array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");

1512:   if (ltog->bs > 1) {
1514:     PetscFree(*(void**)array);
1515:   }
1516:   return(0);
1517: }

1519: /*@C
1520:    ISLocalToGlobalMappingGetBlockIndices - Get global indices for every local block

1522:    Not Collective

1524:    Input Arguments:
1525: . ltog - local to global mapping

1527:    Output Arguments:
1528: . array - array of indices

1530:    Level: advanced

1532: .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreBlockIndices()
1533: @*/
1534: PetscErrorCode  ISLocalToGlobalMappingGetBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1535: {
1539:   *array = ltog->indices;
1540:   return(0);
1541: }

1543: /*@C
1544:    ISLocalToGlobalMappingRestoreBlockIndices - Restore indices obtained with ISLocalToGlobalMappingGetBlockIndices()

1546:    Not Collective

1548:    Input Arguments:
1549: + ltog - local to global mapping
1550: - array - array of indices

1552:    Level: advanced

1554: .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1555: @*/
1556: PetscErrorCode  ISLocalToGlobalMappingRestoreBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1557: {
1561:   if (*array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1562:   *array = NULL;
1563:   return(0);
1564: }

1566: /*@C
1567:    ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings

1569:    Not Collective

1571:    Input Arguments:
1572: + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
1573: . n - number of mappings to concatenate
1574: - ltogs - local to global mappings

1576:    Output Arguments:
1577: . ltogcat - new mapping

1579:    Note: this currently always returns a mapping with block size of 1

1581:    Developer Note: If all the input mappings have the same block size we could easily handle that as a special case

1583:    Level: advanced

1585: .seealso: ISLocalToGlobalMappingCreate()
1586: @*/
1587: PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat)
1588: {
1589:   PetscInt       i,cnt,m,*idx;

1593:   if (n < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %D",n);
1597:   for (cnt=0,i=0; i<n; i++) {
1598:     ISLocalToGlobalMappingGetSize(ltogs[i],&m);
1599:     cnt += m;
1600:   }
1601:   PetscMalloc1(cnt,&idx);
1602:   for (cnt=0,i=0; i<n; i++) {
1603:     const PetscInt *subidx;
1604:     ISLocalToGlobalMappingGetSize(ltogs[i],&m);
1605:     ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx);
1606:     PetscMemcpy(&idx[cnt],subidx,m*sizeof(PetscInt));
1607:     ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx);
1608:     cnt += m;
1609:   }
1610:   ISLocalToGlobalMappingCreate(comm,1,cnt,idx,PETSC_OWN_POINTER,ltogcat);
1611:   return(0);
1612: }
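/*
   Usage sketch (editorial addition, not part of the PETSc source): concatenating two existing
   mappings ltogA and ltogB (assumed to live on the same communicator); the result has block size 1
   and its length is the sum of the expanded lengths of the inputs.

      ISLocalToGlobalMapping parts[2],cat;

      parts[0] = ltogA;
      parts[1] = ltogB;
      ISLocalToGlobalMappingConcatenate(PETSC_COMM_SELF,2,parts,&cat);
      ISLocalToGlobalMappingDestroy(&cat);
*/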

1614: /*MC
1615:       ISLOCALTOGLOBALMAPPINGBASIC - basic implementation of the ISLocalToGlobalMapping object. When ISGlobalToLocalMappingApply() is
1616:                                     used, this is suitable only for small and moderate size problems.

1618:    Options Database Keys:
1619: +   -islocaltoglobalmapping_type basic - select this method

1621:    Level: beginner

1623: .seealso:  ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetType(), ISLOCALTOGLOBALMAPPINGHASH
1624: M*/
1625: PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Basic(ISLocalToGlobalMapping ltog)
1626: {
1628:   ltog->ops->globaltolocalmappingapply      = ISGlobalToLocalMappingApply_Basic;
1629:   ltog->ops->globaltolocalmappingsetup      = ISGlobalToLocalMappingSetUp_Basic;
1630:   ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Basic;
1631:   ltog->ops->destroy                        = ISLocalToGlobalMappingDestroy_Basic;
1632:   return(0);
1633: }

1635: /*MC
1636:       ISLOCALTOGLOBALMAPPINGHASH - hash implementation of the ISLocalToGlobalMapping object. When ISGlobalToLocalMappingApply() is
1637:                                     used, this is suitable for large problems since it uses far less memory than ISLOCALTOGLOBALMAPPINGBASIC.

1639:    Options Database Keys:
1640: +   -islocaltoglobalmapping_type hash - select this method


1643:    Notes: This is selected automatically for large problems if the user does not set the type.

1645:    Level: beginner

1647: .seealso:  ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetType(), ISLOCALTOGLOBALMAPPINGHASH
1648: M*/
1649: PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Hash(ISLocalToGlobalMapping ltog)
1650: {
1652:   ltog->ops->globaltolocalmappingapply      = ISGlobalToLocalMappingApply_Hash;
1653:   ltog->ops->globaltolocalmappingsetup      = ISGlobalToLocalMappingSetUp_Hash;
1654:   ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Hash;
1655:   ltog->ops->destroy                        = ISLocalToGlobalMappingDestroy_Hash;
1656:   return(0);
1657: }


1660: /*@C
1661:     ISLocalToGlobalMappingRegister -  Adds a method for applying a global to local mapping with an ISLocalToGlobalMapping

1663:    Not Collective

1665:    Input Parameters:
1666: +  sname - name of a new method
1667: -  routine_create - routine to create method context

1669:    Notes:
1670:    ISLocalToGlobalMappingRegister() may be called multiple times to add several user-defined mappings.

1672:    Sample usage:
1673: .vb
1674:    ISLocalToGlobalMappingRegister("my_mapper",MyCreate);
1675: .ve

1677:    Then, your mapping can be chosen with the procedural interface via
1678: $     ISLocalToGlobalMappingSetType(ltog,"my_mapper")
1679:    or at runtime via the option
1680: $     -islocaltoglobalmapping_type my_mapper

1682:    Level: advanced

1684: .keywords: ISLocalToGlobalMappingType, register

1686: .seealso: ISLocalToGlobalMappingRegisterAll(), ISLocalToGlobalMappingRegisterDestroy(), ISLOCALTOGLOBALMAPPINGBASIC, ISLOCALTOGLOBALMAPPINGHASH

1688: @*/
1689: PetscErrorCode  ISLocalToGlobalMappingRegister(const char sname[],PetscErrorCode (*function)(ISLocalToGlobalMapping))
1690: {

1694:   PetscFunctionListAdd(&ISLocalToGlobalMappingList,sname,function);
1695:   return(0);
1696: }

1698: /*@C
1699:    ISLocalToGlobalMappingSetType - Builds ISLocalToGlobalMapping for a particular global to local mapping approach.

1701:    Logically Collective on ISLocalToGlobalMapping

1703:    Input Parameters:
1704: +  ltog - the ISLocalToGlobalMapping object
1705: -  type - a known method

1707:    Options Database Key:
1708: .  -islocaltoglobalmapping_type  <method> - Sets the method; use -help for a list
1709:     of available methods (for instance, basic or hash)

1711:    Notes:
1712:    See "petsc/include/petscis.h" for available methods

1714:   Normally, it is best to use the ISLocalToGlobalMappingSetFromOptions() command and
1715:   then set the ISLocalToGlobalMapping type from the options database rather than by using
1716:   this routine.

1718:   Level: intermediate

1720:   Developer Note: ISLocalToGlobalMappingRegister() is used to add new types to ISLocalToGlobalMappingList from which they
1721:   are accessed by ISLocalToGlobalMappingSetType().

1723: .keywords: ISLocalToGlobalMapping, set, method

1725: .seealso: PCSetType(), ISLocalToGlobalMappingType, ISLocalToGlobalMappingRegister(), ISLocalToGlobalMappingCreate()

1727: @*/
1728: PetscErrorCode  ISLocalToGlobalMappingSetType(ISLocalToGlobalMapping ltog, ISLocalToGlobalMappingType type)
1729: {
1730:   PetscErrorCode ierr,(*r)(ISLocalToGlobalMapping);
1731:   PetscBool      match;


1737:   PetscObjectTypeCompare((PetscObject)ltog,type,&match);
1738:   if (match) return(0);

1740:    PetscFunctionListFind(ISLocalToGlobalMappingList,type,&r);
1741:   if (!r) SETERRQ1(PetscObjectComm((PetscObject)ltog),PETSC_ERR_ARG_UNKNOWN_TYPE,"Unable to find requested ISLocalToGlobalMapping type %s",type);
1742:   /* Destroy the previous private LTOG context */
1743:   if (ltog->ops->destroy) {
1744:     (*ltog->ops->destroy)(ltog);
1745:     ltog->ops->destroy = NULL;
1746:   }
1747:   PetscObjectChangeTypeName((PetscObject)ltog,type);
1748:   (*r)(ltog);
1749:   return(0);
1750: }

1752: PetscBool ISLocalToGlobalMappingRegisterAllCalled = PETSC_FALSE;

1754: /*@C
1755:   ISLocalToGlobalMappingRegisterAll - Registers all of the local to global mapping components in the IS package.

1757:   Not Collective

1759:   Level: advanced

1761: .keywords: IS, register, all
1762: .seealso:  ISRegister(), ISLocalToGlobalMappingRegister()
1763: @*/
1764: PetscErrorCode  ISLocalToGlobalMappingRegisterAll(void)
1765: {

1769:   if (ISLocalToGlobalMappingRegisterAllCalled) return(0);
1770:   ISLocalToGlobalMappingRegisterAllCalled = PETSC_TRUE;

1772:   ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGBASIC, ISLocalToGlobalMappingCreate_Basic);
1773:   ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGHASH, ISLocalToGlobalMappingCreate_Hash);
1774:   return(0);
1775: }