Actual source code: pmap.c

petsc-3.14.6 2021-03-30

/*
   This file contains routines for basic map object implementation.
*/

#include <petscis.h>
#include <petscsf.h>
#include <petsc/private/isimpl.h>

/*@
  PetscLayoutCreate - Allocates PetscLayout space and sets the PetscLayout contents to the default.

  Collective

  Input Parameter:
. comm - the MPI communicator

  Output Parameter:
. map - the new PetscLayout

  Level: advanced

  Notes:
  Typical calling sequence
.vb
       PetscLayoutCreate(MPI_Comm,PetscLayout *);
       PetscLayoutSetBlockSize(PetscLayout,bs);
       PetscLayoutSetSize(PetscLayout,N); // or PetscLayoutSetLocalSize(PetscLayout,n);
       PetscLayoutSetUp(PetscLayout);
.ve
  Alternatively,
$      PetscLayoutCreateFromSizes(comm,n,N,bs,&layout);

  Optionally use any of the following:

+ PetscLayoutGetSize(PetscLayout,PetscInt *);
. PetscLayoutGetLocalSize(PetscLayout,PetscInt *);
. PetscLayoutGetRange(PetscLayout,PetscInt *rstart,PetscInt *rend);
. PetscLayoutGetRanges(PetscLayout,const PetscInt *range[]);
- PetscLayoutDestroy(PetscLayout*);

  The PetscLayout object and methods are intended to be used in the PETSc Vec and Mat implementations; they are often not needed in
  user codes unless you really gain something by using them.

.seealso: PetscLayoutSetLocalSize(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutGetLocalSize(), PetscLayout, PetscLayoutDestroy(),
          PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize(), PetscLayoutSetUp(),
          PetscLayoutCreateFromSizes()

@*/
PetscErrorCode PetscLayoutCreate(MPI_Comm comm,PetscLayout *map)
{

  PetscNew(map);

  (*map)->comm        = comm;
  (*map)->bs          = -1;
  (*map)->n           = -1;
  (*map)->N           = -1;
  (*map)->range       = NULL;
  (*map)->range_alloc = PETSC_TRUE;
  (*map)->rstart      = 0;
  (*map)->rend        = 0;
  (*map)->setupcalled = PETSC_FALSE;
  (*map)->oldn        = -1;
  (*map)->oldN        = -1;
  (*map)->oldbs       = -1;
  return(0);
}
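
/*
  Example (editor's sketch, not part of the original pmap.c): the typical
  calling sequence from the manual page above, written out as a compilable
  fragment. Assumes an MPI communicator comm and a local size n chosen by the
  caller; error checking is elided in the same style as the rest of this
  listing.

     PetscLayout layout;
     PetscLayoutCreate(comm,&layout);
     PetscLayoutSetBlockSize(layout,1);
     PetscLayoutSetLocalSize(layout,n);   // or PetscLayoutSetSize(layout,N)
     PetscLayoutSetUp(layout);            // fills in the missing size and the ownership ranges
     PetscLayoutDestroy(&layout);
*/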

/*@
  PetscLayoutCreateFromSizes - Allocates PetscLayout space, sets the layout sizes, and sets the layout up.

  Collective

  Input Parameters:
+ comm  - the MPI communicator
. n     - the local size (or PETSC_DECIDE)
. N     - the global size (or PETSC_DECIDE)
- bs    - the block size (or PETSC_DECIDE)

  Output Parameter:
. map - the new PetscLayout

  Level: advanced

  Notes:
$ PetscLayoutCreateFromSizes(comm,n,N,bs,&layout);
  is a shorthand for
.vb
  PetscLayoutCreate(comm,&layout);
  PetscLayoutSetLocalSize(layout,n);
  PetscLayoutSetSize(layout,N);
  PetscLayoutSetBlockSize(layout,bs);
  PetscLayoutSetUp(layout);
.ve

.seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutGetLocalSize(), PetscLayout, PetscLayoutDestroy(),
          PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize(), PetscLayoutSetUp(), PetscLayoutCreateFromRanges()

@*/
PetscErrorCode PetscLayoutCreateFromSizes(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt bs,PetscLayout *map)
{

  PetscLayoutCreate(comm, map);
  PetscLayoutSetLocalSize(*map, n);
  PetscLayoutSetSize(*map, N);
  PetscLayoutSetBlockSize(*map, bs);
  PetscLayoutSetUp(*map);
  return(0);
}
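
/*
  Example (editor's sketch): creating a fully set-up layout in one call,
  letting PETSc decide the local sizes for a hypothetical global problem of
  size 100 with block size 1.

     PetscLayout layout;
     PetscLayoutCreateFromSizes(PETSC_COMM_WORLD,PETSC_DECIDE,100,1,&layout);
*/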

/*@
  PetscLayoutDestroy - Frees a map object and frees its range if it exists.

  Collective

  Input Parameter:
. map - the PetscLayout

  Level: developer

  Note:
  The PetscLayout object and methods are intended to be used in the PETSc Vec and Mat implementations; it is
  recommended they not be used in user codes unless you really gain something by using them.

.seealso: PetscLayoutSetLocalSize(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutGetLocalSize(), PetscLayout, PetscLayoutCreate(),
          PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize(), PetscLayoutSetUp()

@*/
PetscErrorCode PetscLayoutDestroy(PetscLayout *map)
{

  if (!*map) return(0);
  if (!(*map)->refcnt--) {
    if ((*map)->range_alloc) {PetscFree((*map)->range);}
    ISLocalToGlobalMappingDestroy(&(*map)->mapping);
    PetscFree((*map));
  }
  *map = NULL;
  return(0);
}
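
/*
  Example (editor's sketch): the reference count starts at 0, so a plain
  create/destroy pair frees the layout immediately, while a layout shared via
  PetscLayoutReference() survives until every holder destroys it.

     PetscLayout a,b = NULL;
     PetscLayoutCreateFromSizes(comm,n,PETSC_DECIDE,1,&a);   // refcnt == 0
     PetscLayoutReference(a,&b);                             // refcnt == 1
     PetscLayoutDestroy(&a);   // refcnt 1 -> 0, storage kept alive through b
     PetscLayoutDestroy(&b);   // refcnt is 0, storage is freed
*/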

/*@
  PetscLayoutCreateFromRanges - Creates a new PetscLayout with the given ownership ranges and sets it up.

  Collective

  Input Parameters:
+ comm  - the MPI communicator
. range - the array of ownership ranges for each rank, of length commsize+1
. mode  - the copy mode for range
- bs    - the block size (or PETSC_DECIDE)

  Output Parameter:
. newmap - the new PetscLayout

  Level: developer

.seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutGetLocalSize(), PetscLayout, PetscLayoutDestroy(),
          PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize(), PetscLayoutSetUp(), PetscLayoutCreateFromSizes()

@*/
PetscErrorCode PetscLayoutCreateFromRanges(MPI_Comm comm,const PetscInt range[],PetscCopyMode mode,PetscInt bs,PetscLayout *newmap)
{
  PetscLayout    map;
  PetscMPIInt    rank,size;

  MPI_Comm_size(comm, &size);
  MPI_Comm_rank(comm, &rank);
  PetscLayoutCreate(comm, &map);
  PetscLayoutSetBlockSize(map, bs);
  switch (mode) {
    case PETSC_COPY_VALUES:
      PetscMalloc1(size+1, &map->range);
      PetscArraycpy(map->range, range, size+1);
      break;
    case PETSC_USE_POINTER:
      map->range_alloc = PETSC_FALSE;
      /* fall through: both remaining modes store the caller's pointer */
    default:
      map->range = (PetscInt*) range;
      break;
  }
  map->rstart = map->range[rank];
  map->rend   = map->range[rank+1];
  map->n      = map->rend - map->rstart;
  map->N      = map->range[size];
  if (PetscDefined(USE_DEBUG)) {  /* just check that n, N and bs are consistent */
    PetscInt tmp;
    MPIU_Allreduce(&map->n,&tmp,1,MPIU_INT,MPI_SUM,map->comm);
    if (tmp != map->N) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Sum of local lengths %D does not equal global length %D, my local length %D.\nThe provided PetscLayout is wrong.",tmp,map->N,map->n);
    if (map->bs > 1) {
      if (map->n % map->bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Local size %D must be divisible by blocksize %D",map->n,map->bs);
    }
    if (map->bs > 1) {
      if (map->N % map->bs) SETERRQ2(map->comm,PETSC_ERR_PLIB,"Global size %D must be divisible by blocksize %D",map->N,map->bs);
    }
  }
  /* lock the layout */
  map->setupcalled = PETSC_TRUE;
  map->oldn = map->n;
  map->oldN = map->N;
  map->oldbs = map->bs;
  *newmap = map;
  return(0);
}
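
/*
  Example (editor's sketch): building a layout from explicit ownership ranges
  on a hypothetical 2-rank communicator, giving rank 0 three entries and
  rank 1 seven. range must have length size+1, with range[0] == 0 and
  range[size] equal to the global size.

     const PetscInt range[] = {0,3,10};
     PetscLayout    layout;
     PetscLayoutCreateFromRanges(comm,range,PETSC_COPY_VALUES,1,&layout);
*/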

/*@
  PetscLayoutSetUp - given a map where either the global or the local
                     size has been set, sets up the map so that it may be used.

  Collective

  Input Parameter:
. map - pointer to the map

  Level: developer

  Notes:
    Typical calling sequence
$ PetscLayoutCreate(MPI_Comm,PetscLayout *);
$ PetscLayoutSetBlockSize(PetscLayout,1);
$ PetscLayoutSetSize(PetscLayout,N); or PetscLayoutSetLocalSize(PetscLayout,n); or both
$ PetscLayoutSetUp(PetscLayout);
$ PetscLayoutGetSize(PetscLayout,PetscInt *);

  If the range exists and the local size is not set, everything is computed from the range.

  If the local size and global size are already set and the range exists, then this does nothing.

.seealso: PetscLayoutSetLocalSize(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutGetLocalSize(), PetscLayout, PetscLayoutDestroy(),
          PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize(), PetscLayoutCreate()
@*/
PetscErrorCode PetscLayoutSetUp(PetscLayout map)
{
  PetscMPIInt    rank,size;
  PetscInt       p;

  if (map->setupcalled && (map->n != map->oldn || map->N != map->oldN)) SETERRQ4(map->comm,PETSC_ERR_ARG_WRONGSTATE,"Layout is already setup with (local=%D,global=%D), cannot call setup again with (local=%D,global=%D)", map->oldn, map->oldN, map->n, map->N);
  if (map->setupcalled) return(0);

  if (map->n > 0 && map->bs > 1) {
    if (map->n % map->bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Local size %D must be divisible by blocksize %D",map->n,map->bs);
  }
  if (map->N > 0 && map->bs > 1) {
    if (map->N % map->bs) SETERRQ2(map->comm,PETSC_ERR_PLIB,"Global size %D must be divisible by blocksize %D",map->N,map->bs);
  }

  MPI_Comm_size(map->comm, &size);
  MPI_Comm_rank(map->comm, &rank);
  if (map->n > 0) map->n = map->n/PetscAbs(map->bs);
  if (map->N > 0) map->N = map->N/PetscAbs(map->bs);
  PetscSplitOwnership(map->comm,&map->n,&map->N);
  map->n = map->n*PetscAbs(map->bs);
  map->N = map->N*PetscAbs(map->bs);
  if (!map->range) {
    PetscMalloc1(size+1, &map->range);
  }
  MPI_Allgather(&map->n, 1, MPIU_INT, map->range+1, 1, MPIU_INT, map->comm);

  map->range[0] = 0;
  for (p = 2; p <= size; p++) map->range[p] += map->range[p-1];

  map->rstart = map->range[rank];
  map->rend   = map->range[rank+1];

  /* lock the layout */
  map->setupcalled = PETSC_TRUE;
  map->oldn = map->n;
  map->oldN = map->N;
  map->oldbs = map->bs;
  return(0);
}
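
/*
  Worked example (editor's note): on three ranks with local sizes n = {3,4,3},
  the MPI_Allgather() above fills range[1..3] = {3,4,3}; the prefix-sum loop
  then produces the ownership table range = {0,3,7,10}, so rank 1 owns the
  half-open interval [3,7) and the global size is range[3] = 10.
*/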

/*@
  PetscLayoutDuplicate - creates a new PetscLayout with the same information as a given one. If a PetscLayout already exists at the output location it is destroyed first.

  Collective on PetscLayout

  Input Parameter:
. in - input PetscLayout to be duplicated

  Output Parameter:
. out - the copy

  Level: developer

  Notes:
    PetscLayoutSetUp() does not need to be called on the resulting PetscLayout

.seealso: PetscLayoutCreate(), PetscLayoutDestroy(), PetscLayoutSetUp(), PetscLayoutReference()
@*/
PetscErrorCode PetscLayoutDuplicate(PetscLayout in,PetscLayout *out)
{
  PetscMPIInt    size;
  MPI_Comm       comm = in->comm;

  PetscLayoutDestroy(out);
  PetscLayoutCreate(comm,out);
  MPI_Comm_size(comm,&size);
  PetscMemcpy(*out,in,sizeof(struct _n_PetscLayout));
  if (in->range) {
    PetscMalloc1(size+1,&(*out)->range);
    PetscArraycpy((*out)->range,in->range,size+1);
  }

  (*out)->refcnt = 0;
  return(0);
}
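
/*
  Example (editor's sketch): PetscLayoutDuplicate() deep-copies the range
  array and resets the reference count, so the copy is independent of the
  original; contrast with PetscLayoutReference(), which shares one object.

     PetscLayout copy = NULL;
     PetscLayoutDuplicate(layout,&copy);   // independent deep copy
     PetscLayoutDestroy(&layout);          // copy remains valid
*/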

/*@
  PetscLayoutReference - Causes a PETSc Vec or Mat to share a PetscLayout with one that already exists. Used by Vec/MatDuplicate_XXX()

  Collective on PetscLayout

  Input Parameter:
. in - input PetscLayout to be copied

  Output Parameter:
. out - the reference location

  Level: developer

  Notes:
    PetscLayoutSetUp() does not need to be called on the resulting PetscLayout

  If the out location already contains a PetscLayout it is destroyed

.seealso: PetscLayoutCreate(), PetscLayoutDestroy(), PetscLayoutSetUp(), PetscLayoutDuplicate()
@*/
PetscErrorCode PetscLayoutReference(PetscLayout in,PetscLayout *out)
{

  in->refcnt++;
  PetscLayoutDestroy(out);
  *out = in;
  return(0);
}

/*@
  PetscLayoutSetISLocalToGlobalMapping - sets an ISLocalToGlobalMapping into a PetscLayout

  Collective on PetscLayout

  Input Parameters:
+ in - input PetscLayout
- ltog - the local to global mapping

  Level: developer

  Notes:
    PetscLayoutSetUp() does not need to be called on the resulting PetscLayout

  If the layout already contains an ISLocalToGlobalMapping it is destroyed first

.seealso: PetscLayoutCreate(), PetscLayoutDestroy(), PetscLayoutSetUp(), PetscLayoutDuplicate()
@*/
PetscErrorCode PetscLayoutSetISLocalToGlobalMapping(PetscLayout in,ISLocalToGlobalMapping ltog)
{
  PetscInt       bs;

  ISLocalToGlobalMappingGetBlockSize(ltog,&bs);
  if (in->bs > 0 && (bs != 1) && in->bs != bs) SETERRQ2(in->comm,PETSC_ERR_PLIB,"Blocksize of layout %D must match that of mapping %D (or the latter must be 1)",in->bs,bs);
  PetscObjectReference((PetscObject)ltog);
  ISLocalToGlobalMappingDestroy(&in->mapping);
  in->mapping = ltog;
  return(0);
}

/*@
  PetscLayoutSetLocalSize - Sets the local size for a PetscLayout object.

  Collective on PetscLayout

  Input Parameters:
+ map - pointer to the map
- n - the local size

  Level: developer

  Notes:
  Call this after the call to PetscLayoutCreate()

.seealso: PetscLayoutCreate(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutGetLocalSize(), PetscLayoutSetUp(),
          PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize()
@*/
PetscErrorCode PetscLayoutSetLocalSize(PetscLayout map,PetscInt n)
{
  if (map->bs > 1 && n % map->bs) SETERRQ2(map->comm,PETSC_ERR_ARG_INCOMP,"Local size %D not compatible with block size %D",n,map->bs);
  map->n = n;
  return(0);
}

/*@C
  PetscLayoutGetLocalSize - Gets the local size for a PetscLayout object.

  Not Collective

  Input Parameter:
. map - pointer to the map

  Output Parameter:
. n - the local size

  Level: developer

  Notes:
  Call this after the call to PetscLayoutSetUp()

  Fortran Notes:
  Not available from Fortran

.seealso: PetscLayoutCreate(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutSetLocalSize(), PetscLayoutSetUp(),
          PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize()

@*/
PetscErrorCode  PetscLayoutGetLocalSize(PetscLayout map,PetscInt *n)
{
  *n = map->n;
  return(0);
}

/*@
  PetscLayoutSetSize - Sets the global size for a PetscLayout object.

  Logically Collective on PetscLayout

  Input Parameters:
+ map - pointer to the map
- n - the global size

  Level: developer

  Notes:
  Call this after the call to PetscLayoutCreate()

.seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutGetLocalSize(), PetscLayoutGetSize(), PetscLayoutSetUp(),
          PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize()
@*/
PetscErrorCode PetscLayoutSetSize(PetscLayout map,PetscInt n)
{
  map->N = n;
  return(0);
}

/*@
  PetscLayoutGetSize - Gets the global size for a PetscLayout object.

  Not Collective

  Input Parameter:
. map - pointer to the map

  Output Parameter:
. n - the global size

  Level: developer

  Notes:
  Call this after the call to PetscLayoutSetUp()

.seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutGetLocalSize(), PetscLayoutSetSize(), PetscLayoutSetUp(),
          PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize()
@*/
PetscErrorCode PetscLayoutGetSize(PetscLayout map,PetscInt *n)
{
  *n = map->N;
  return(0);
}

/*@
  PetscLayoutSetBlockSize - Sets the block size for a PetscLayout object.

  Logically Collective on PetscLayout

  Input Parameters:
+ map - pointer to the map
- bs - the block size

  Level: developer

  Notes:
  Call this after the call to PetscLayoutCreate()

.seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutGetLocalSize(), PetscLayoutGetBlockSize(),
          PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutSetUp()
@*/
PetscErrorCode PetscLayoutSetBlockSize(PetscLayout map,PetscInt bs)
{
  if (bs < 0) return(0);
  if (map->n > 0 && map->n % bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local size %D not compatible with block size %D",map->n,bs);
  if (map->mapping) {
    PetscInt       obs;

    ISLocalToGlobalMappingGetBlockSize(map->mapping,&obs);
    if (obs > 1) {
      ISLocalToGlobalMappingSetBlockSize(map->mapping,bs);
    }
  }
  map->bs = bs;
  return(0);
}
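
/*
  Example (editor's sketch): the local and global sizes must be divisible by
  the block size, so set the block size before (or consistently with) the
  sizes.

     PetscLayout layout;
     PetscLayoutCreate(comm,&layout);
     PetscLayoutSetBlockSize(layout,2);
     PetscLayoutSetLocalSize(layout,6);   // ok: 6 % 2 == 0; 7 would raise PETSC_ERR_ARG_INCOMP
     PetscLayoutSetUp(layout);
*/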

/*@
  PetscLayoutGetBlockSize - Gets the block size for a PetscLayout object.

  Not Collective

  Input Parameter:
. map - pointer to the map

  Output Parameter:
. bs - the block size

  Level: developer

  Notes:
  Call this after the call to PetscLayoutSetUp()

.seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutGetLocalSize(), PetscLayoutSetSize(), PetscLayoutSetUp(),
          PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetSize()
@*/
PetscErrorCode PetscLayoutGetBlockSize(PetscLayout map,PetscInt *bs)
{
  *bs = PetscAbs(map->bs);
  return(0);
}

/*@
  PetscLayoutGetRange - gets the range of values owned by this process

  Not Collective

  Input Parameter:
. map - pointer to the map

  Output Parameters:
+ rstart - first index owned by this process
- rend   - one more than the last index owned by this process

  Level: developer

  Notes:
  Call this after the call to PetscLayoutSetUp()

.seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutGetLocalSize(), PetscLayoutSetSize(),
          PetscLayoutGetSize(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutSetUp()
@*/
PetscErrorCode PetscLayoutGetRange(PetscLayout map,PetscInt *rstart,PetscInt *rend)
{
  if (rstart) *rstart = map->rstart;
  if (rend)   *rend   = map->rend;
  return(0);
}
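
/*
  Example (editor's sketch): looping over the global indices owned by this
  process after the layout is set up.

     PetscInt rstart,rend,i;
     PetscLayoutGetRange(layout,&rstart,&rend);
     for (i = rstart; i < rend; i++) {
       // i is a global index owned by this rank
     }
*/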

/*@C
  PetscLayoutGetRanges - gets the ranges of values owned by all processes

  Not Collective

  Input Parameter:
. map - pointer to the map

  Output Parameter:
. range - start of each processor's range of indices (the final entry is one more than the
          last index on the last process)

  Level: developer

  Notes:
  Call this after the call to PetscLayoutSetUp()

  Fortran Notes:
  Not available from Fortran

.seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutGetLocalSize(), PetscLayoutSetSize(),
          PetscLayoutGetSize(), PetscLayoutGetRange(), PetscLayoutSetBlockSize(), PetscLayoutSetUp()

@*/
PetscErrorCode  PetscLayoutGetRanges(PetscLayout map,const PetscInt *range[])
{
  *range = map->range;
  return(0);
}

/*@C
   PetscLayoutsCreateSF - Creates a parallel star forest mapping two PetscLayout objects

   Collective

   Input Arguments:
+  rmap - PetscLayout defining the global root space
-  lmap - PetscLayout defining the global leaf space

   Output Arguments:
.  sf - The parallel star forest

   Level: intermediate

.seealso: PetscSFCreate(), PetscLayoutCreate(), PetscSFSetGraphLayout()
@*/
PetscErrorCode PetscLayoutsCreateSF(PetscLayout rmap, PetscLayout lmap, PetscSF* sf)
{
  PetscInt       i,nroots,nleaves = 0;
  PetscInt       rN, lst, len;
  PetscMPIInt    owner = -1;
  PetscSFNode    *remote;
  MPI_Comm       rcomm = rmap->comm;
  MPI_Comm       lcomm = lmap->comm;
  PetscMPIInt    flg;

  if (!rmap->setupcalled) SETERRQ(rcomm,PETSC_ERR_ARG_WRONGSTATE,"Root layout not setup");
  if (!lmap->setupcalled) SETERRQ(lcomm,PETSC_ERR_ARG_WRONGSTATE,"Leaf layout not setup");
  MPI_Comm_compare(rcomm,lcomm,&flg);
  if (flg != MPI_CONGRUENT && flg != MPI_IDENT) SETERRQ(rcomm,PETSC_ERR_SUP,"cannot map two layouts with non-matching communicators");
  PetscSFCreate(rcomm,sf);
  PetscLayoutGetLocalSize(rmap,&nroots);
  PetscLayoutGetSize(rmap,&rN);
  PetscLayoutGetRange(lmap,&lst,&len);  /* [lst,len) is this rank's range in the leaf layout */
  PetscMalloc1(len-lst,&remote);
  for (i = lst; i < len && i < rN; i++) {
    if (owner < -1 || i >= rmap->range[owner+1]) {
      PetscLayoutFindOwner(rmap,i,&owner);
    }
    remote[nleaves].rank  = owner;
    remote[nleaves].index = i - rmap->range[owner];
    nleaves++;
  }
  PetscSFSetGraph(*sf,nroots,nleaves,NULL,PETSC_OWN_POINTER,remote,PETSC_COPY_VALUES);
  PetscFree(remote);
  return(0);
}
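
/*
  Example (editor's sketch): connecting two layouts with a star forest; the
  roots live in the space described by rmap and the leaves in the space
  described by lmap, so a PetscSFBcastBegin()/PetscSFBcastEnd() pair on the
  result moves data from the root distribution to the leaf distribution.

     PetscSF sf;
     PetscLayoutsCreateSF(rmap,lmap,&sf);
     // ... use the star forest ...
     PetscSFDestroy(&sf);
*/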

/*@C
   PetscSFSetGraphLayout - Set a parallel star forest via global indices and a PetscLayout

   Collective

   Input Arguments:
+  sf - star forest
.  layout - PetscLayout defining the global space
.  nleaves - number of leaf vertices on the current process, each of these references a root on any process
.  ilocal - locations of leaves in leafdata buffers, pass NULL for contiguous storage
.  localmode - copy mode for ilocal
-  iremote - remote locations of root vertices for each leaf on the current process

   Level: intermediate

   Developers Note: Local indices which are the identity permutation in the range [0,nleaves) are discarded as they
   encode contiguous storage. In that case, if localmode is PETSC_OWN_POINTER, the memory is deallocated as it is not
   needed

.seealso: PetscSFCreate(), PetscSFView(), PetscSFSetGraph(), PetscSFGetGraph()
@*/
PetscErrorCode PetscSFSetGraphLayout(PetscSF sf,PetscLayout layout,PetscInt nleaves,const PetscInt *ilocal,PetscCopyMode localmode,const PetscInt *iremote)
{
  PetscInt       i,nroots;
  PetscSFNode    *remote;

  PetscLayoutGetLocalSize(layout,&nroots);
  PetscMalloc1(nleaves,&remote);
  for (i=0; i<nleaves; i++) {
    PetscMPIInt owner = -1;
    PetscLayoutFindOwner(layout,iremote[i],&owner);
    remote[i].rank  = owner;
    remote[i].index = iremote[i] - layout->range[owner];
  }
  PetscSFSetGraph(sf,nroots,nleaves,ilocal,localmode,remote,PETSC_OWN_POINTER);
  return(0);
}
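
/*
  Example (editor's sketch): describing a star forest by global root indices.
  Each of the two leaves on this rank references a root by its global index
  in layout (the indices 0 and 5 here are hypothetical); passing NULL for
  ilocal means the leaves are stored contiguously.

     const PetscInt gidx[2] = {0,5};
     PetscSFSetGraphLayout(sf,layout,2,NULL,PETSC_COPY_VALUES,gidx);
*/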

/*@
  PetscLayoutCompare - Compares two layouts

  Not Collective

  Input Parameters:
+ mapa - pointer to the first map
- mapb - pointer to the second map

  Output Parameter:
. congruent - PETSC_TRUE if the two layouts are congruent, PETSC_FALSE otherwise

  Level: beginner

.seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutGetLocalSize(), PetscLayoutGetBlockSize(),
          PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutSetUp()
@*/
PetscErrorCode PetscLayoutCompare(PetscLayout mapa,PetscLayout mapb,PetscBool *congruent)
{
  PetscMPIInt    sizea,sizeb;

  *congruent = PETSC_FALSE;
  MPI_Comm_size(mapa->comm,&sizea);
  MPI_Comm_size(mapb->comm,&sizeb);
  if (mapa->N == mapb->N && mapa->range && mapb->range && sizea == sizeb) {
    PetscArraycmp(mapa->range,mapb->range,sizea+1,congruent);
  }
  return(0);
}
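
/*
  Example (editor's sketch): two layouts are congruent exactly when they have
  the same global size and identical ownership ranges, which in particular
  requires communicators of the same size.

     PetscBool congruent;
     PetscLayoutCompare(mapa,mapb,&congruent);
     if (congruent) {
       // the two objects can safely share ownership information
     }
*/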

/* TODO: handle nooffprocentries like MatZeroRowsMapLocal_Private, since this code is the same */
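/*
  Editor's summary (this helper has no manual page): given N global indices
  idxs[] in [0,map->N), the owner of each index is resolved through the layout
  and the index is communicated to that owner. On return, *on and *oidxs (if
  requested) hold the count and the local positions of this rank's owned
  entries that are referenced by any process, and *ogidxs (if requested) holds
  a new contiguous global numbering for those entries.
*/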
PetscErrorCode PetscLayoutMapLocal(PetscLayout map,PetscInt N,const PetscInt idxs[], PetscInt *on,PetscInt **oidxs,PetscInt **ogidxs)
{
  PetscInt      *owners = map->range;
  PetscInt       n      = map->n;
  PetscSF        sf;
  PetscInt      *lidxs,*work = NULL;
  PetscSFNode   *ridxs;
  PetscMPIInt    rank, p = 0;
  PetscInt       r, len = 0;

  if (on) *on = 0;              /* squelch -Wmaybe-uninitialized */
  /* Create SF where leaves are input idxs and roots are owned idxs */
  MPI_Comm_rank(map->comm,&rank);
  PetscMalloc1(n,&lidxs);
  for (r = 0; r < n; ++r) lidxs[r] = -1;
  PetscMalloc1(N,&ridxs);
  for (r = 0; r < N; ++r) {
    const PetscInt idx = idxs[r];
    if (idx < 0 || map->N <= idx) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Index %D out of range [0,%D)",idx,map->N);
    if (idx < owners[p] || owners[p+1] <= idx) { /* short-circuit the search if the last p owns this idx too */
      PetscLayoutFindOwner(map,idx,&p);
    }
    ridxs[r].rank = p;
    ridxs[r].index = idxs[r] - owners[p];
  }
  PetscSFCreate(map->comm,&sf);
  PetscSFSetGraph(sf,n,N,NULL,PETSC_OWN_POINTER,ridxs,PETSC_OWN_POINTER);
  PetscSFReduceBegin(sf,MPIU_INT,(PetscInt*)idxs,lidxs,MPI_LOR);
  PetscSFReduceEnd(sf,MPIU_INT,(PetscInt*)idxs,lidxs,MPI_LOR);
  if (ogidxs) { /* communicate global idxs */
    PetscInt cum = 0,start,*work2;

    PetscMalloc1(n,&work);
    PetscCalloc1(N,&work2);
    for (r = 0; r < N; ++r) if (idxs[r] >=0) cum++;
    MPI_Scan(&cum,&start,1,MPIU_INT,MPI_SUM,map->comm);
    start -= cum;
    cum = 0;
    for (r = 0; r < N; ++r) if (idxs[r] >=0) work2[r] = start+cum++;
    PetscSFReduceBegin(sf,MPIU_INT,work2,work,MPIU_REPLACE);
    PetscSFReduceEnd(sf,MPIU_INT,work2,work,MPIU_REPLACE);
    PetscFree(work2);
  }
  PetscSFDestroy(&sf);
  /* Compress and put in indices */
  for (r = 0; r < n; ++r)
    if (lidxs[r] >= 0) {
      if (work) work[len] = work[r];
      lidxs[len++] = r;
    }
  if (on) *on = len;
  if (oidxs) *oidxs = lidxs;
  if (ogidxs) *ogidxs = work;
  return(0);
}