Actual source code: pmap.c
petsc-3.12.5 2020-03-29
/*
   This file contains routines for basic map object implementation.
*/

#include <petscis.h>
#include <petscsf.h>
#include <petsc/private/isimpl.h>
/*@
  PetscLayoutCreate - Allocates PetscLayout space and sets the PetscLayout contents to the default.

  Collective

  Input Parameter:
. comm - the MPI communicator

  Output Parameter:
. map - the new PetscLayout

  Level: advanced

  Notes:
  Typical calling sequence
.vb
  PetscLayoutCreate(MPI_Comm,PetscLayout *);
  PetscLayoutSetBlockSize(PetscLayout,bs);
  PetscLayoutSetSize(PetscLayout,N); // or PetscLayoutSetLocalSize(PetscLayout,n);
  PetscLayoutSetUp(PetscLayout);
.ve
  Alternatively,
$ PetscLayoutCreateFromSizes(comm,n,N,bs,&layout);

  Optionally use any of the following:
+ PetscLayoutGetSize(PetscLayout,PetscInt *);
. PetscLayoutGetLocalSize(PetscLayout,PetscInt *);
. PetscLayoutGetRange(PetscLayout,PetscInt *rstart,PetscInt *rend);
. PetscLayoutGetRanges(PetscLayout,const PetscInt *range[]);
- PetscLayoutDestroy(PetscLayout*);

  The PetscLayout object and methods are intended to be used in the PETSc Vec and Mat implementations; they are rarely
  needed in user code unless it really gains something from their use.

.seealso: PetscLayoutSetLocalSize(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutGetLocalSize(), PetscLayout, PetscLayoutDestroy(),
          PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize(), PetscLayoutSetUp(),
          PetscLayoutCreateFromSizes()

@*/
PetscErrorCode PetscLayoutCreate(MPI_Comm comm,PetscLayout *map)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscNew(map);CHKERRQ(ierr);

  (*map)->comm        = comm;
  (*map)->bs          = -1;
  (*map)->n           = -1;
  (*map)->N           = -1;
  (*map)->range       = NULL;
  (*map)->range_alloc = PETSC_TRUE;
  (*map)->rstart      = 0;
  (*map)->rend        = 0;
  (*map)->setupcalled = PETSC_FALSE;
  (*map)->oldn        = -1;
  (*map)->oldN        = -1;
  (*map)->oldbs       = -1;
  PetscFunctionReturn(0);
}
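/*
   Example (an illustrative sketch, not part of the original file): the full
   create/set/setup/query/destroy life cycle of a PetscLayout with 10 entries
   per rank; error checking with CHKERRQ() is elided for brevity.
.vb
   PetscLayout layout;
   PetscInt    rstart,rend;

   PetscLayoutCreate(PETSC_COMM_WORLD,&layout);
   PetscLayoutSetLocalSize(layout,10);        // 10 entries on every rank
   PetscLayoutSetSize(layout,PETSC_DECIDE);   // global size computed by PetscLayoutSetUp()
   PetscLayoutSetUp(layout);
   PetscLayoutGetRange(layout,&rstart,&rend); // this rank owns [rstart,rend)
   PetscLayoutDestroy(&layout);
.ve
*/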
/*@
  PetscLayoutCreateFromSizes - Allocates PetscLayout space, sets the layout sizes, and sets the layout up.

  Collective

  Input Parameters:
+ comm - the MPI communicator
. n - the local size (or PETSC_DECIDE)
. N - the global size (or PETSC_DECIDE)
- bs - the block size (or PETSC_DECIDE)

  Output Parameter:
. map - the new PetscLayout

  Level: advanced

  Notes:
$ PetscLayoutCreateFromSizes(comm,n,N,bs,&layout);
  is a shorthand for
.vb
  PetscLayoutCreate(comm,&layout);
  PetscLayoutSetLocalSize(layout,n);
  PetscLayoutSetSize(layout,N);
  PetscLayoutSetBlockSize(layout,bs);
  PetscLayoutSetUp(layout);
.ve

.seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutGetLocalSize(), PetscLayout, PetscLayoutDestroy(),
          PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize(), PetscLayoutSetUp(), PetscLayoutCreateFromRanges()

@*/
PetscErrorCode PetscLayoutCreateFromSizes(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt bs,PetscLayout *map)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscLayoutCreate(comm, map);CHKERRQ(ierr);
  ierr = PetscLayoutSetLocalSize(*map, n);CHKERRQ(ierr);
  ierr = PetscLayoutSetSize(*map, N);CHKERRQ(ierr);
  ierr = PetscLayoutSetBlockSize(*map, bs);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(*map);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
/*@
  PetscLayoutDestroy - Frees a map object and its range if one exists.

  Collective

  Input Parameter:
. map - the PetscLayout

  Level: developer

  Note:
  The PetscLayout object and methods are intended to be used in the PETSc Vec and Mat implementations; it is
  recommended they not be used in user code unless you really gain something from their use.

.seealso: PetscLayoutSetLocalSize(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutGetLocalSize(), PetscLayout, PetscLayoutCreate(),
          PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize(), PetscLayoutSetUp()

@*/
PetscErrorCode PetscLayoutDestroy(PetscLayout *map)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*map) PetscFunctionReturn(0);
  if (!(*map)->refcnt--) {
    if ((*map)->range_alloc) {ierr = PetscFree((*map)->range);CHKERRQ(ierr);}
    ierr = ISLocalToGlobalMappingDestroy(&(*map)->mapping);CHKERRQ(ierr);
    ierr = PetscFree((*map));CHKERRQ(ierr);
  }
  *map = NULL;
  PetscFunctionReturn(0);
}
/*@
  PetscLayoutCreateFromRanges - Creates a new PetscLayout with the given ownership ranges and sets it up.

  Collective

  Input Parameters:
+ comm - the MPI communicator
. range - the array of ownership ranges for each rank with length commsize+1
. mode - the copy mode for range
- bs - the block size (or PETSC_DECIDE)

  Output Parameter:
. newmap - the new PetscLayout

  Level: developer

.seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutGetLocalSize(), PetscLayout, PetscLayoutDestroy(),
          PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize(), PetscLayoutSetUp(), PetscLayoutCreateFromSizes()

@*/
PetscErrorCode PetscLayoutCreateFromRanges(MPI_Comm comm,const PetscInt range[],PetscCopyMode mode,PetscInt bs,PetscLayout *newmap)
{
  PetscLayout    map;
  PetscMPIInt    rank,size;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = PetscLayoutCreate(comm, &map);CHKERRQ(ierr);
  ierr = PetscLayoutSetBlockSize(map, bs);CHKERRQ(ierr);
  switch (mode) {
    case PETSC_COPY_VALUES:
      ierr = PetscMalloc1(size+1, &map->range);CHKERRQ(ierr);
      ierr = PetscArraycpy(map->range, range, size+1);CHKERRQ(ierr);
      break;
    case PETSC_USE_POINTER:
      map->range_alloc = PETSC_FALSE;
      /* intentional fallthrough: both modes take the pointer as-is */
    default:
      map->range = (PetscInt*) range;
      break;
  }
  map->rstart = map->range[rank];
  map->rend   = map->range[rank+1];
  map->n      = map->rend - map->rstart;
  map->N      = map->range[size];
#if defined(PETSC_USE_DEBUG)
  /* just check that n, N and bs are consistent */
  {
    PetscInt tmp;
    ierr = MPIU_Allreduce(&map->n,&tmp,1,MPIU_INT,MPI_SUM,map->comm);CHKERRQ(ierr);
    if (tmp != map->N) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Sum of local lengths %D does not equal global length %D, my local length %D.\nThe provided PetscLayout is wrong.",tmp,map->N,map->n);
  }
  if (map->bs > 1) {
    if (map->n % map->bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Local size %D must be divisible by blocksize %D",map->n,map->bs);
  }
  if (map->bs > 1) {
    if (map->N % map->bs) SETERRQ2(map->comm,PETSC_ERR_PLIB,"Global size %D must be divisible by blocksize %D",map->N,map->bs);
  }
#endif
  /* lock the layout */
  map->setupcalled = PETSC_TRUE;
  map->oldn        = map->n;
  map->oldN        = map->N;
  map->oldbs       = map->bs;
  *newmap = map;
  PetscFunctionReturn(0);
}
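/*
   Example (an illustrative sketch, not part of the original file): on a
   3-process communicator, assigning ranks 0, 1 and 2 ownership of 4, 3 and
   3 entries respectively by passing the cumulative range array directly.
.vb
   PetscLayout    layout;
   const PetscInt range[4] = {0,4,7,10};   // rank r owns [range[r],range[r+1])

   PetscLayoutCreateFromRanges(PETSC_COMM_WORLD,range,PETSC_COPY_VALUES,1,&layout);
.ve
*/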
/*@
  PetscLayoutSetUp - given a map where you have set either the global or local
  size, sets up the map so that it may be used.

  Collective

  Input Parameter:
. map - pointer to the map

  Level: developer

  Notes:
  Typical calling sequence
$ PetscLayoutCreate(MPI_Comm,PetscLayout *);
$ PetscLayoutSetBlockSize(PetscLayout,1);
$ PetscLayoutSetSize(PetscLayout,N); or PetscLayoutSetLocalSize(PetscLayout,n); or both
$ PetscLayoutSetUp(PetscLayout);
$ PetscLayoutGetSize(PetscLayout,PetscInt *);

  If the range exists, and the local size is not set, everything gets computed from the range.

  If the local size and global size are already set and the range exists, then this does nothing.

.seealso: PetscLayoutSetLocalSize(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutGetLocalSize(), PetscLayout, PetscLayoutDestroy(),
          PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize(), PetscLayoutCreate()
@*/
PetscErrorCode PetscLayoutSetUp(PetscLayout map)
{
  PetscMPIInt    rank,size;
  PetscInt       p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (map->setupcalled && (map->n != map->oldn || map->N != map->oldN)) SETERRQ4(map->comm,PETSC_ERR_ARG_WRONGSTATE,"Layout is already setup with (local=%D,global=%D), cannot call setup again with (local=%D,global=%D)", map->oldn, map->oldN, map->n, map->N);
  if (map->setupcalled) PetscFunctionReturn(0);

  if (map->n > 0 && map->bs > 1) {
    if (map->n % map->bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Local size %D must be divisible by blocksize %D",map->n,map->bs);
  }
  if (map->N > 0 && map->bs > 1) {
    if (map->N % map->bs) SETERRQ2(map->comm,PETSC_ERR_PLIB,"Global size %D must be divisible by blocksize %D",map->N,map->bs);
  }

  ierr = MPI_Comm_size(map->comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(map->comm, &rank);CHKERRQ(ierr);
  /* split the ownership in units of blocks */
  if (map->n > 0) map->n = map->n/PetscAbs(map->bs);
  if (map->N > 0) map->N = map->N/PetscAbs(map->bs);
  ierr = PetscSplitOwnership(map->comm,&map->n,&map->N);CHKERRQ(ierr);
  map->n = map->n*PetscAbs(map->bs);
  map->N = map->N*PetscAbs(map->bs);
  if (!map->range) {
    ierr = PetscMalloc1(size+1, &map->range);CHKERRQ(ierr);
  }
  /* gather all local sizes, then turn them into cumulative ranges by a prefix sum */
  ierr = MPI_Allgather(&map->n, 1, MPIU_INT, map->range+1, 1, MPIU_INT, map->comm);CHKERRQ(ierr);

  map->range[0] = 0;
  for (p = 2; p <= size; p++) map->range[p] += map->range[p-1];

  map->rstart = map->range[rank];
  map->rend   = map->range[rank+1];

  /* lock the layout */
  map->setupcalled = PETSC_TRUE;
  map->oldn        = map->n;
  map->oldN        = map->N;
  map->oldbs       = map->bs;
  PetscFunctionReturn(0);
}
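/*
   Worked example (illustrative, not from the original file): with N = 10,
   n = PETSC_DECIDE and 3 ranks, PetscSplitOwnership() assigns local sizes
   4, 3 and 3; the MPI_Allgather() fills range[1..3] = {4,3,3} and the
   prefix sum turns it into range = {0,4,7,10}, so rank 1 owns [4,7).
*/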
/*@
  PetscLayoutDuplicate - creates a new PetscLayout with the same information as a given one. If the out PetscLayout already exists it is destroyed first.

  Collective on PetscLayout

  Input Parameter:
. in - input PetscLayout to be duplicated

  Output Parameter:
. out - the copy

  Level: developer

  Notes:
  PetscLayoutSetUp() does not need to be called on the resulting PetscLayout

.seealso: PetscLayoutCreate(), PetscLayoutDestroy(), PetscLayoutSetUp(), PetscLayoutReference()
@*/
PetscErrorCode PetscLayoutDuplicate(PetscLayout in,PetscLayout *out)
{
  PetscMPIInt    size;
  PetscErrorCode ierr;
  MPI_Comm       comm = in->comm;

  PetscFunctionBegin;
  ierr = PetscLayoutDestroy(out);CHKERRQ(ierr);
  ierr = PetscLayoutCreate(comm,out);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = PetscMemcpy(*out,in,sizeof(struct _n_PetscLayout));CHKERRQ(ierr);
  if (in->range) {
    ierr = PetscMalloc1(size+1,&(*out)->range);CHKERRQ(ierr);
    ierr = PetscArraycpy((*out)->range,in->range,size+1);CHKERRQ(ierr);
  }
  (*out)->refcnt = 0;
  PetscFunctionReturn(0);
}
/*@
  PetscLayoutReference - Causes a PETSc Vec or Mat to share a PetscLayout with one that already exists. Used by Vec/MatDuplicate_XXX()

  Collective on PetscLayout

  Input Parameter:
. in - input PetscLayout to be copied

  Output Parameter:
. out - the reference location

  Level: developer

  Notes:
  PetscLayoutSetUp() does not need to be called on the resulting PetscLayout

  If the out location already contains a PetscLayout it is destroyed

.seealso: PetscLayoutCreate(), PetscLayoutDestroy(), PetscLayoutSetUp(), PetscLayoutDuplicate()
@*/
PetscErrorCode PetscLayoutReference(PetscLayout in,PetscLayout *out)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  in->refcnt++;
  ierr = PetscLayoutDestroy(out);CHKERRQ(ierr);
  *out = in;
  PetscFunctionReturn(0);
}
/*@
  PetscLayoutSetISLocalToGlobalMapping - sets an ISLocalToGlobalMapping into a PetscLayout

  Collective on PetscLayout

  Input Parameters:
+ in - input PetscLayout
- ltog - the local to global mapping

  Level: developer

  Notes:
  If the layout already contains an ISLocalToGlobalMapping it is destroyed first.

.seealso: PetscLayoutCreate(), PetscLayoutDestroy(), PetscLayoutSetUp(), PetscLayoutDuplicate()
@*/
PetscErrorCode PetscLayoutSetISLocalToGlobalMapping(PetscLayout in,ISLocalToGlobalMapping ltog)
{
  PetscErrorCode ierr;
  PetscInt       bs;

  PetscFunctionBegin;
  ierr = ISLocalToGlobalMappingGetBlockSize(ltog,&bs);CHKERRQ(ierr);
  if (in->bs > 0 && (bs != 1) && in->bs != bs) SETERRQ2(in->comm,PETSC_ERR_PLIB,"Blocksize of layout %D must match that of mapping %D (or the latter must be 1)",in->bs,bs);
  ierr = PetscObjectReference((PetscObject)ltog);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&in->mapping);CHKERRQ(ierr);
  in->mapping = ltog;
  PetscFunctionReturn(0);
}
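/*
   Example (an illustrative sketch, not part of the original file): attaching
   a mapping from 4 local indices to hand-picked global indices; 'layout' is
   assumed to be an existing PetscLayout. The layout takes its own reference,
   so the caller may destroy the mapping afterwards.
.vb
   ISLocalToGlobalMapping ltog;
   PetscInt               globals[4] = {0,1,8,9};

   ISLocalToGlobalMappingCreate(PETSC_COMM_WORLD,1,4,globals,PETSC_COPY_VALUES,&ltog);
   PetscLayoutSetISLocalToGlobalMapping(layout,ltog);
   ISLocalToGlobalMappingDestroy(&ltog);
.ve
*/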
/*@
  PetscLayoutSetLocalSize - Sets the local size for a PetscLayout object.

  Collective on PetscLayout

  Input Parameters:
+ map - pointer to the map
- n - the local size

  Level: developer

  Notes:
  Call this after the call to PetscLayoutCreate()

.seealso: PetscLayoutCreate(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutGetLocalSize(), PetscLayoutSetUp(),
          PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize()
@*/
PetscErrorCode PetscLayoutSetLocalSize(PetscLayout map,PetscInt n)
{
  PetscFunctionBegin;
  if (map->bs > 1 && n % map->bs) SETERRQ2(map->comm,PETSC_ERR_ARG_INCOMP,"Local size %D not compatible with block size %D",n,map->bs);
  map->n = n;
  PetscFunctionReturn(0);
}
/*@C
  PetscLayoutGetLocalSize - Gets the local size for a PetscLayout object.

  Not Collective

  Input Parameter:
. map - pointer to the map

  Output Parameter:
. n - the local size

  Level: developer

  Notes:
  Call this after the call to PetscLayoutSetUp()

  Fortran Notes:
  Not available from Fortran

.seealso: PetscLayoutCreate(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutGetLocalSize(), PetscLayoutSetUp(),
          PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize()

@*/
PetscErrorCode PetscLayoutGetLocalSize(PetscLayout map,PetscInt *n)
{
  PetscFunctionBegin;
  *n = map->n;
  PetscFunctionReturn(0);
}
/*@
  PetscLayoutSetSize - Sets the global size for a PetscLayout object.

  Logically Collective on PetscLayout

  Input Parameters:
+ map - pointer to the map
- n - the global size

  Level: developer

  Notes:
  Call this after the call to PetscLayoutCreate()

.seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutGetLocalSize(), PetscLayoutGetSize(), PetscLayoutSetUp(),
          PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize()
@*/
PetscErrorCode PetscLayoutSetSize(PetscLayout map,PetscInt n)
{
  PetscFunctionBegin;
  map->N = n;
  PetscFunctionReturn(0);
}
/*@
  PetscLayoutGetSize - Gets the global size for a PetscLayout object.

  Not Collective

  Input Parameter:
. map - pointer to the map

  Output Parameter:
. n - the global size

  Level: developer

  Notes:
  Call this after the call to PetscLayoutSetUp()

.seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutGetLocalSize(), PetscLayoutSetSize(), PetscLayoutSetUp(),
          PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize()
@*/
PetscErrorCode PetscLayoutGetSize(PetscLayout map,PetscInt *n)
{
  PetscFunctionBegin;
  *n = map->N;
  PetscFunctionReturn(0);
}
/*@
  PetscLayoutSetBlockSize - Sets the block size for a PetscLayout object.

  Logically Collective on PetscLayout

  Input Parameters:
+ map - pointer to the map
- bs - the block size

  Level: developer

  Notes:
  Call this after the call to PetscLayoutCreate()

.seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutGetLocalSize(), PetscLayoutGetBlockSize(),
          PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutSetUp()
@*/
PetscErrorCode PetscLayoutSetBlockSize(PetscLayout map,PetscInt bs)
{
  PetscFunctionBegin;
  if (bs < 0) PetscFunctionReturn(0);
  if (map->n > 0 && map->n % bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local size %D not compatible with block size %D",map->n,bs);
  if (map->mapping) {
    PetscInt       obs;
    PetscErrorCode ierr;

    ierr = ISLocalToGlobalMappingGetBlockSize(map->mapping,&obs);CHKERRQ(ierr);
    if (obs > 1) {
      ierr = ISLocalToGlobalMappingSetBlockSize(map->mapping,bs);CHKERRQ(ierr);
    }
  }
  map->bs = bs;
  PetscFunctionReturn(0);
}
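/*
   Example (an illustrative sketch, not part of the original file): with a
   block size of 2 the local size must be a multiple of 2, otherwise
   PetscLayoutSetLocalSize()/PetscLayoutSetUp() raise an error.
.vb
   PetscLayoutCreate(PETSC_COMM_WORLD,&layout);
   PetscLayoutSetBlockSize(layout,2);
   PetscLayoutSetLocalSize(layout,6);   // ok: 6 is divisible by 2
   PetscLayoutSetUp(layout);
.ve
*/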
/*@
  PetscLayoutGetBlockSize - Gets the block size for a PetscLayout object.

  Not Collective

  Input Parameter:
. map - pointer to the map

  Output Parameter:
. bs - the block size

  Level: developer

  Notes:
  Call this after the call to PetscLayoutSetUp()

.seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutGetLocalSize(), PetscLayoutSetSize(), PetscLayoutSetUp(),
          PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetSize()
@*/
PetscErrorCode PetscLayoutGetBlockSize(PetscLayout map,PetscInt *bs)
{
  PetscFunctionBegin;
  *bs = PetscAbs(map->bs);
  PetscFunctionReturn(0);
}
/*@
  PetscLayoutGetRange - gets the range of values owned by this process

  Not Collective

  Input Parameter:
. map - pointer to the map

  Output Parameters:
+ rstart - first index owned by this process
- rend - one more than the last index owned by this process

  Level: developer

  Notes:
  Call this after the call to PetscLayoutSetUp()

.seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutGetLocalSize(), PetscLayoutSetSize(),
          PetscLayoutGetSize(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutSetUp()
@*/
PetscErrorCode PetscLayoutGetRange(PetscLayout map,PetscInt *rstart,PetscInt *rend)
{
  PetscFunctionBegin;
  if (rstart) *rstart = map->rstart;
  if (rend)   *rend   = map->rend;
  PetscFunctionReturn(0);
}
/*@C
  PetscLayoutGetRanges - gets the ranges of values owned by all processes

  Not Collective

  Input Parameter:
. map - pointer to the map

  Output Parameter:
. range - start of each process's range of indices (the final entry is one more than the
          last index on the last process)

  Level: developer

  Notes:
  Call this after the call to PetscLayoutSetUp()

  Fortran Notes:
  Not available from Fortran

.seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutGetLocalSize(), PetscLayoutSetSize(),
          PetscLayoutGetSize(), PetscLayoutGetRange(), PetscLayoutSetBlockSize(), PetscLayoutSetUp()

@*/
PetscErrorCode PetscLayoutGetRanges(PetscLayout map,const PetscInt *range[])
{
  PetscFunctionBegin;
  *range = map->range;
  PetscFunctionReturn(0);
}
/*@C
  PetscSFSetGraphLayout - Set a parallel star forest via global indices and a PetscLayout

  Collective

  Input Arguments:
+ sf - star forest
. layout - PetscLayout defining the global space
. nleaves - number of leaf vertices on the current process, each of these references a root on any process
. ilocal - locations of leaves in leafdata buffers, pass NULL for contiguous storage
. localmode - copy mode for ilocal
- iremote - remote locations of root vertices for each leaf on the current process

  Level: intermediate

  Developers Note:
  Local indices which are the identity permutation in the range [0,nleaves) are discarded as they
  encode contiguous storage. In that case, if localmode is PETSC_OWN_POINTER, the memory is deallocated as it is not
  needed.

.seealso: PetscSFCreate(), PetscSFView(), PetscSFSetGraph(), PetscSFGetGraph()
@*/
PetscErrorCode PetscSFSetGraphLayout(PetscSF sf,PetscLayout layout,PetscInt nleaves,const PetscInt *ilocal,PetscCopyMode localmode,const PetscInt *iremote)
{
  PetscErrorCode ierr;
  PetscInt       i,nroots;
  PetscSFNode    *remote;

  PetscFunctionBegin;
  ierr = PetscLayoutGetLocalSize(layout,&nroots);CHKERRQ(ierr);
  ierr = PetscMalloc1(nleaves,&remote);CHKERRQ(ierr);
  for (i=0; i<nleaves; i++) {
    PetscInt owner = -1;
    ierr = PetscLayoutFindOwner(layout,iremote[i],&owner);CHKERRQ(ierr);
    remote[i].rank  = owner;
    remote[i].index = iremote[i] - layout->range[owner];
  }
  ierr = PetscSFSetGraph(sf,nroots,nleaves,ilocal,localmode,remote,PETSC_OWN_POINTER);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
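/*
   Example (an illustrative sketch, not part of the original file): every rank
   creates one leaf referencing global index 0, then the owning rank broadcasts
   a value to all leaves; 'layout' is assumed to be a set-up PetscLayout with
   one entry per rank on the same communicator.
.vb
   PetscSF  sf;
   PetscInt gidx = 0,leaf,root[1] = {42};

   PetscSFCreate(PETSC_COMM_WORLD,&sf);
   PetscSFSetGraphLayout(sf,layout,1,NULL,PETSC_COPY_VALUES,&gidx);
   PetscSFBcastBegin(sf,MPIU_INT,root,&leaf);
   PetscSFBcastEnd(sf,MPIU_INT,root,&leaf);   // leaf == 42 on every rank
   PetscSFDestroy(&sf);
.ve
*/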
/*@
  PetscLayoutCompare - Compares two layouts

  Not Collective

  Input Parameters:
+ mapa - pointer to the first map
- mapb - pointer to the second map

  Output Parameter:
. congruent - PETSC_TRUE if the two layouts are congruent, PETSC_FALSE otherwise

  Level: beginner

.seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutGetLocalSize(), PetscLayoutGetBlockSize(),
          PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutSetUp()
@*/
PetscErrorCode PetscLayoutCompare(PetscLayout mapa,PetscLayout mapb,PetscBool *congruent)
{
  PetscErrorCode ierr;
  PetscMPIInt    sizea,sizeb;

  PetscFunctionBegin;
  *congruent = PETSC_FALSE;
  ierr = MPI_Comm_size(mapa->comm,&sizea);CHKERRQ(ierr);
  ierr = MPI_Comm_size(mapb->comm,&sizeb);CHKERRQ(ierr);
  if (mapa->N == mapb->N && mapa->range && mapb->range && sizea == sizeb) {
    ierr = PetscArraycmp(mapa->range,mapb->range,sizea+1,congruent);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
/* TODO: handle nooffprocentries like MatZeroRowsMapLocal_Private, since this code is the same */
/*
   Undocumented helper: given N global indices idxs[] (each in [0,map->N)),
   marks which locally owned entries of map are referenced by any process.
   On exit *on holds the number of marked entries, *oidxs their local
   indices, and, if requested, *ogidxs a contiguous global renumbering of
   the selected input indices communicated to their owners.
*/
PetscErrorCode PetscLayoutMapLocal(PetscLayout map,PetscInt N,const PetscInt idxs[], PetscInt *on,PetscInt **oidxs,PetscInt **ogidxs)
{
  PetscInt      *owners = map->range;
  PetscInt       n      = map->n;
  PetscSF        sf;
  PetscInt      *lidxs,*work = NULL;
  PetscSFNode   *ridxs;
  PetscMPIInt    rank;
  PetscInt       r, p = 0, len = 0;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (on) *on = 0;              /* squelch -Wmaybe-uninitialized */
  /* Create SF where leaves are input idxs and roots are owned idxs */
  ierr = MPI_Comm_rank(map->comm,&rank);CHKERRQ(ierr);
  ierr = PetscMalloc1(n,&lidxs);CHKERRQ(ierr);
  for (r = 0; r < n; ++r) lidxs[r] = -1;
  ierr = PetscMalloc1(N,&ridxs);CHKERRQ(ierr);
  for (r = 0; r < N; ++r) {
    const PetscInt idx = idxs[r];
    if (idx < 0 || map->N <= idx) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Index %D out of range [0,%D)",idx,map->N);
    if (idx < owners[p] || owners[p+1] <= idx) { /* short-circuit the search if the last p owns this idx too */
      ierr = PetscLayoutFindOwner(map,idx,&p);CHKERRQ(ierr);
    }
    ridxs[r].rank  = p;
    ridxs[r].index = idxs[r] - owners[p];
  }
  ierr = PetscSFCreate(map->comm,&sf);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(sf,n,N,NULL,PETSC_OWN_POINTER,ridxs,PETSC_OWN_POINTER);CHKERRQ(ierr);
  ierr = PetscSFReduceBegin(sf,MPIU_INT,(PetscInt*)idxs,lidxs,MPI_LOR);CHKERRQ(ierr);
  ierr = PetscSFReduceEnd(sf,MPIU_INT,(PetscInt*)idxs,lidxs,MPI_LOR);CHKERRQ(ierr);
  if (ogidxs) { /* communicate global idxs */
    PetscInt cum = 0,start,*work2;

    ierr = PetscMalloc1(n,&work);CHKERRQ(ierr);
    ierr = PetscCalloc1(N,&work2);CHKERRQ(ierr);
    for (r = 0; r < N; ++r) if (idxs[r] >=0) cum++;
    ierr = MPI_Scan(&cum,&start,1,MPIU_INT,MPI_SUM,map->comm);CHKERRQ(ierr);
    start -= cum;
    cum    = 0;
    for (r = 0; r < N; ++r) if (idxs[r] >=0) work2[r] = start+cum++;
    ierr = PetscSFReduceBegin(sf,MPIU_INT,work2,work,MPIU_REPLACE);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(sf,MPIU_INT,work2,work,MPIU_REPLACE);CHKERRQ(ierr);
    ierr = PetscFree(work2);CHKERRQ(ierr);
  }
  ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
  /* Compress and put in indices */
  for (r = 0; r < n; ++r)
    if (lidxs[r] >= 0) {
      if (work) work[len] = work[r];
      lidxs[len++] = r;
    }
  if (on) *on = len;
  if (oidxs) *oidxs = lidxs;
  if (ogidxs) *ogidxs = work;
  PetscFunctionReturn(0);
}