Actual source code: isltog.c
#include <petsc/private/isimpl.h>
#include <petsc/private/hashmapi.h>
#include <petscsf.h>
#include <petscviewer.h>

PetscClassId IS_LTOGM_CLASSID;

static PetscErrorCode ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping, PetscInt *, PetscInt **, PetscInt **, PetscInt ***);

typedef struct {
  PetscInt *globals;
} ISLocalToGlobalMapping_Basic;

typedef struct {
  PetscHMapI globalht;
} ISLocalToGlobalMapping_Hash;

/*@C
  ISGetPointRange - Returns a description of the points in an `IS` suitable for traversal

  Not Collective

  Input Parameter:
. pointIS - The `IS` object

  Output Parameters:
+ pStart - The first index, see notes
. pEnd   - One past the last index, see notes
- points - The indices, see notes

  Level: intermediate

  Notes:
  If the `IS` contains contiguous indices in an `ISSTRIDE`, then the indices are contained in [pStart, pEnd) and points = NULL. Otherwise, pStart = 0, pEnd = numIndices, and points is an array of the indices. This supports the following pattern
.vb
  ISGetPointRange(is, &pStart, &pEnd, &points);
  for (p = pStart; p < pEnd; ++p) {
    const PetscInt point = points ? points[p] : p;
  }
  ISRestorePointRange(is, &pStart, &pEnd, &points);
.ve

.seealso: [](sec_scatter), `IS`, `ISRestorePointRange()`, `ISGetPointSubrange()`, `ISGetIndices()`, `ISCreateStride()`
@*/
PetscErrorCode ISGetPointRange(IS pointIS, PetscInt *pStart, PetscInt *pEnd, const PetscInt **points)
{
  PetscInt  numCells, step = 1;
  PetscBool isStride;

  PetscFunctionBeginHot;
  *pStart = 0;
  *points = NULL;
  PetscCall(ISGetLocalSize(pointIS, &numCells));
  PetscCall(PetscObjectTypeCompare((PetscObject)pointIS, ISSTRIDE, &isStride));
  if (isStride) PetscCall(ISStrideGetInfo(pointIS, pStart, &step));
  *pEnd = *pStart + numCells;
  if (!isStride || step != 1) PetscCall(ISGetIndices(pointIS, points));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  ISRestorePointRange - Destroys the traversal description created with `ISGetPointRange()`

  Not Collective

  Input Parameters:
+ pointIS - The `IS` object
. pStart  - The first index, from `ISGetPointRange()`
. pEnd    - One past the last index, from `ISGetPointRange()`
- points  - The indices, from `ISGetPointRange()`

  Level: intermediate

.seealso: [](sec_scatter), `IS`, `ISGetPointRange()`, `ISGetPointSubrange()`, `ISGetIndices()`, `ISCreateStride()`
@*/
PetscErrorCode ISRestorePointRange(IS pointIS, PetscInt *pStart, PetscInt *pEnd, const PetscInt **points)
{
  PetscInt  step = 1;
  PetscBool isStride;

  PetscFunctionBeginHot;
  PetscCall(PetscObjectTypeCompare((PetscObject)pointIS, ISSTRIDE, &isStride));
  if (isStride) PetscCall(ISStrideGetInfo(pointIS, pStart, &step));
  if (!isStride || step != 1) PetscCall(ISRestoreIndices(pointIS, points));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  ISGetPointSubrange - Configures the input `IS` to be a subrange for the traversal information given

  Not Collective

  Input Parameters:
+ subpointIS - The `IS` object to be configured
. pStart     - The first index of the subrange
. pEnd       - One past the last index for the subrange
- points     - The indices for the entire range, from `ISGetPointRange()`

  Output Parameter:
. subpointIS - The `IS` object now configured to be a subrange

  Level: intermediate

  Note:
  The input `IS` will now respond properly to calls to `ISGetPointRange()` and return the subrange.
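
  Example Usage:
  A minimal sketch (pointIS and subIS are illustrative names, not part of the API):
.vb
  PetscInt        pStart, pEnd;
  const PetscInt *points;
  IS              subIS;

  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, 0, NULL, PETSC_COPY_VALUES, &subIS));
  PetscCall(ISGetPointRange(pointIS, &pStart, &pEnd, &points));
  PetscCall(ISGetPointSubrange(subIS, pStart, (pStart + pEnd) / 2, points)); /* first half of the range */
  /* ... traverse subIS with ISGetPointRange() while pointIS remains valid ... */
  PetscCall(ISRestorePointRange(pointIS, &pStart, &pEnd, &points));
  PetscCall(ISDestroy(&subIS));
.ve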

.seealso: [](sec_scatter), `IS`, `ISGetPointRange()`, `ISRestorePointRange()`, `ISGetIndices()`, `ISCreateStride()`
@*/
PetscErrorCode ISGetPointSubrange(IS subpointIS, PetscInt pStart, PetscInt pEnd, const PetscInt *points)
{
  PetscFunctionBeginHot;
  if (points) {
    PetscCall(ISSetType(subpointIS, ISGENERAL));
    PetscCall(ISGeneralSetIndices(subpointIS, pEnd - pStart, &points[pStart], PETSC_USE_POINTER));
  } else {
    PetscCall(ISSetType(subpointIS, ISSTRIDE));
    PetscCall(ISStrideSetStride(subpointIS, pEnd - pStart, pStart, 1));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* -----------------------------------------------------------------------------------------*/

/*
    Creates the global mapping information in the ISLocalToGlobalMapping structure

    If the user has not selected how to handle the global-to-local mapping, use HASH for
    "large" problems (see the threshold below) and BASIC otherwise
*/
static PetscErrorCode ISGlobalToLocalMappingSetUp(ISLocalToGlobalMapping mapping)
{
  PetscInt i, *idx = mapping->indices, n = mapping->n, end, start;

  PetscFunctionBegin;
  if (mapping->data) PetscFunctionReturn(PETSC_SUCCESS);
  end   = 0;
  start = PETSC_MAX_INT;

  for (i = 0; i < n; i++) {
    if (idx[i] < 0) continue;
    if (idx[i] < start) start = idx[i];
    if (idx[i] > end) end = idx[i];
  }
  if (start > end) {
    start = 0;
    end   = -1;
  }
  mapping->globalstart = start;
  mapping->globalend   = end;
  if (!((PetscObject)mapping)->type_name) {
    if ((end - start) > PetscMax(4 * n, 1000000)) {
      PetscCall(ISLocalToGlobalMappingSetType(mapping, ISLOCALTOGLOBALMAPPINGHASH));
    } else {
      PetscCall(ISLocalToGlobalMappingSetType(mapping, ISLOCALTOGLOBALMAPPINGBASIC));
    }
  }
  PetscTryTypeMethod(mapping, globaltolocalmappingsetup);
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode ISGlobalToLocalMappingSetUp_Basic(ISLocalToGlobalMapping mapping)
{
  PetscInt                      i, *idx = mapping->indices, n = mapping->n, end, start, *globals;
  ISLocalToGlobalMapping_Basic *map;

  PetscFunctionBegin;
  start = mapping->globalstart;
  end   = mapping->globalend;
  PetscCall(PetscNew(&map));
  PetscCall(PetscMalloc1(end - start + 2, &globals));
  map->globals = globals;
  for (i = 0; i < end - start + 1; i++) globals[i] = -1;
  for (i = 0; i < n; i++) {
    if (idx[i] < 0) continue;
    globals[idx[i] - start] = i;
  }
  mapping->data = (void *)map;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode ISGlobalToLocalMappingSetUp_Hash(ISLocalToGlobalMapping mapping)
{
  PetscInt                     i, *idx = mapping->indices, n = mapping->n;
  ISLocalToGlobalMapping_Hash *map;

  PetscFunctionBegin;
  PetscCall(PetscNew(&map));
  PetscCall(PetscHMapICreate(&map->globalht));
  for (i = 0; i < n; i++) {
    if (idx[i] < 0) continue;
    PetscCall(PetscHMapISet(map->globalht, idx[i], i));
  }
  mapping->data = (void *)map;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode ISLocalToGlobalMappingDestroy_Basic(ISLocalToGlobalMapping mapping)
{
  ISLocalToGlobalMapping_Basic *map = (ISLocalToGlobalMapping_Basic *)mapping->data;

  PetscFunctionBegin;
  if (!map) PetscFunctionReturn(PETSC_SUCCESS);
  PetscCall(PetscFree(map->globals));
  PetscCall(PetscFree(mapping->data));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode ISLocalToGlobalMappingDestroy_Hash(ISLocalToGlobalMapping mapping)
{
  ISLocalToGlobalMapping_Hash *map = (ISLocalToGlobalMapping_Hash *)mapping->data;

  PetscFunctionBegin;
  if (!map) PetscFunctionReturn(PETSC_SUCCESS);
  PetscCall(PetscHMapIDestroy(&map->globalht));
  PetscCall(PetscFree(mapping->data));
  PetscFunctionReturn(PETSC_SUCCESS);
}
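
/*
   Each include of isltog.h below instantiates a set of global-to-local routines from a
   template: GTOLTYPE selects the data structure (_Basic or _Hash), GTOLNAME names the
   generated routines, GTOLBS is the block size used, and GTOL(g, local) looks up the
   local index for the global index g. The _Basic variants index the dense globals[]
   array; the _Hash variants query the hash table.
*/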

#define GTOLTYPE _Basic
#define GTOLNAME _Basic
#define GTOLBS   mapping->bs
#define GTOL(g, local) \
  do { \
    local = map->globals[g / bs - start]; \
    if (local >= 0) local = bs * local + (g % bs); \
  } while (0)

#include <../src/vec/is/utils/isltog.h>

#define GTOLTYPE _Basic
#define GTOLNAME Block_Basic
#define GTOLBS   1
#define GTOL(g, local) \
  do { \
    local = map->globals[g - start]; \
  } while (0)
#include <../src/vec/is/utils/isltog.h>

#define GTOLTYPE _Hash
#define GTOLNAME _Hash
#define GTOLBS   mapping->bs
#define GTOL(g, local) \
  do { \
    (void)PetscHMapIGet(map->globalht, g / bs, &local); \
    if (local >= 0) local = bs * local + (g % bs); \
  } while (0)
#include <../src/vec/is/utils/isltog.h>

#define GTOLTYPE _Hash
#define GTOLNAME Block_Hash
#define GTOLBS   1
#define GTOL(g, local) \
  do { \
    (void)PetscHMapIGet(map->globalht, g, &local); \
  } while (0)
#include <../src/vec/is/utils/isltog.h>

/*@
  ISLocalToGlobalMappingDuplicate - Duplicates the local to global mapping object

  Not Collective

  Input Parameter:
. ltog - local to global mapping

  Output Parameter:
. nltog - the duplicated local to global mapping

  Level: advanced

.seealso: [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreate()`
@*/
PetscErrorCode ISLocalToGlobalMappingDuplicate(ISLocalToGlobalMapping ltog, ISLocalToGlobalMapping *nltog)
{
  ISLocalToGlobalMappingType l2gtype;

  PetscFunctionBegin;
  PetscCall(ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)ltog), ltog->bs, ltog->n, ltog->indices, PETSC_COPY_VALUES, nltog));
  PetscCall(ISLocalToGlobalMappingGetType(ltog, &l2gtype));
  PetscCall(ISLocalToGlobalMappingSetType(*nltog, l2gtype));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping

  Not Collective

  Input Parameter:
. mapping - local to global mapping

  Output Parameter:
. n - the number of entries in the local mapping, `ISLocalToGlobalMappingGetIndices()` returns an array of this length

  Level: advanced

.seealso: [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreate()`
@*/
PetscErrorCode ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping, PetscInt *n)
{
  PetscFunctionBegin;
  *n = mapping->bs * mapping->n;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  ISLocalToGlobalMappingViewFromOptions - View an `ISLocalToGlobalMapping` based on values in the options database

  Collective

  Input Parameters:
+ A    - the local to global mapping object
. obj  - Optional object
- name - command line option

  Level: intermediate

.seealso: [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingView()`, `PetscObjectViewFromOptions()`, `ISLocalToGlobalMappingCreate()`
@*/
PetscErrorCode ISLocalToGlobalMappingViewFromOptions(ISLocalToGlobalMapping A, PetscObject obj, const char name[])
{
  PetscFunctionBegin;
  PetscCall(PetscObjectViewFromOptions((PetscObject)A, obj, name));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  ISLocalToGlobalMappingView - View a local to global mapping

  Not Collective

  Input Parameters:
+ mapping - local to global mapping
- viewer  - viewer

  Level: advanced

.seealso: [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreate()`
@*/
PetscErrorCode ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping, PetscViewer viewer)
{
  PetscInt    i;
  PetscMPIInt rank;
  PetscBool   iascii;

  PetscFunctionBegin;
  if (!viewer) PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping), &viewer));
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)mapping), &rank));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  if (iascii) {
    PetscCall(PetscObjectPrintClassNamePrefixType((PetscObject)mapping, viewer));
    PetscCall(PetscViewerASCIIPushSynchronized(viewer));
    for (i = 0; i < mapping->n; i++) {
      PetscInt bs = mapping->bs, g = mapping->indices[i];
      if (bs == 1) PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "[%d] %" PetscInt_FMT " %" PetscInt_FMT "\n", rank, i, g));
      else PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "[%d] %" PetscInt_FMT ":%" PetscInt_FMT " %" PetscInt_FMT ":%" PetscInt_FMT "\n", rank, i * bs, (i + 1) * bs, g * bs, (g + 1) * bs));
    }
    PetscCall(PetscViewerFlush(viewer));
    PetscCall(PetscViewerASCIIPopSynchronized(viewer));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
  ordering and a global parallel ordering.

  Not Collective

  Input Parameter:
. is - index set containing the global numbers for each local number

  Output Parameter:
. mapping - new mapping data structure

  Level: advanced

  Note:
  The block size of the `IS` determines the block size of the mapping
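
  Example Usage:
  A minimal sketch (assuming is is an existing `IS` holding the global numbers):
.vb
  ISLocalToGlobalMapping ltog;

  PetscCall(ISLocalToGlobalMappingCreateIS(is, &ltog));
  /* ... use ltog ... */
  PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
.ve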

.seealso: [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingSetFromOptions()`
@*/
PetscErrorCode ISLocalToGlobalMappingCreateIS(IS is, ISLocalToGlobalMapping *mapping)
{
  PetscInt        n, bs;
  const PetscInt *indices;
  MPI_Comm        comm;
  PetscBool       isblock;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)is, &comm));
  PetscCall(ISGetLocalSize(is, &n));
  PetscCall(PetscObjectTypeCompare((PetscObject)is, ISBLOCK, &isblock));
  if (!isblock) {
    PetscCall(ISGetIndices(is, &indices));
    PetscCall(ISLocalToGlobalMappingCreate(comm, 1, n, indices, PETSC_COPY_VALUES, mapping));
    PetscCall(ISRestoreIndices(is, &indices));
  } else {
    PetscCall(ISGetBlockSize(is, &bs));
    PetscCall(ISBlockGetIndices(is, &indices));
    PetscCall(ISLocalToGlobalMappingCreate(comm, bs, n / bs, indices, PETSC_COPY_VALUES, mapping));
    PetscCall(ISBlockRestoreIndices(is, &indices));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
  ordering and a global parallel ordering.

  Collective

  Input Parameters:
+ sf    - star forest mapping contiguous local indices to (rank, offset)
- start - first global index on this process, or `PETSC_DECIDE` to compute contiguous global numbering automatically

  Output Parameter:
. mapping - new mapping data structure

  Level: advanced

  Note:
  If any processor calls this with start = `PETSC_DECIDE` then all processors must; otherwise the program will hang.

.seealso: [](sec_scatter), `PetscSF`, `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingSetFromOptions()`
@*/
PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf, PetscInt start, ISLocalToGlobalMapping *mapping)
{
  PetscInt i, maxlocal, nroots, nleaves, *globals, *ltog;
  MPI_Comm comm;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)sf, &comm));
  PetscCall(PetscSFGetGraph(sf, &nroots, &nleaves, NULL, NULL));
  if (start == PETSC_DECIDE) {
    start = 0;
    PetscCallMPI(MPI_Exscan(&nroots, &start, 1, MPIU_INT, MPI_SUM, comm));
  } else PetscCheck(start >= 0, comm, PETSC_ERR_ARG_OUTOFRANGE, "start must be nonnegative or PETSC_DECIDE");
  PetscCall(PetscSFGetLeafRange(sf, NULL, &maxlocal));
  ++maxlocal;
  PetscCall(PetscMalloc1(nroots, &globals));
  PetscCall(PetscMalloc1(maxlocal, &ltog));
  for (i = 0; i < nroots; i++) globals[i] = start + i;
  for (i = 0; i < maxlocal; i++) ltog[i] = -1;
  PetscCall(PetscSFBcastBegin(sf, MPIU_INT, globals, ltog, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf, MPIU_INT, globals, ltog, MPI_REPLACE));
  PetscCall(ISLocalToGlobalMappingCreate(comm, 1, maxlocal, ltog, PETSC_OWN_POINTER, mapping));
  PetscCall(PetscFree(globals));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  ISLocalToGlobalMappingSetBlockSize - Sets the blocksize of the mapping

  Not Collective

  Input Parameters:
+ mapping - mapping data structure
- bs      - the blocksize

  Level: advanced
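
  Note:
  A sketch of the effect (assuming the mapping currently has block size 1): if the mapping
  holds the indices {0,1,6,7}, then after
.vb
  ISLocalToGlobalMappingSetBlockSize(mapping, 2);
.ve
  it holds the two block indices {0,3} with block size 2. The indices within each new block
  must be consecutive (or all negative) for the call to succeed.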

.seealso: [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`
@*/
PetscErrorCode ISLocalToGlobalMappingSetBlockSize(ISLocalToGlobalMapping mapping, PetscInt bs)
{
  PetscInt       *nid;
  const PetscInt *oid;
  PetscInt        i, cn, on, obs, nn;

  PetscFunctionBegin;
  PetscCheck(bs >= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Invalid block size %" PetscInt_FMT, bs);
  if (bs == mapping->bs) PetscFunctionReturn(PETSC_SUCCESS);
  on  = mapping->n;
  obs = mapping->bs;
  oid = mapping->indices;
  nn  = (on * obs) / bs;
  PetscCheck((on * obs) % bs == 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Block size %" PetscInt_FMT " is inconsistent with block size %" PetscInt_FMT " and number of block indices %" PetscInt_FMT, bs, obs, on);

  PetscCall(PetscMalloc1(nn, &nid));
  PetscCall(ISLocalToGlobalMappingGetIndices(mapping, &oid));
  for (i = 0; i < nn; i++) {
    PetscInt j;
    for (j = 0, cn = 0; j < bs - 1; j++) {
      if (oid[i * bs + j] < 0) {
        cn++;
        continue;
      }
      PetscCheck(oid[i * bs + j] == oid[i * bs + j + 1] - 1, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Block sizes %" PetscInt_FMT " and %" PetscInt_FMT " are incompatible with the block indices: non consecutive indices %" PetscInt_FMT " %" PetscInt_FMT, bs, obs, oid[i * bs + j], oid[i * bs + j + 1]);
    }
    if (oid[i * bs + j] < 0) cn++;
    if (cn) {
      PetscCheck(cn == bs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Block sizes %" PetscInt_FMT " and %" PetscInt_FMT " are incompatible with the block indices: invalid number of negative entries in block %" PetscInt_FMT, bs, obs, cn);
      nid[i] = -1;
    } else {
      nid[i] = oid[i * bs] / bs;
    }
  }
  PetscCall(ISLocalToGlobalMappingRestoreIndices(mapping, &oid));

  mapping->n  = nn;
  mapping->bs = bs;
  PetscCall(PetscFree(mapping->indices));
  mapping->indices     = nid;
  mapping->globalstart = 0;
  mapping->globalend   = 0;

  /* reset the cached information */
  PetscCall(PetscFree(mapping->info_procs));
  PetscCall(PetscFree(mapping->info_numprocs));
  if (mapping->info_indices) {
    PetscInt i;

    PetscCall(PetscFree((mapping->info_indices)[0]));
    for (i = 1; i < mapping->info_nproc; i++) PetscCall(PetscFree(mapping->info_indices[i]));
    PetscCall(PetscFree(mapping->info_indices));
  }
  mapping->info_cached = PETSC_FALSE;

  PetscTryTypeMethod(mapping, destroy);
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  ISLocalToGlobalMappingGetBlockSize - Gets the blocksize of the mapping

  Not Collective

  Input Parameter:
. mapping - mapping data structure

  Output Parameter:
. bs - the blocksize

  Level: advanced

.seealso: [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`
@*/
PetscErrorCode ISLocalToGlobalMappingGetBlockSize(ISLocalToGlobalMapping mapping, PetscInt *bs)
{
  PetscFunctionBegin;
  *bs = mapping->bs;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
  ordering and a global parallel ordering.

  Not Collective, but communicator may have more than one process

  Input Parameters:
+ comm    - MPI communicator
. bs      - the block size
. n       - the number of local elements divided by the block size, or equivalently the number of block indices
. indices - the global index for each local element, these do not need to be in increasing order (sorted), these values should not be scaled (i.e. multiplied) by the blocksize bs
- mode    - see `PetscCopyMode`

  Output Parameter:
. mapping - new mapping data structure

  Level: advanced

  Notes:
  There is one integer value in indices per block and it represents the actual indices bs*idx + j, where j=0,..,bs-1

  For "small" problems when using `ISGlobalToLocalMappingApply()` and `ISGlobalToLocalMappingApplyBlock()`, the `ISLocalToGlobalMappingType`
  of `ISLOCALTOGLOBALMAPPINGBASIC` will be used; this uses more memory but is faster; this approach is not scalable for extremely large mappings.

  For large problems `ISLOCALTOGLOBALMAPPINGHASH` is used; this is scalable.
  Use `ISLocalToGlobalMappingSetType()` or call `ISLocalToGlobalMappingSetFromOptions()` with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.
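
  Example Usage:
  A minimal sketch creating a mapping with block size 2 whose block indices are {0,3},
  i.e. the global indices {0,1,6,7}:
.vb
  ISLocalToGlobalMapping ltog;
  PetscInt               blockidx[2] = {0, 3};

  PetscCall(ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, 2, 2, blockidx, PETSC_COPY_VALUES, &ltog));
  /* ... */
  PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
.ve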

.seealso: [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingSetFromOptions()`, `ISLOCALTOGLOBALMAPPINGBASIC`, `ISLOCALTOGLOBALMAPPINGHASH`,
          `ISLocalToGlobalMappingSetType()`, `ISLocalToGlobalMappingType`
@*/
PetscErrorCode ISLocalToGlobalMappingCreate(MPI_Comm comm, PetscInt bs, PetscInt n, const PetscInt indices[], PetscCopyMode mode, ISLocalToGlobalMapping *mapping)
{
  PetscInt *in;

  PetscFunctionBegin;
  *mapping = NULL;
  PetscCall(ISInitializePackage());

  PetscCall(PetscHeaderCreate(*mapping, IS_LTOGM_CLASSID, "ISLocalToGlobalMapping", "Local to global mapping", "IS", comm, ISLocalToGlobalMappingDestroy, ISLocalToGlobalMappingView));
  (*mapping)->n  = n;
  (*mapping)->bs = bs;
  if (mode == PETSC_COPY_VALUES) {
    PetscCall(PetscMalloc1(n, &in));
    PetscCall(PetscArraycpy(in, indices, n));
    (*mapping)->indices         = in;
    (*mapping)->dealloc_indices = PETSC_TRUE;
  } else if (mode == PETSC_OWN_POINTER) {
    (*mapping)->indices         = (PetscInt *)indices;
    (*mapping)->dealloc_indices = PETSC_TRUE;
  } else if (mode == PETSC_USE_POINTER) {
    (*mapping)->indices = (PetscInt *)indices;
  } else SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid mode %d", mode);
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscFunctionList ISLocalToGlobalMappingList = NULL;

/*@
  ISLocalToGlobalMappingSetFromOptions - Set mapping options from the options database.

  Not Collective

  Input Parameter:
. mapping - mapping data structure

  Level: advanced

.seealso: [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLOCALTOGLOBALMAPPINGBASIC`, `ISLOCALTOGLOBALMAPPINGHASH`,
          `ISLocalToGlobalMappingSetType()`, `ISLocalToGlobalMappingType`
@*/
PetscErrorCode ISLocalToGlobalMappingSetFromOptions(ISLocalToGlobalMapping mapping)
{
  char                       type[256];
  ISLocalToGlobalMappingType defaulttype = "Not set";
  PetscBool                  flg;

  PetscFunctionBegin;
  PetscCall(ISLocalToGlobalMappingRegisterAll());
  PetscObjectOptionsBegin((PetscObject)mapping);
  PetscCall(PetscOptionsFList("-islocaltoglobalmapping_type", "ISLocalToGlobalMapping method", "ISLocalToGlobalMappingSetType", ISLocalToGlobalMappingList, (char *)(((PetscObject)mapping)->type_name) ? ((PetscObject)mapping)->type_name : defaulttype, type, 256, &flg));
  if (flg) PetscCall(ISLocalToGlobalMappingSetType(mapping, type));
  PetscOptionsEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
  ordering and a global parallel ordering.

  Not Collective

  Input Parameter:
. mapping - mapping data structure

  Level: advanced

.seealso: [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingCreate()`
@*/
PetscErrorCode ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
{
  PetscFunctionBegin;
  if (!*mapping) PetscFunctionReturn(PETSC_SUCCESS);
  if (--((PetscObject)(*mapping))->refct > 0) {
    *mapping = NULL;
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  if ((*mapping)->dealloc_indices) PetscCall(PetscFree((*mapping)->indices));
  PetscCall(PetscFree((*mapping)->info_procs));
  PetscCall(PetscFree((*mapping)->info_numprocs));
  if ((*mapping)->info_indices) {
    PetscInt i;

    PetscCall(PetscFree(((*mapping)->info_indices)[0]));
    for (i = 1; i < (*mapping)->info_nproc; i++) PetscCall(PetscFree(((*mapping)->info_indices)[i]));
    PetscCall(PetscFree((*mapping)->info_indices));
  }
  if ((*mapping)->info_nodei) PetscCall(PetscFree(((*mapping)->info_nodei)[0]));
  PetscCall(PetscFree2((*mapping)->info_nodec, (*mapping)->info_nodei));
  if ((*mapping)->ops->destroy) PetscCall((*(*mapping)->ops->destroy)(*mapping));
  PetscCall(PetscHeaderDestroy(mapping));
  *mapping = NULL;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  ISLocalToGlobalMappingApplyIS - Creates from an `IS` in the local numbering
  a new index set using the global numbering defined in an `ISLocalToGlobalMapping`
  context.

  Collective

  Input Parameters:
+ mapping - mapping between local and global numbering
- is      - index set in local numbering

  Output Parameter:
. newis - index set in global numbering

  Level: advanced

  Note:
  The output `IS` will have the same communicator as the input `IS`.
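
  Example Usage:
  A minimal sketch (locis is an existing `IS` in the local numbering):
.vb
  IS globis;

  PetscCall(ISLocalToGlobalMappingApplyIS(mapping, locis, &globis));
  /* ... use globis ... */
  PetscCall(ISDestroy(&globis));
.ve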

.seealso: [](sec_scatter), `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingCreate()`,
          `ISLocalToGlobalMappingDestroy()`, `ISGlobalToLocalMappingApply()`
@*/
PetscErrorCode ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping, IS is, IS *newis)
{
  PetscInt        n, *idxout;
  const PetscInt *idxin;

  PetscFunctionBegin;
  PetscCall(ISGetLocalSize(is, &n));
  PetscCall(ISGetIndices(is, &idxin));
  PetscCall(PetscMalloc1(n, &idxout));
  PetscCall(ISLocalToGlobalMappingApply(mapping, n, idxin, idxout));
  PetscCall(ISRestoreIndices(is, &idxin));
  PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)is), n, idxout, PETSC_OWN_POINTER, newis));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
  and converts them to the global numbering.

  Not Collective

  Input Parameters:
+ mapping - the local to global mapping context
. N       - number of integers
- in      - input indices in local numbering

  Output Parameter:
. out - indices in global numbering

  Level: advanced

  Note:
  The in and out array parameters may be identical.
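
  Example Usage:
  A minimal sketch (assuming mapping was created with block size 2 and block indices {0,3},
  so it represents the global indices {0,1,6,7}):
.vb
  PetscInt in[2] = {1, 2}; /* local indices */
  PetscInt out[2];

  PetscCall(ISLocalToGlobalMappingApply(mapping, 2, in, out)); /* out = {1, 6} */
.ve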

.seealso: [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingApplyBlock()`, `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingDestroy()`,
          `ISLocalToGlobalMappingApplyIS()`, `AOCreateBasic()`, `AOApplicationToPetsc()`,
          `AOPetscToApplication()`, `ISGlobalToLocalMappingApply()`
@*/
PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping, PetscInt N, const PetscInt in[], PetscInt out[])
{
  PetscInt i, bs, Nmax;

  PetscFunctionBegin;
  bs   = mapping->bs;
  Nmax = bs * mapping->n;
  if (bs == 1) {
    const PetscInt *idx = mapping->indices;
    for (i = 0; i < N; i++) {
      if (in[i] < 0) {
        out[i] = in[i];
        continue;
      }
      PetscCheck(in[i] < Nmax, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Local index %" PetscInt_FMT " too large %" PetscInt_FMT " (max) at %" PetscInt_FMT, in[i], Nmax - 1, i);
      out[i] = idx[in[i]];
    }
  } else {
    const PetscInt *idx = mapping->indices;
    for (i = 0; i < N; i++) {
      if (in[i] < 0) {
        out[i] = in[i];
        continue;
      }
      PetscCheck(in[i] < Nmax, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Local index %" PetscInt_FMT " too large %" PetscInt_FMT " (max) at %" PetscInt_FMT, in[i], Nmax - 1, i);
      out[i] = idx[in[i] / bs] * bs + (in[i] % bs);
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  ISLocalToGlobalMappingApplyBlock - Takes a list of integers in a local block numbering and converts them to the global block numbering

  Not Collective

  Input Parameters:
+ mapping - the local to global mapping context
. N       - number of integers
- in      - input indices in local block numbering

  Output Parameter:
. out - indices in global block numbering

  Level: advanced

  Note:
  The in and out array parameters may be identical.

  Example:
  If the index values are {0,1,6,7} set with a call to `ISLocalToGlobalMappingCreate`(`PETSC_COMM_SELF`,2,2,{0,3}) then the mapping applied to 0
  (the first block) would produce 0 and the mapping applied to 1 (the second block) would produce 3.

.seealso: [](sec_scatter), `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingDestroy()`,
          `ISLocalToGlobalMappingApplyIS()`, `AOCreateBasic()`, `AOApplicationToPetsc()`,
          `AOPetscToApplication()`, `ISGlobalToLocalMappingApply()`
@*/
PetscErrorCode ISLocalToGlobalMappingApplyBlock(ISLocalToGlobalMapping mapping, PetscInt N, const PetscInt in[], PetscInt out[])
{
  PetscInt        i, Nmax;
  const PetscInt *idx;

  PetscFunctionBegin;
  Nmax = mapping->n;
  idx  = mapping->indices;
  for (i = 0; i < N; i++) {
    if (in[i] < 0) {
      out[i] = in[i];
      continue;
    }
    PetscCheck(in[i] < Nmax, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Local block index %" PetscInt_FMT " too large %" PetscInt_FMT " (max) at %" PetscInt_FMT, in[i], Nmax - 1, i);
    out[i] = idx[in[i]];
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
  specified with a global numbering.

  Not Collective

  Input Parameters:
+ mapping - mapping between local and global numbering
. type    - `IS_GTOLM_MASK` - maps global indices with no local value to -1 in the output list (i.e., mask them),
            `IS_GTOLM_DROP` - drops the indices with no local value from the output list
. n       - number of global indices to map
- idx     - global indices to map

  Output Parameters:
+ nout   - number of indices in output array (if type == `IS_GTOLM_MASK` then nout = n)
- idxout - local index of each global index; one must pass in an array long enough
           to hold all the indices. You can call `ISGlobalToLocalMappingApply()` with
           idxout == NULL to determine the required length (returned in nout)
           and then allocate the required space and call `ISGlobalToLocalMappingApply()`
           a second time to set the values.

  Level: advanced

  Notes:
  Either nout or idxout may be NULL. idx and idxout may be identical.

  For "small" problems when using `ISGlobalToLocalMappingApply()` and `ISGlobalToLocalMappingApplyBlock()`, the `ISLocalToGlobalMappingType` of `ISLOCALTOGLOBALMAPPINGBASIC` will be used;
  this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems `ISLOCALTOGLOBALMAPPINGHASH` is used; this is scalable.
  Use `ISLocalToGlobalMappingSetType()` or call `ISLocalToGlobalMappingSetFromOptions()` with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.
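
  Example Usage:
  A minimal sketch of the two-pass pattern with `IS_GTOLM_DROP` (n global indices in the
  array global; count first, then fill):
.vb
  PetscInt nout, *local;

  PetscCall(ISGlobalToLocalMappingApply(mapping, IS_GTOLM_DROP, n, global, &nout, NULL));
  PetscCall(PetscMalloc1(nout, &local));
  PetscCall(ISGlobalToLocalMappingApply(mapping, IS_GTOLM_DROP, n, global, &nout, local));
.ve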

  Developer Note:
  The manual page states that idx and idxout may be identical but the calling
  sequence declares idx as const so it cannot be the same as idxout.

.seealso: [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingApply()`, `ISGlobalToLocalMappingApplyBlock()`, `ISLocalToGlobalMappingCreate()`,
          `ISLocalToGlobalMappingDestroy()`
@*/
PetscErrorCode ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping, ISGlobalToLocalMappingMode type, PetscInt n, const PetscInt idx[], PetscInt *nout, PetscInt idxout[])
{
  PetscFunctionBegin;
  if (!mapping->data) PetscCall(ISGlobalToLocalMappingSetUp(mapping));
  PetscUseTypeMethod(mapping, globaltolocalmappingapply, type, n, idx, nout, idxout);
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  ISGlobalToLocalMappingApplyIS - Creates from an `IS` in the global numbering
  a new index set using the local numbering defined in an `ISLocalToGlobalMapping`
  context.

  Not Collective

  Input Parameters:
+ mapping - mapping between local and global numbering
. type    - `IS_GTOLM_MASK` - maps global indices with no local value to -1 in the output list (i.e., mask them),
            `IS_GTOLM_DROP` - drops the indices with no local value from the output list
- is      - index set in global numbering

  Output Parameter:
. newis - index set in local numbering

  Level: advanced

  Note:
  The output `IS` will be sequential, as it encodes a purely local operation

.seealso: [](sec_scatter), `ISGlobalToLocalMapping`, `ISGlobalToLocalMappingApply()`, `ISLocalToGlobalMappingCreate()`,
          `ISLocalToGlobalMappingDestroy()`
@*/
PetscErrorCode ISGlobalToLocalMappingApplyIS(ISLocalToGlobalMapping mapping, ISGlobalToLocalMappingMode type, IS is, IS *newis)
{
  PetscInt        n, nout, *idxout;
  const PetscInt *idxin;

  PetscFunctionBegin;
  PetscCall(ISGetLocalSize(is, &n));
  PetscCall(ISGetIndices(is, &idxin));
  if (type == IS_GTOLM_MASK) {
    PetscCall(PetscMalloc1(n, &idxout));
  } else {
    PetscCall(ISGlobalToLocalMappingApply(mapping, type, n, idxin, &nout, NULL));
    PetscCall(PetscMalloc1(nout, &idxout));
  }
  PetscCall(ISGlobalToLocalMappingApply(mapping, type, n, idxin, &nout, idxout));
  PetscCall(ISRestoreIndices(is, &idxin));
  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, nout, idxout, PETSC_OWN_POINTER, newis));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  ISGlobalToLocalMappingApplyBlock - Provides the local block numbering for a list of integers
  specified with a block global numbering.

  Not Collective

  Input Parameters:
+ mapping - mapping between local and global numbering
. type    - `IS_GTOLM_MASK` - maps global indices with no local value to -1 in the output list (i.e., mask them),
            `IS_GTOLM_DROP` - drops the indices with no local value from the output list
. n       - number of global indices to map
- idx     - global indices to map

  Output Parameters:
+ nout   - number of indices in output array (if type == `IS_GTOLM_MASK` then nout = n)
- idxout - local index of each global index, one must pass in an array long enough
           to hold all the indices. You can call `ISGlobalToLocalMappingApplyBlock()` with
           idxout == NULL to determine the required length (returned in nout)
           and then allocate the required space and call `ISGlobalToLocalMappingApplyBlock()`
           a second time to set the values.

  Level: advanced

  Notes:
  Either nout or idxout may be NULL. idx and idxout may be identical.

  For "small" problems when using `ISGlobalToLocalMappingApply()` and `ISGlobalToLocalMappingApplyBlock()`, the `ISLocalToGlobalMappingType` of `ISLOCALTOGLOBALMAPPINGBASIC` will be used;
  this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems `ISLOCALTOGLOBALMAPPINGHASH` is used; this is scalable.
  Use `ISLocalToGlobalMappingSetType()` or call `ISLocalToGlobalMappingSetFromOptions()` with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.

  Developer Note:
  The manual page states that idx and idxout may be identical but the calling
  sequence declares idx as const so it cannot be the same as idxout.

.seealso: [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingApply()`, `ISGlobalToLocalMappingApply()`, `ISLocalToGlobalMappingCreate()`,
          `ISLocalToGlobalMappingDestroy()`
@*/
PetscErrorCode ISGlobalToLocalMappingApplyBlock(ISLocalToGlobalMapping mapping, ISGlobalToLocalMappingMode type, PetscInt n, const PetscInt idx[], PetscInt *nout, PetscInt idxout[])
{
  PetscFunctionBegin;
  if (!mapping->data) PetscCall(ISGlobalToLocalMappingSetUp(mapping));
  PetscUseTypeMethod(mapping, globaltolocalmappingapplyblock, type, n, idx, nout, idxout);
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  ISLocalToGlobalMappingGetBlockInfo - Gets the neighbor information for each processor and
  each index shared by more than one processor

  Collective

  Input Parameter:
. mapping - the mapping from local to global indexing

  Output Parameters:
+ nproc    - number of processors that are connected to this one
. procs    - neighboring processors
. numprocs - number of indices for each subdomain (processor)
- indices  - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)

  Level: advanced

  Fortran Usage:
.vb
  PetscInt indices[nproc][numprocmax],ierr)
  ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
  ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
                                PetscInt indices[nproc][numprocmax],ierr)
.ve

  Fortran Note:
  There is no `ISLocalToGlobalMappingRestoreInfo()` in Fortran. You must make sure that procs[], numprocs[] and
  indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.

.seealso: [](sec_scatter), `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingCreate()`,
          `ISLocalToGlobalMappingRestoreInfo()`
@*/
PetscErrorCode ISLocalToGlobalMappingGetBlockInfo(ISLocalToGlobalMapping mapping, PetscInt *nproc, PetscInt *procs[], PetscInt *numprocs[], PetscInt **indices[])
{
  PetscFunctionBegin;
  if (mapping->info_cached) {
    *nproc    = mapping->info_nproc;
    *procs    = mapping->info_procs;
    *numprocs = mapping->info_numprocs;
    *indices  = mapping->info_indices;
  } else {
    PetscCall(ISLocalToGlobalMappingGetBlockInfo_Private(mapping, nproc, procs, numprocs, indices));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping mapping, PetscInt *nproc, PetscInt *procs[], PetscInt *numprocs[], PetscInt **indices[])
{
  PetscMPIInt  size, rank, tag1, tag2, tag3, *len, *source, imdex;
  PetscInt     i, n = mapping->n, Ng, ng, max = 0, *lindices = mapping->indices;
  PetscInt    *nprocs, *owner, nsends, *sends, j, *starts, nmax, nrecvs, *recvs, proc;
  PetscInt     cnt, scale, *ownedsenders, *nownedsenders, rstart;
  PetscInt     node, nownedm, nt, *sends2, nsends2, *starts2, *lens2, *dest, nrecvs2, *starts3, *recvs2, k, *bprocs, *tmp;
  PetscInt     first_procs, first_numprocs, *first_indices;
  MPI_Request *recv_waits, *send_waits;
  MPI_Status   recv_status, *send_status, *recv_statuses;
  MPI_Comm     comm;
  PetscBool    debug = PETSC_FALSE;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)mapping, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  if (size == 1) {
    *nproc = 0;
    *procs = NULL;
    PetscCall(PetscNew(numprocs));
    (*numprocs)[0] = 0;
    PetscCall(PetscNew(indices));
    (*indices)[0] = NULL;
    /* save info for reuse */
    mapping->info_nproc    = *nproc;
    mapping->info_procs    = *procs;
    mapping->info_numprocs = *numprocs;
    mapping->info_indices  = *indices;
    mapping->info_cached   = PETSC_TRUE;
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCall(PetscOptionsGetBool(((PetscObject)mapping)->options, NULL, "-islocaltoglobalmappinggetinfo_debug", &debug, NULL));

  /*
    Notes on ISLocalToGlobalMappingGetBlockInfo

    globally owned node - the nodes that have been assigned to this processor in global
      numbering, just for this routine.

    nontrivial globally owned node - node assigned to this processor that is on a subdomain
      boundary (i.e. it has more than one local owner)

    locally owned node - node that exists on this processor's subdomain

    nontrivial locally owned node - node that is not in the interior (i.e. it has more than one
      local subdomain)
  */
  PetscCall(PetscObjectGetNewTag((PetscObject)mapping, &tag1));
  PetscCall(PetscObjectGetNewTag((PetscObject)mapping, &tag2));
  PetscCall(PetscObjectGetNewTag((PetscObject)mapping, &tag3));

  for (i = 0; i < n; i++) {
    if (lindices[i] > max) max = lindices[i];
  }
  PetscCall(MPIU_Allreduce(&max, &Ng, 1, MPIU_INT, MPI_MAX, comm));
  Ng++;
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  scale = Ng / size + 1;
  ng    = scale;
  if (rank == size - 1) ng = Ng - scale * (size - 1);
  ng     = PetscMax(1, ng);
  rstart = scale * rank;

  /* determine ownership ranges of global indices */
  PetscCall(PetscMalloc1(2 * size, &nprocs));
  PetscCall(PetscArrayzero(nprocs, 2 * size));

  /* determine owners of each local node */
  PetscCall(PetscMalloc1(n, &owner));
  for (i = 0; i < n; i++) {
    proc                 = lindices[i] / scale; /* processor that globally owns this index */
    nprocs[2 * proc + 1] = 1;                   /* processor globally owns at least one of ours */
    owner[i]             = proc;
    nprocs[2 * proc]++; /* count of how many that processor globally owns of ours */
  }
  nsends = 0;
  for (i = 0; i < size; i++) nsends += nprocs[2 * i + 1];
  PetscCall(PetscInfo(mapping, "Number of global owners for my local data %" PetscInt_FMT "\n", nsends));

  /* inform other processors of number of messages and max length */
  PetscCall(PetscMaxSum(comm, nprocs, &nmax, &nrecvs));
  PetscCall(PetscInfo(mapping, "Number of local owners for my global data %" PetscInt_FMT "\n", nrecvs));

  /* post receives for owned rows */
  PetscCall(PetscMalloc1((2 * nrecvs + 1) * (nmax + 1), &recvs));
  PetscCall(PetscMalloc1(nrecvs + 1, &recv_waits));
  for (i = 0; i < nrecvs; i++) PetscCallMPI(MPI_Irecv(recvs + 2 * nmax * i, 2 * nmax, MPIU_INT, MPI_ANY_SOURCE, tag1, comm, recv_waits + i));

  /* pack messages containing lists of local nodes to owners */
  PetscCall(PetscMalloc1(2 * n + 1, &sends));
  PetscCall(PetscMalloc1(size + 1, &starts));
  starts[0] = 0;
  for (i = 1; i < size; i++) starts[i] = starts[i - 1] + 2 * nprocs[2 * i - 2];
  for (i = 0; i < n; i++) {
    sends[starts[owner[i]]++] = lindices[i];
    sends[starts[owner[i]]++] = i;
  }
  PetscCall(PetscFree(owner));
  starts[0] = 0;
  for (i = 1; i < size; i++) starts[i] = starts[i - 1] + 2 * nprocs[2 * i - 2];

  /* send the messages */
  PetscCall(PetscMalloc1(nsends + 1, &send_waits));
  PetscCall(PetscMalloc1(nsends + 1, &dest));
  cnt = 0;
  for (i = 0; i < size; i++) {
    if (nprocs[2 * i]) {
      PetscCallMPI(MPI_Isend(sends + starts[i], 2 * nprocs[2 * i], MPIU_INT, i, tag1, comm, send_waits + cnt));
      dest[cnt] = i;
      cnt++;
    }
  }
  PetscCall(PetscFree(starts));

  /* wait on receives */
  PetscCall(PetscMalloc1(nrecvs + 1, &source));
  PetscCall(PetscMalloc1(nrecvs + 1, &len));
  cnt = nrecvs;
  PetscCall(PetscCalloc1(ng + 1, &nownedsenders));
  while (cnt) {
    PetscCallMPI(MPI_Waitany(nrecvs, recv_waits, &imdex, &recv_status));
    /* unpack receives into our local space */
    PetscCallMPI(MPI_Get_count(&recv_status, MPIU_INT, &len[imdex]));
    source[imdex] = recv_status.MPI_SOURCE;
    len[imdex]    = len[imdex] / 2;
    /* count how many local owners for each of my global owned indices */
    for (i = 0; i < len[imdex]; i++) nownedsenders[recvs[2 * imdex * nmax + 2 * i] - rstart]++;
    cnt--;
  }
  PetscCall(PetscFree(recv_waits));

  /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
  nownedm = 0;
  for (i = 0; i < ng; i++) {
    if (nownedsenders[i] > 1) nownedm += nownedsenders[i];
  }

  /* create single array to contain rank of all local owners of each globally owned index */
  PetscCall(PetscMalloc1(nownedm + 1, &ownedsenders));
  PetscCall(PetscMalloc1(ng + 1, &starts));
  starts[0] = 0;
  for (i = 1; i < ng; i++) {
    if (nownedsenders[i - 1] > 1) starts[i] = starts[i - 1] + nownedsenders[i - 1];
    else starts[i] = starts[i - 1];
  }

  /* for each nontrivial globally owned node list all arriving processors */
  for (i = 0; i < nrecvs; i++) {
    for (j = 0; j < len[i]; j++) {
      node = recvs[2 * i * nmax + 2 * j] - rstart;
      if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
    }
  }

  if (debug) { /* ----------------------------------- */
    starts[0] = 0;
    for (i = 1; i < ng; i++) {
      if (nownedsenders[i - 1] > 1) starts[i] = starts[i - 1] + nownedsenders[i - 1];
      else starts[i] = starts[i - 1];
    }
    for (i = 0; i < ng; i++) {
      if (nownedsenders[i] > 1) {
        PetscCall(PetscSynchronizedPrintf(comm, "[%d] global node %" PetscInt_FMT " local owner processors: ", rank, i + rstart));
        for (j = 0; j < nownedsenders[i]; j++) PetscCall(PetscSynchronizedPrintf(comm, "%" PetscInt_FMT " ", ownedsenders[starts[i] + j]));
        PetscCall(PetscSynchronizedPrintf(comm, "\n"));
      }
    }
    PetscCall(PetscSynchronizedFlush(comm, PETSC_STDOUT));
  } /* ----------------------------------- */

  /* wait on original sends */
  if (nsends) {
    PetscCall(PetscMalloc1(nsends, &send_status));
    PetscCallMPI(MPI_Waitall(nsends, send_waits, send_status));
    PetscCall(PetscFree(send_status));
  }
  PetscCall(PetscFree(send_waits));
  PetscCall(PetscFree(sends));
  PetscCall(PetscFree(nprocs));

  /* pack messages to send back to local owners */
  starts[0] = 0;
  for (i = 1; i < ng; i++) {
    if (nownedsenders[i - 1] > 1) starts[i] = starts[i - 1] + nownedsenders[i - 1];
    else starts[i] = starts[i - 1];
  }
  nsends2 = nrecvs;
  PetscCall(PetscMalloc1(nsends2 + 1, &nprocs)); /* length of each message */
  for (i = 0; i < nrecvs; i++) {
    nprocs[i] = 1;
    for (j = 0; j < len[i]; j++) {
      node = recvs[2 * i * nmax + 2 * j] - rstart;
      if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
    }
  }
  nt = 0;
  for (i = 0; i < nsends2; i++) nt += nprocs[i];

  PetscCall(PetscMalloc1(nt + 1, &sends2));
  PetscCall(PetscMalloc1(nsends2 + 1, &starts2));

  starts2[0] = 0;
  for (i = 1; i < nsends2; i++) starts2[i] = starts2[i - 1] + nprocs[i - 1];
  /*
     Each message is 1 + nprocs[i] long, and consists of
       (0) the number of nodes being sent back
       (1) the local node number,
       (2) the number of processors sharing it,
       (3) the processors sharing it
  */
  for (i = 0; i < nsends2; i++) {
    cnt                = 1;
    sends2[starts2[i]] = 0;
    for (j = 0; j < len[i]; j++) {
      node = recvs[2 * i * nmax + 2 * j] - rstart;
      if (nownedsenders[node] > 1) {
        sends2[starts2[i]]++;
        sends2[starts2[i] + cnt++] = recvs[2 * i * nmax + 2 * j + 1];
        sends2[starts2[i] + cnt++] = nownedsenders[node];
        PetscCall(PetscArraycpy(&sends2[starts2[i] + cnt], &ownedsenders[starts[node]], nownedsenders[node]));
        cnt += nownedsenders[node];
      }
    }
  }

  /* receive the message lengths */
  nrecvs2 = nsends;
  PetscCall(PetscMalloc1(nrecvs2 + 1, &lens2));
  PetscCall(PetscMalloc1(nrecvs2 + 1, &starts3));
  PetscCall(PetscMalloc1(nrecvs2 + 1, &recv_waits));
  for (i = 0; i < nrecvs2; i++) PetscCallMPI(MPI_Irecv(&lens2[i], 1, MPIU_INT, dest[i], tag2, comm, recv_waits + i));

  /* send the message lengths */
  for (i = 0; i < nsends2; i++) PetscCallMPI(MPI_Send(&nprocs[i], 1, MPIU_INT, source[i], tag2, comm));

  /* wait on receives of lens */
  if (nrecvs2) {
    PetscCall(PetscMalloc1(nrecvs2, &recv_statuses));
    PetscCallMPI(MPI_Waitall(nrecvs2, recv_waits, recv_statuses));
    PetscCall(PetscFree(recv_statuses));
  }
  PetscCall(PetscFree(recv_waits));

  starts3[0] = 0;
  nt         = 0;
  for (i = 0; i < nrecvs2 - 1; i++) {
    starts3[i + 1] = starts3[i] + lens2[i];
    nt += lens2[i];
  }
  if (nrecvs2) nt += lens2[nrecvs2 - 1];

  PetscCall(PetscMalloc1(nt + 1, &recvs2));
  PetscCall(PetscMalloc1(nrecvs2 + 1, &recv_waits));
  for (i = 0; i < nrecvs2; i++) PetscCallMPI(MPI_Irecv(recvs2 + starts3[i], lens2[i], MPIU_INT, dest[i], tag3, comm, recv_waits + i));

  /* send the messages */
  PetscCall(PetscMalloc1(nsends2 + 1, &send_waits));
  for (i = 0; i < nsends2; i++) PetscCallMPI(MPI_Isend(sends2 + starts2[i], nprocs[i], MPIU_INT, source[i], tag3, comm, send_waits + i));

  /* wait on receives */
  if (nrecvs2) {
    PetscCall(PetscMalloc1(nrecvs2, &recv_statuses));
    PetscCallMPI(MPI_Waitall(nrecvs2, recv_waits, recv_statuses));
    PetscCall(PetscFree(recv_statuses));
  }
  PetscCall(PetscFree(recv_waits));
  PetscCall(PetscFree(nprocs));

  if (debug) { /* ----------------------------------- */
    cnt = 0;
    for (i = 0; i < nrecvs2; i++) {
      nt = recvs2[cnt++];
      for (j = 0; j < nt; j++) {
        PetscCall(PetscSynchronizedPrintf(comm, "[%d] local node %" PetscInt_FMT " number of subdomains %" PetscInt_FMT ": ", rank, recvs2[cnt], recvs2[cnt + 1]));
        for (k = 0; k < recvs2[cnt + 1]; k++) PetscCall(PetscSynchronizedPrintf(comm, "%" PetscInt_FMT " ", recvs2[cnt + 2 + k]));
        cnt += 2 + recvs2[cnt + 1];
        PetscCall(PetscSynchronizedPrintf(comm, "\n"));
      }
    }
    PetscCall(PetscSynchronizedFlush(comm, PETSC_STDOUT));
  } /* ----------------------------------- */

  /* count number of subdomains for each local node */
  PetscCall(PetscCalloc1(size, &nprocs));
  cnt = 0;
  for (i = 0; i < nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j = 0; j < nt; j++) {
      for (k = 0; k < recvs2[cnt + 1]; k++) nprocs[recvs2[cnt + 2 + k]]++;
      cnt += 2 + recvs2[cnt + 1];
    }
  }
  nt = 0;
  for (i = 0; i < size; i++) nt += (nprocs[i] > 0);
  *nproc = nt;
  PetscCall(PetscMalloc1(nt + 1, procs));
  PetscCall(PetscMalloc1(nt + 1, numprocs));
  PetscCall(PetscMalloc1(nt + 1, indices));
  for (i = 0; i < nt + 1; i++) (*indices)[i] = NULL;
  PetscCall(PetscMalloc1(size, &bprocs));
  cnt = 0;
  for (i = 0; i < size; i++) {
    if (nprocs[i] > 0) {
      bprocs[i]        = cnt;
      (*procs)[cnt]    = i;
      (*numprocs)[cnt] = nprocs[i];
      PetscCall(PetscMalloc1(nprocs[i], &(*indices)[cnt]));
      cnt++;
    }
  }

  /* make the list of subdomains for each nontrivial local node */
  PetscCall(PetscArrayzero(*numprocs, nt));
  cnt = 0;
  for (i = 0; i < nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j = 0; j < nt; j++) {
      for (k = 0; k < recvs2[cnt + 1]; k++) (*indices)[bprocs[recvs2[cnt + 2 + k]]][(*numprocs)[bprocs[recvs2[cnt + 2 + k]]]++] = recvs2[cnt];
      cnt += 2 + recvs2[cnt + 1];
    }
  }
  PetscCall(PetscFree(bprocs));
  PetscCall(PetscFree(recvs2));

  /* sort the node indexing by their global numbers */
  nt = *nproc;
  for (i = 0; i < nt; i++) {
    PetscCall(PetscMalloc1((*numprocs)[i], &tmp));
    for (j = 0; j < (*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
    PetscCall(PetscSortIntWithArray((*numprocs)[i], tmp, (*indices)[i]));
    PetscCall(PetscFree(tmp));
  }

  if (debug) { /* ----------------------------------- */
    nt = *nproc;
    for (i = 0; i < nt; i++) {
      PetscCall(PetscSynchronizedPrintf(comm, "[%d] subdomain %" PetscInt_FMT " number of indices %" PetscInt_FMT ": ", rank, (*procs)[i], (*numprocs)[i]));
      for (j = 0; j < (*numprocs)[i]; j++) PetscCall(PetscSynchronizedPrintf(comm, "%" PetscInt_FMT " ", (*indices)[i][j]));
      PetscCall(PetscSynchronizedPrintf(comm, "\n"));
    }
    PetscCall(PetscSynchronizedFlush(comm, PETSC_STDOUT));
  } /* ----------------------------------- */

  /* wait on sends */
  if (nsends2) {
    PetscCall(PetscMalloc1(nsends2, &send_status));
    PetscCallMPI(MPI_Waitall(nsends2, send_waits, send_status));
    PetscCall(PetscFree(send_status));
  }

  PetscCall(PetscFree(starts3));
  PetscCall(PetscFree(dest));
  PetscCall(PetscFree(send_waits));

  PetscCall(PetscFree(nownedsenders));
  PetscCall(PetscFree(ownedsenders));
  PetscCall(PetscFree(starts));
  PetscCall(PetscFree(starts2));
  PetscCall(PetscFree(lens2));

  PetscCall(PetscFree(source));
  PetscCall(PetscFree(len));
  PetscCall(PetscFree(recvs));
  PetscCall(PetscFree(nprocs));
  PetscCall(PetscFree(sends2));

  /* put the information about myself as the first entry in the list */
  first_procs    = (*procs)[0];
  first_numprocs = (*numprocs)[0];
  first_indices  = (*indices)[0];
  for (i = 0; i < *nproc; i++) {
    if ((*procs)[i] == rank) {
      (*procs)[0]    = (*procs)[i];
      (*numprocs)[0] = (*numprocs)[i];
      (*indices)[0]  = (*indices)[i];
      (*procs)[i]    = first_procs;
      (*numprocs)[i] = first_numprocs;
      (*indices)[i]  = first_indices;
      break;
    }
  }

  /* save info for reuse */
  mapping->info_nproc    = *nproc;
  mapping->info_procs    = *procs;
  mapping->info_numprocs = *numprocs;
  mapping->info_indices  = *indices;
  mapping->info_cached   = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  ISLocalToGlobalMappingRestoreBlockInfo - Frees the memory allocated by `ISLocalToGlobalMappingGetBlockInfo()`

  Collective

  Input Parameter:
. mapping - the mapping from local to global indexing

  Output Parameters:
+ nproc    - number of processors that are connected to this one
. procs    - neighboring processors
. numprocs - number of indices for each processor
- indices  - indices of local nodes shared with neighbor (sorted by global numbering)

  Level: advanced

.seealso: [](sec_scatter), `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingCreate()`,
          `ISLocalToGlobalMappingGetInfo()`
@*/
PetscErrorCode ISLocalToGlobalMappingRestoreBlockInfo(ISLocalToGlobalMapping mapping, PetscInt *nproc, PetscInt *procs[], PetscInt *numprocs[], PetscInt **indices[])
{
  PetscFunctionBegin;
  if (mapping->info_free) {
    PetscCall(PetscFree(*numprocs));
    if (*indices) {
      PetscInt i;

      PetscCall(PetscFree((*indices)[0]));
      for (i = 1; i < *nproc; i++) PetscCall(PetscFree((*indices)[i]));
      PetscCall(PetscFree(*indices));
    }
  }
  *nproc    = 0;
  *procs    = NULL;
  *numprocs = NULL;
  *indices  = NULL;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
  each index shared by more than one processor

  Collective

  Input Parameter:
. mapping - the mapping from local to global indexing

  Output Parameters:
+ nproc    - number of processors that are connected to this one
. procs    - neighboring processors
. numprocs - number of indices for each subdomain (processor)
- indices  - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)

  Level: advanced

  Note:
  The user needs to call `ISLocalToGlobalMappingRestoreInfo()` when the data is no longer needed.

  Fortran Usage:
.vb
  PetscInt indices[nproc][numprocmax],ierr)
  ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
  ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
                                PetscInt indices[nproc][numprocmax],ierr)
.ve

  Fortran Note:
  There is no `ISLocalToGlobalMappingRestoreInfo()` in Fortran. You must make sure that procs[], numprocs[] and
  indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.

.seealso: [](sec_scatter), `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingCreate()`,
          `ISLocalToGlobalMappingRestoreInfo()`
@*/
1485: PetscErrorCode ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping, PetscInt *nproc, PetscInt *procs[], PetscInt *numprocs[], PetscInt **indices[])
1486: {
1487: PetscInt **bindices = NULL, *bnumprocs = NULL, bs, i, j, k;
1489: PetscFunctionBegin;
1491: bs = mapping->bs;
1492: PetscCall(ISLocalToGlobalMappingGetBlockInfo(mapping, nproc, procs, &bnumprocs, &bindices));
1493: if (bs > 1) { /* we need to expand the cached info */
1494: PetscCall(PetscCalloc1(*nproc, indices));
1495: PetscCall(PetscCalloc1(*nproc, numprocs));
1496: for (i = 0; i < *nproc; i++) {
1497: PetscCall(PetscMalloc1(bs * bnumprocs[i], &(*indices)[i]));
1498: for (j = 0; j < bnumprocs[i]; j++) {
1499: for (k = 0; k < bs; k++) (*indices)[i][j * bs + k] = bs * bindices[i][j] + k;
1500: }
1501: (*numprocs)[i] = bnumprocs[i] * bs;
1502: }
1503: mapping->info_free = PETSC_TRUE;
1504: } else {
1505: *numprocs = bnumprocs;
1506: *indices = bindices;
1507: }
1508: PetscFunctionReturn(PETSC_SUCCESS);
1509: }
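A minimal sketch of the non-block variant (editor's illustration): each neighbor in `neigh` shares `n_shared[i]` local indices, listed in `shared[i]`; the matching restore call frees the expanded copies created above when the block size is greater than 1.
.vb
  PetscInt n_neigh, *neigh, *n_shared, **shared;

  PetscCall(ISLocalToGlobalMappingGetInfo(mapping, &n_neigh, &neigh, &n_shared, &shared));
  for (PetscInt i = 0; i < n_neigh; i++) {
    PetscCall(PetscPrintf(PETSC_COMM_SELF, "rank %" PetscInt_FMT " shares %" PetscInt_FMT " indices\n", neigh[i], n_shared[i]));
  }
  PetscCall(ISLocalToGlobalMappingRestoreInfo(mapping, &n_neigh, &neigh, &n_shared, &shared));
.ve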
1511: /*@C
1512: ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by `ISLocalToGlobalMappingGetInfo()`
1514: Collective
1516: Input Parameter:
1517: . mapping - the mapping from local to global indexing
1519: Output Parameters:
1520: + nproc - number of processors that are connected to this one
1521: . procs - neighboring processors
1522: . numprocs - number of indices for each processor
1523: - indices - indices of local nodes shared with neighbor (sorted by global numbering)
1525: Level: advanced
1527: .seealso: [](sec_scatter), `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingCreate()`,
1528: `ISLocalToGlobalMappingGetInfo()`
1529: @*/
1530: PetscErrorCode ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping, PetscInt *nproc, PetscInt *procs[], PetscInt *numprocs[], PetscInt **indices[])
1531: {
1532: PetscFunctionBegin;
1533: PetscCall(ISLocalToGlobalMappingRestoreBlockInfo(mapping, nproc, procs, numprocs, indices));
1534: PetscFunctionReturn(PETSC_SUCCESS);
1535: }
1537: /*@C
1538: ISLocalToGlobalMappingGetNodeInfo - Gets the neighbor information for each MPI rank
1540: Collective
1542: Input Parameter:
1543: . mapping - the mapping from local to global indexing
1545: Output Parameters:
1546: + nnodes - number of local nodes (same as returned by `ISLocalToGlobalMappingGetSize()`)
1547: . count - number of neighboring processors per node
1548: - indices - indices of processes sharing the node (sorted)
1550: Level: advanced
1552: Note:
1553: The user needs to call `ISLocalToGlobalMappingRestoreNodeInfo()` when the data is no longer needed.
1555: .seealso: [](sec_scatter), `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingCreate()`,
1556: `ISLocalToGlobalMappingGetInfo()`, `ISLocalToGlobalMappingRestoreNodeInfo()`
1557: @*/
1558: PetscErrorCode ISLocalToGlobalMappingGetNodeInfo(ISLocalToGlobalMapping mapping, PetscInt *nnodes, PetscInt *count[], PetscInt **indices[])
1559: {
1560: PetscInt n;
1562: PetscFunctionBegin;
1564: PetscCall(ISLocalToGlobalMappingGetSize(mapping, &n));
1565: if (!mapping->info_nodec) {
1566: PetscInt i, m, n_neigh, *neigh, *n_shared, **shared;
1568: PetscCall(PetscMalloc2(n + 1, &mapping->info_nodec, n, &mapping->info_nodei));
1569: PetscCall(ISLocalToGlobalMappingGetInfo(mapping, &n_neigh, &neigh, &n_shared, &shared));
1570: for (i = 0; i < n; i++) mapping->info_nodec[i] = 1;
1571: m = n;
1572: mapping->info_nodec[n] = 0;
1573: for (i = 1; i < n_neigh; i++) {
1574: PetscInt j;
1576: m += n_shared[i];
1577: for (j = 0; j < n_shared[i]; j++) mapping->info_nodec[shared[i][j]] += 1;
1578: }
1579: if (n) PetscCall(PetscMalloc1(m, &mapping->info_nodei[0]));
1580: for (i = 1; i < n; i++) mapping->info_nodei[i] = mapping->info_nodei[i - 1] + mapping->info_nodec[i - 1];
1581: PetscCall(PetscArrayzero(mapping->info_nodec, n));
1582: for (i = 0; i < n; i++) {
1583: mapping->info_nodec[i] = 1;
1584: mapping->info_nodei[i][0] = neigh[0];
1585: }
1586: for (i = 1; i < n_neigh; i++) {
1587: PetscInt j;
1589: for (j = 0; j < n_shared[i]; j++) {
1590: PetscInt k = shared[i][j];
1592: mapping->info_nodei[k][mapping->info_nodec[k]] = neigh[i];
1593: mapping->info_nodec[k] += 1;
1594: }
1595: }
1596: for (i = 0; i < n; i++) PetscCall(PetscSortRemoveDupsInt(&mapping->info_nodec[i], mapping->info_nodei[i]));
1597: PetscCall(ISLocalToGlobalMappingRestoreInfo(mapping, &n_neigh, &neigh, &n_shared, &shared));
1598: }
1599: if (nnodes) *nnodes = n;
1600: if (count) *count = mapping->info_nodec;
1601: if (indices) *indices = mapping->info_nodei;
1602: PetscFunctionReturn(PETSC_SUCCESS);
1603: }
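A minimal sketch (editor's illustration) of the node-wise view built above: for each local node, `count[i]` gives how many ranks share it and `ranks[i]` lists them in sorted order. The arrays are cached in the mapping, so the restore call only resets the output arguments.
.vb
  PetscInt nnodes, *count, **ranks;

  PetscCall(ISLocalToGlobalMappingGetNodeInfo(mapping, &nnodes, &count, &ranks));
  for (PetscInt i = 0; i < nnodes; i++) {
    /* node i is shared by the count[i] sorted ranks in ranks[i][0..count[i]) */
  }
  PetscCall(ISLocalToGlobalMappingRestoreNodeInfo(mapping, &nnodes, &count, &ranks));
.ve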
1605: /*@C
1606: ISLocalToGlobalMappingRestoreNodeInfo - Frees the memory allocated by `ISLocalToGlobalMappingGetNodeInfo()`
1608: Collective
1610: Input Parameter:
1611: . mapping - the mapping from local to global indexing
1613: Output Parameters:
1614: + nnodes - number of local nodes
1615: . count - number of neighboring processors per node
1616: - indices - indices of processes sharing the node (sorted)
1618: Level: advanced
1620: .seealso: [](sec_scatter), `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingCreate()`,
1621: `ISLocalToGlobalMappingGetInfo()`
1622: @*/
1623: PetscErrorCode ISLocalToGlobalMappingRestoreNodeInfo(ISLocalToGlobalMapping mapping, PetscInt *nnodes, PetscInt *count[], PetscInt **indices[])
1624: {
1625: PetscFunctionBegin;
1627: if (nnodes) *nnodes = 0;
1628: if (count) *count = NULL;
1629: if (indices) *indices = NULL;
1630: PetscFunctionReturn(PETSC_SUCCESS);
1631: }
1633: /*MC
1634: ISLocalToGlobalMappingGetIndicesF90 - Get global indices for every local point that is mapped
1636: Synopsis:
1637: ISLocalToGlobalMappingGetIndicesF90(ISLocalToGlobalMapping ltog, PetscInt, pointer :: array(:), integer ierr)
1639: Not Collective
1641: Input Parameter:
1642: . ltog - the local to global mapping
1644: Output Parameter:
1645: . array - array of indices, the length of this array may be obtained with `ISLocalToGlobalMappingGetSize()`
1647: Level: advanced
1649: Note:
1650: Use `ISLocalToGlobalMappingRestoreIndicesF90()` when you no longer need access to the data
1652: .seealso: [](sec_fortranarrays), [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingGetIndices()`, `ISLocalToGlobalMappingRestoreIndices()`, `ISLocalToGlobalMappingRestoreIndicesF90()`
1653: M*/
1655: /*MC
1656: ISLocalToGlobalMappingRestoreIndicesF90 - restores the global indices for every local point that is mapped obtained with `ISLocalToGlobalMappingGetIndicesF90()`
1658: Synopsis:
1659: ISLocalToGlobalMappingRestoreIndicesF90(ISLocalToGlobalMapping ltog, PetscInt, pointer :: array(:), integer ierr)
1661: Not Collective
1663: Input Parameters:
1664: + ltog - the local to global mapping
1665: - array - array of indices, the length of this array may be obtained with `ISLocalToGlobalMappingGetSize()`
1667: Level: advanced
1669: .seealso: [](sec_fortranarrays), [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingGetIndices()`, `ISLocalToGlobalMappingRestoreIndices()`, `ISLocalToGlobalMappingGetIndicesF90()`
1670: M*/
1672: /*@C
1673: ISLocalToGlobalMappingGetIndices - Get global indices for every local point that is mapped
1675: Not Collective
1677: Input Parameter:
1678: . ltog - local to global mapping
1680: Output Parameter:
1681: . array - array of indices, the length of this array may be obtained with `ISLocalToGlobalMappingGetSize()`
1683: Level: advanced
1685: Note:
1686: `ISLocalToGlobalMappingGetSize()` returns the length of this array
1688: .seealso: [](sec_scatter), `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingRestoreIndices()`, `ISLocalToGlobalMappingGetBlockIndices()`, `ISLocalToGlobalMappingRestoreBlockIndices()`
1689: @*/
1690: PetscErrorCode ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog, const PetscInt **array)
1691: {
1692: PetscFunctionBegin;
1695: if (ltog->bs == 1) {
1696: *array = ltog->indices;
1697: } else {
1698: PetscInt *jj, k, i, j, n = ltog->n, bs = ltog->bs;
1699: const PetscInt *ii;
1701: PetscCall(PetscMalloc1(bs * n, &jj));
1702: *array = jj;
1703: k = 0;
1704: ii = ltog->indices;
1705: for (i = 0; i < n; i++)
1706: for (j = 0; j < bs; j++) jj[k++] = bs * ii[i] + j;
1707: }
1708: PetscFunctionReturn(PETSC_SUCCESS);
1709: }
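A minimal sketch (editor's illustration): printing the local-to-global map entry by entry. For block size greater than 1 the routine above allocates an expanded copy, so the matching restore call is required to avoid a leak.
.vb
  const PetscInt *idx;
  PetscInt        n;

  PetscCall(ISLocalToGlobalMappingGetSize(ltog, &n));
  PetscCall(ISLocalToGlobalMappingGetIndices(ltog, &idx));
  for (PetscInt i = 0; i < n; i++) PetscCall(PetscPrintf(PETSC_COMM_SELF, "local %" PetscInt_FMT " -> global %" PetscInt_FMT "\n", i, idx[i]));
  PetscCall(ISLocalToGlobalMappingRestoreIndices(ltog, &idx));
.ve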
1711: /*@C
1712: ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with `ISLocalToGlobalMappingGetIndices()`
1714: Not Collective
1716: Input Parameters:
1717: + ltog - local to global mapping
1718: - array - array of indices
1720: Level: advanced
1722: .seealso: [](sec_scatter), `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingGetIndices()`
1723: @*/
1724: PetscErrorCode ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog, const PetscInt **array)
1725: {
1726: PetscFunctionBegin;
1729: PetscCheck(ltog->bs != 1 || *array == ltog->indices, PETSC_COMM_SELF, PETSC_ERR_ARG_BADPTR, "Trying to return mismatched pointer");
1731: if (ltog->bs > 1) PetscCall(PetscFree(*(void **)array));
1732: PetscFunctionReturn(PETSC_SUCCESS);
1733: }
1735: /*@C
1736: ISLocalToGlobalMappingGetBlockIndices - Get global indices for every local block
1738: Not Collective
1740: Input Parameter:
1741: . ltog - local to global mapping
1743: Output Parameter:
1744: . array - array of indices
1746: Level: advanced
1748: .seealso: [](sec_scatter), `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingRestoreBlockIndices()`
1749: @*/
1750: PetscErrorCode ISLocalToGlobalMappingGetBlockIndices(ISLocalToGlobalMapping ltog, const PetscInt **array)
1751: {
1752: PetscFunctionBegin;
1755: *array = ltog->indices;
1756: PetscFunctionReturn(PETSC_SUCCESS);
1757: }
1759: /*@C
1760: ISLocalToGlobalMappingRestoreBlockIndices - Restore indices obtained with `ISLocalToGlobalMappingGetBlockIndices()`
1762: Not Collective
1764: Input Parameters:
1765: + ltog - local to global mapping
1766: - array - array of indices
1768: Level: advanced
1770: .seealso: [](sec_scatter), `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingGetIndices()`
1771: @*/
1772: PetscErrorCode ISLocalToGlobalMappingRestoreBlockIndices(ISLocalToGlobalMapping ltog, const PetscInt **array)
1773: {
1774: PetscFunctionBegin;
1777: PetscCheck(*array == ltog->indices, PETSC_COMM_SELF, PETSC_ERR_ARG_BADPTR, "Trying to return mismatched pointer");
1778: *array = NULL;
1779: PetscFunctionReturn(PETSC_SUCCESS);
1780: }
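A minimal sketch (editor's illustration) of the block variant: with block size bs, point j of block i maps to global index bs*bidx[i]+j, matching the expansion loop in `ISLocalToGlobalMappingGetIndices()` above.
.vb
  const PetscInt *bidx;
  PetscInt        bs, n;

  PetscCall(ISLocalToGlobalMappingGetBlockSize(ltog, &bs));
  PetscCall(ISLocalToGlobalMappingGetSize(ltog, &n)); /* n = (number of blocks) * bs */
  PetscCall(ISLocalToGlobalMappingGetBlockIndices(ltog, &bidx));
  /* bidx[0 .. n/bs) are the global block indices; point j of block i is bs*bidx[i]+j */
  PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(ltog, &bidx));
.ve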
1782: /*@C
1783: ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings
1785: Not Collective
1787: Input Parameters:
1788: + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
1789: . n - number of mappings to concatenate
1790: - ltogs - local to global mappings
1792: Output Parameter:
1793: . ltogcat - new mapping
1795: Level: advanced
1797: Note:
1798: This currently always returns a mapping with block size of 1
1800: Developer Note:
1801: If all the input mappings have the same block size we could easily handle that as a special case
1803: .seealso: [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingCreate()`
1804: @*/
1805: PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm, PetscInt n, const ISLocalToGlobalMapping ltogs[], ISLocalToGlobalMapping *ltogcat)
1806: {
1807: PetscInt i, cnt, m, *idx;
1809: PetscFunctionBegin;
1810: PetscCheck(n >= 0, comm, PETSC_ERR_ARG_OUTOFRANGE, "Must have a non-negative number of mappings, given %" PetscInt_FMT, n);
1814: for (cnt = 0, i = 0; i < n; i++) {
1815: PetscCall(ISLocalToGlobalMappingGetSize(ltogs[i], &m));
1816: cnt += m;
1817: }
1818: PetscCall(PetscMalloc1(cnt, &idx));
1819: for (cnt = 0, i = 0; i < n; i++) {
1820: const PetscInt *subidx;
1821: PetscCall(ISLocalToGlobalMappingGetSize(ltogs[i], &m));
1822: PetscCall(ISLocalToGlobalMappingGetIndices(ltogs[i], &subidx));
1823: PetscCall(PetscArraycpy(&idx[cnt], subidx, m));
1824: PetscCall(ISLocalToGlobalMappingRestoreIndices(ltogs[i], &subidx));
1825: cnt += m;
1826: }
1827: PetscCall(ISLocalToGlobalMappingCreate(comm, 1, cnt, idx, PETSC_OWN_POINTER, ltogcat));
1828: PetscFunctionReturn(PETSC_SUCCESS);
1829: }
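A small sketch (editor's illustration) of concatenation: the result maps local indices to the indices of the inputs laid end to end, with block size 1 as noted above.
.vb
  ISLocalToGlobalMapping maps[2], cat;
  const PetscInt         a[] = {0, 1, 2}, b[] = {10, 11};

  PetscCall(ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, 1, 3, a, PETSC_COPY_VALUES, &maps[0]));
  PetscCall(ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, 1, 2, b, PETSC_COPY_VALUES, &maps[1]));
  PetscCall(ISLocalToGlobalMappingConcatenate(PETSC_COMM_SELF, 2, maps, &cat));
  /* cat maps local 0,1,2,3,4 -> global 0,1,2,10,11 */
  PetscCall(ISLocalToGlobalMappingDestroy(&maps[0]));
  PetscCall(ISLocalToGlobalMappingDestroy(&maps[1]));
  PetscCall(ISLocalToGlobalMappingDestroy(&cat));
.ve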
1831: /*MC
1832: ISLOCALTOGLOBALMAPPINGBASIC - basic implementation of the `ISLocalToGlobalMapping` object. When `ISGlobalToLocalMappingApply()` is
1833: used this is good only for small and moderate size problems.
1835: Options Database Keys:
1836: . -islocaltoglobalmapping_type basic - select this method
1838: Level: beginner
1840: Developer Note:
1841: This stores all the mapping information on each MPI rank.
1843: .seealso: [](sec_scatter), `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingSetType()`, `ISLOCALTOGLOBALMAPPINGHASH`
1844: M*/
1845: PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Basic(ISLocalToGlobalMapping ltog)
1846: {
1847: PetscFunctionBegin;
1848: ltog->ops->globaltolocalmappingapply = ISGlobalToLocalMappingApply_Basic;
1849: ltog->ops->globaltolocalmappingsetup = ISGlobalToLocalMappingSetUp_Basic;
1850: ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Basic;
1851: ltog->ops->destroy = ISLocalToGlobalMappingDestroy_Basic;
1852: PetscFunctionReturn(PETSC_SUCCESS);
1853: }
1855: /*MC
1856: ISLOCALTOGLOBALMAPPINGHASH - hash implementation of the `ISLocalToGlobalMapping` object. When `ISGlobalToLocalMappingApply()` is
1857: used this is appropriate for large problems, since it avoids the per-rank memory footprint of `ISLOCALTOGLOBALMAPPINGBASIC`.
1859: Options Database Keys:
1860: . -islocaltoglobalmapping_type hash - select this method
1862: Level: beginner
1864: Note:
1865: This is selected automatically for large problems if the user does not set the type.
1867: .seealso: [](sec_scatter), `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingSetType()`, `ISLOCALTOGLOBALMAPPINGBASIC`
1868: M*/
1869: PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Hash(ISLocalToGlobalMapping ltog)
1870: {
1871: PetscFunctionBegin;
1872: ltog->ops->globaltolocalmappingapply = ISGlobalToLocalMappingApply_Hash;
1873: ltog->ops->globaltolocalmappingsetup = ISGlobalToLocalMappingSetUp_Hash;
1874: ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Hash;
1875: ltog->ops->destroy = ISLocalToGlobalMappingDestroy_Hash;
1876: PetscFunctionReturn(PETSC_SUCCESS);
1877: }
1879: /*@C
1880: ISLocalToGlobalMappingRegister - Registers a method for applying a global to local mapping with an `ISLocalToGlobalMapping`
1882: Not Collective
1884: Input Parameters:
1885: + sname - name of a new method
1886: - function - routine to create the method context
1888: Sample usage:
1889: .vb
1890: ISLocalToGlobalMappingRegister("my_mapper",MyCreate);
1891: .ve
1893: Then, your mapping can be chosen with the procedural interface via
1894: $ ISLocalToGlobalMappingSetType(ltog,"my_mapper")
1895: or at runtime via the option
1896: $ -islocaltoglobalmapping_type my_mapper
1898: Level: advanced
1900: Note:
1901: `ISLocalToGlobalMappingRegister()` may be called multiple times to add several user-defined mappings.
1903: .seealso: [](sec_scatter), `ISLocalToGlobalMappingRegisterAll()`, `ISLocalToGlobalMappingRegisterDestroy()`, `ISLOCALTOGLOBALMAPPINGBASIC`,
1904: `ISLOCALTOGLOBALMAPPINGHASH`, `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingApply()`
1905: @*/
1906: PetscErrorCode ISLocalToGlobalMappingRegister(const char sname[], PetscErrorCode (*function)(ISLocalToGlobalMapping))
1907: {
1908: PetscFunctionBegin;
1909: PetscCall(ISInitializePackage());
1910: PetscCall(PetscFunctionListAdd(&ISLocalToGlobalMappingList, sname, function));
1911: PetscFunctionReturn(PETSC_SUCCESS);
1912: }
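A hedged sketch of registering a custom type (editor's illustration; `ISLocalToGlobalMappingCreate_MyMapper` and `MyGlobalToLocalApply` are hypothetical names). Mirroring the built-in creation routines below, a custom creation routine fills in the ops table, which requires the private header included at the top of this file.
.vb
  /* hypothetical; a real implementation supplies MyGlobalToLocalApply etc. */
  static PetscErrorCode ISLocalToGlobalMappingCreate_MyMapper(ISLocalToGlobalMapping ltog)
  {
    PetscFunctionBegin;
    ltog->ops->globaltolocalmappingapply = MyGlobalToLocalApply;
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCall(ISLocalToGlobalMappingRegister("my_mapper", ISLocalToGlobalMappingCreate_MyMapper));
.ve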
1914: /*@C
1915: ISLocalToGlobalMappingSetType - Sets the implementation type `ISLocalToGlobalMapping` will use
1917: Logically Collective
1919: Input Parameters:
1920: + ltog - the `ISLocalToGlobalMapping` object
1921: - type - a known method
1923: Options Database Key:
1924: . -islocaltoglobalmapping_type <method> - Sets the method; use -help for a list of available methods (for instance, basic or hash)
1926: Level: intermediate
1928: Notes:
1929: See "petsc/include/petscis.h" for available methods
1931: Normally, it is best to use the `ISLocalToGlobalMappingSetFromOptions()` command and
1932: then set the `ISLocalToGlobalMappingType` from the options database rather than by using
1933: this routine.
1935: Developer Note:
1936: `ISLocalToGlobalMappingRegister()` is used to add new types to `ISLocalToGlobalMappingList` from which they
1937: are accessed by `ISLocalToGlobalMappingSetType()`.
1939: .seealso: [](sec_scatter), `ISLocalToGlobalMappingType`, `ISLocalToGlobalMappingRegister()`, `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingGetType()`
1940: @*/
1941: PetscErrorCode ISLocalToGlobalMappingSetType(ISLocalToGlobalMapping ltog, ISLocalToGlobalMappingType type)
1942: {
1943: PetscBool match;
1944: PetscErrorCode (*r)(ISLocalToGlobalMapping) = NULL;
1946: PetscFunctionBegin;
1950: PetscCall(PetscObjectTypeCompare((PetscObject)ltog, type, &match));
1951: if (match) PetscFunctionReturn(PETSC_SUCCESS);
1953: /* L2G maps defer type setup at globaltolocal calls, allow passing NULL here */
1954: if (type) {
1955: PetscCall(PetscFunctionListFind(ISLocalToGlobalMappingList, type, &r));
1956: PetscCheck(r, PetscObjectComm((PetscObject)ltog), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unable to find requested ISLocalToGlobalMapping type %s", type);
1957: }
1958: /* Destroy the previous private LTOG context */
1959: PetscTryTypeMethod(ltog, destroy);
1960: ltog->ops->destroy = NULL;
1962: PetscCall(PetscObjectChangeTypeName((PetscObject)ltog, type));
1963: if (r) PetscCall((*r)(ltog));
1964: PetscFunctionReturn(PETSC_SUCCESS);
1965: }
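A minimal sketch (editor's illustration) of selecting the hash implementation explicitly and then exercising the global-to-local direction it implements:
.vb
  ISLocalToGlobalMapping ltog;
  const PetscInt         globals[] = {3, 1, 4, 1, 5};
  PetscInt               g = 4, l, nl;

  PetscCall(ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, 1, 5, globals, PETSC_COPY_VALUES, &ltog));
  PetscCall(ISLocalToGlobalMappingSetType(ltog, ISLOCALTOGLOBALMAPPINGHASH));
  PetscCall(ISGlobalToLocalMappingApply(ltog, IS_GTOLM_DROP, 1, &g, &nl, &l)); /* l == 2, since globals[2] == 4 */
  PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
.ve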
1967: /*@C
1968: ISLocalToGlobalMappingGetType - Get the type of the `ISLocalToGlobalMapping`
1970: Not Collective
1972: Input Parameter:
1973: . ltog - the `ISLocalToGlobalMapping` object
1975: Output Parameter:
1976: . type - the type
1978: Level: intermediate
1980: .seealso: [](sec_scatter), `ISLocalToGlobalMappingType`, `ISLocalToGlobalMappingRegister()`, `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingSetType()`
1981: @*/
1982: PetscErrorCode ISLocalToGlobalMappingGetType(ISLocalToGlobalMapping ltog, ISLocalToGlobalMappingType *type)
1983: {
1984: PetscFunctionBegin;
1987: *type = ((PetscObject)ltog)->type_name;
1988: PetscFunctionReturn(PETSC_SUCCESS);
1989: }
1991: PetscBool ISLocalToGlobalMappingRegisterAllCalled = PETSC_FALSE;
1993: /*@C
1994: ISLocalToGlobalMappingRegisterAll - Registers all of the local to global mapping components in the `IS` package.
1996: Not Collective
1998: Level: advanced
2000: .seealso: [](sec_scatter), `ISRegister()`, `ISLocalToGlobalMappingRegister()`
2001: @*/
2002: PetscErrorCode ISLocalToGlobalMappingRegisterAll(void)
2003: {
2004: PetscFunctionBegin;
2005: if (ISLocalToGlobalMappingRegisterAllCalled) PetscFunctionReturn(PETSC_SUCCESS);
2006: ISLocalToGlobalMappingRegisterAllCalled = PETSC_TRUE;
2007: PetscCall(ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGBASIC, ISLocalToGlobalMappingCreate_Basic));
2008: PetscCall(ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGHASH, ISLocalToGlobalMappingCreate_Hash));
2009: PetscFunctionReturn(PETSC_SUCCESS);
2010: }