/* Actual source code: isltog.c (petsc-3.7.7, 2017-09-25) */
#include <petsc/private/isimpl.h> /*I "petscis.h" I*/
#include <petscsf.h>
#include <petscviewer.h>

PetscClassId IS_LTOGM_CLASSID;

static PetscErrorCode ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping,PetscInt*,PetscInt**,PetscInt**,PetscInt***);
PetscErrorCode ISG2LMapApply(ISLocalToGlobalMapping mapping,PetscInt n,const PetscInt in[],PetscInt out[])
{
  PetscInt i,start,end;

  if (!mapping->globals) {
    ISGlobalToLocalMappingApply(mapping,IS_GTOLM_MASK,0,0,0,0);
  }
  start = mapping->globalstart;
  end   = mapping->globalend;
  for (i=0; i<n; i++) {
    if (in[i] < 0)          out[i] = in[i];
    else if (in[i] < start) out[i] = -1;
    else if (in[i] > end)   out[i] = -1;
    else                    out[i] = mapping->globals[in[i] - start];
  }
  return(0);
}
/*@
   ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping

   Not Collective

   Input Parameter:
.  ltog - local to global mapping

   Output Parameter:
.  n - the number of entries in the local mapping, ISLocalToGlobalMappingGetIndices() returns an array of this length

   Level: advanced

   Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
{
  *n = mapping->bs*mapping->n;
  return(0);
}
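
/* Usage sketch (illustrative, not part of the original isltog.c; the helper name
   ExampleGetSizeUsage is hypothetical): query the mapping size to dimension the
   buffers before applying the mapping. */
static PetscErrorCode ExampleGetSizeUsage(ISLocalToGlobalMapping ltog)
{
  PetscInt n,i,*local,*global;

  ISLocalToGlobalMappingGetSize(ltog,&n); /* number of local points, i.e. bs * (number of blocks) */
  PetscMalloc1(n,&local);
  PetscMalloc1(n,&global);
  for (i=0; i<n; i++) local[i] = i;       /* identity in the local numbering */
  ISLocalToGlobalMappingApply(ltog,n,local,global);
  PetscFree(local);
  PetscFree(global);
  return(0);
}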
/*@C
   ISLocalToGlobalMappingView - View a local to global mapping

   Not Collective

   Input Parameters:
+  ltog - local to global mapping
-  viewer - viewer

   Level: advanced

   Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
{
  PetscInt    i;
  PetscMPIInt rank;
  PetscBool   iascii;

  if (!viewer) {
    PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping),&viewer);
  }

  MPI_Comm_rank(PetscObjectComm((PetscObject)mapping),&rank);
  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
  if (iascii) {
    PetscObjectPrintClassNamePrefixType((PetscObject)mapping,viewer);
    PetscViewerASCIIPushSynchronized(viewer);
    for (i=0; i<mapping->n; i++) {
      PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %D %D\n",rank,i,mapping->indices[i]);
    }
    PetscViewerFlush(viewer);
    PetscViewerASCIIPopSynchronized(viewer);
  }
  return(0);
}
/*@
   ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
   ordering and a global parallel ordering.

   Not collective

   Input Parameter:
.  is - index set containing the global numbers for each local number

   Output Parameter:
.  mapping - new mapping data structure

   Notes: the block size of the IS determines the block size of the mapping

   Level: advanced

   Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
{
  PetscInt       n,bs;
  const PetscInt *indices;
  MPI_Comm       comm;
  PetscBool      isblock;

  PetscObjectGetComm((PetscObject)is,&comm);
  ISGetLocalSize(is,&n);
  PetscObjectTypeCompare((PetscObject)is,ISBLOCK,&isblock);
  if (!isblock) {
    ISGetIndices(is,&indices);
    ISLocalToGlobalMappingCreate(comm,1,n,indices,PETSC_COPY_VALUES,mapping);
    ISRestoreIndices(is,&indices);
  } else {
    ISGetBlockSize(is,&bs);
    ISBlockGetIndices(is,&indices);
    ISLocalToGlobalMappingCreate(comm,bs,n/bs,indices,PETSC_COPY_VALUES,mapping);
    ISBlockRestoreIndices(is,&indices);
  }
  return(0);
}
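
/* Usage sketch (illustrative, not part of the original file; the indices {0,3,5,6}
   and the helper name are assumptions): an IS with block size 1 yields a bs=1 mapping. */
static PetscErrorCode ExampleCreateISUsage(void)
{
  IS                     is;
  ISLocalToGlobalMapping ltog;
  const PetscInt         idx[] = {0,3,5,6};

  ISCreateGeneral(PETSC_COMM_SELF,4,idx,PETSC_COPY_VALUES,&is);
  ISLocalToGlobalMappingCreateIS(is,&ltog); /* block size inherited from the IS (1 here) */
  ISDestroy(&is);
  ISLocalToGlobalMappingDestroy(&ltog);
  return(0);
}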
/*@C
   ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
   ordering and a global parallel ordering.

   Collective

   Input Parameters:
+  sf - star forest mapping contiguous local indices to (rank, offset)
-  start - first global index on this process

   Output Parameter:
.  mapping - new mapping data structure

   Level: advanced

   Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS()
@*/
PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf,PetscInt start,ISLocalToGlobalMapping *mapping)
{
  PetscInt       i,maxlocal,nroots,nleaves,*globals,*ltog;
  const PetscInt *ilocal;
  MPI_Comm       comm;

  PetscObjectGetComm((PetscObject)sf,&comm);
  PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,NULL);
  if (ilocal) {
    for (i=0,maxlocal=0; i<nleaves; i++) maxlocal = PetscMax(maxlocal,ilocal[i]+1);
  } else maxlocal = nleaves;
  PetscMalloc1(nroots,&globals);
  PetscMalloc1(maxlocal,&ltog);
  for (i=0; i<nroots; i++) globals[i] = start + i;
  for (i=0; i<maxlocal; i++) ltog[i] = -1;
  PetscSFBcastBegin(sf,MPIU_INT,globals,ltog);
  PetscSFBcastEnd(sf,MPIU_INT,globals,ltog);
  ISLocalToGlobalMappingCreate(comm,1,maxlocal,ltog,PETSC_OWN_POINTER,mapping);
  PetscFree(globals);
  return(0);
}
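
/* Usage sketch (illustrative; the one-leaf graph and the helper name are
   assumptions): every process contributes one leaf referencing root 0 on rank 0,
   so each leaf picks up rank 0's first global number, i.e. its value of start. */
static PetscErrorCode ExampleCreateSFUsage(MPI_Comm comm,PetscInt nroots,PetscInt start)
{
  PetscSF                sf;
  PetscSFNode            remote;
  ISLocalToGlobalMapping ltog;

  PetscSFCreate(comm,&sf);
  remote.rank  = 0; /* assumed layout: all leaves point at root 0 of rank 0 */
  remote.index = 0;
  PetscSFSetGraph(sf,nroots,1,NULL,PETSC_COPY_VALUES,&remote,PETSC_COPY_VALUES);
  ISLocalToGlobalMappingCreateSF(sf,start,&ltog);
  PetscSFDestroy(&sf);
  ISLocalToGlobalMappingDestroy(&ltog);
  return(0);
}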
/*@
   ISLocalToGlobalMappingGetBlockSize - Gets the blocksize of the mapping

   Not Collective

   Input Parameter:
.  mapping - mapping data structure

   Output Parameter:
.  bs - the blocksize

   Level: advanced

   Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
@*/
PetscErrorCode ISLocalToGlobalMappingGetBlockSize(ISLocalToGlobalMapping mapping,PetscInt *bs)
{
  *bs = mapping->bs;
  return(0);
}
/*@
   ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
   ordering and a global parallel ordering.

   Not Collective, but communicator may have more than one process

   Input Parameters:
+  comm - MPI communicator
.  bs - the block size
.  n - the number of local elements divided by the block size, or equivalently the number of block indices
.  indices - the global index for each local element; these do not need to be in increasing order (sorted), and they should not be scaled (i.e. multiplied) by the blocksize bs
-  mode - see PetscCopyMode

   Output Parameter:
.  mapping - new mapping data structure

   Notes: There is one integer value in indices per block, and it represents the actual indices bs*idx + j, where j=0,...,bs-1

   Level: advanced

   Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
@*/
PetscErrorCode ISLocalToGlobalMappingCreate(MPI_Comm comm,PetscInt bs,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping)
{
  PetscInt *in;

  *mapping = NULL;
  ISInitializePackage();

  PetscHeaderCreate(*mapping,IS_LTOGM_CLASSID,"ISLocalToGlobalMapping","Local to global mapping","IS",
                    comm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);
  (*mapping)->n             = n;
  (*mapping)->bs            = bs;
  (*mapping)->info_cached   = PETSC_FALSE;
  (*mapping)->info_free     = PETSC_FALSE;
  (*mapping)->info_procs    = NULL;
  (*mapping)->info_numprocs = NULL;
  (*mapping)->info_indices  = NULL;
  /*
    Do not create the global to local mapping. This is only created if
    ISGlobalToLocalMapping() is called
  */
  (*mapping)->globals = 0;
  if (mode == PETSC_COPY_VALUES) {
    PetscMalloc1(n,&in);
    PetscMemcpy(in,indices,n*sizeof(PetscInt));
    (*mapping)->indices = in;
    PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));
  } else if (mode == PETSC_OWN_POINTER) {
    (*mapping)->indices = (PetscInt*)indices;
    PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));
  } else SETERRQ(comm,PETSC_ERR_SUP,"Cannot currently use PETSC_USE_POINTER");
  PetscStrallocpy("basic",&((PetscObject)*mapping)->type_name);
  return(0);
}
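
/* Usage sketch (illustrative, not part of the original file): with bs=2 and block
   indices {0,3}, local point 2*i+j maps to global 2*idx[i]+j, so local points
   0..3 map to {0,1,6,7}; this is the example used by the ApplyBlock manual page below. */
static PetscErrorCode ExampleCreateUsage(void)
{
  ISLocalToGlobalMapping ltog;
  const PetscInt         blockidx[] = {0,3};

  ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,2,2,blockidx,PETSC_COPY_VALUES,&ltog);
  ISLocalToGlobalMappingDestroy(&ltog);
  return(0);
}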
/*@
   ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
   ordering and a global parallel ordering.

   Not Collective

   Input Parameter:
.  mapping - mapping data structure

   Level: advanced

.seealso: ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
{
  if (!*mapping) return(0);
  if (--((PetscObject)(*mapping))->refct > 0) {*mapping = 0; return(0);}
  PetscFree((*mapping)->indices);
  PetscFree((*mapping)->globals);
  PetscFree((*mapping)->info_procs);
  PetscFree((*mapping)->info_numprocs);
  if ((*mapping)->info_indices) {
    PetscInt i;

    PetscFree(((*mapping)->info_indices)[0]);
    for (i=1; i<(*mapping)->info_nproc; i++) {
      PetscFree(((*mapping)->info_indices)[i]);
    }
    PetscFree((*mapping)->info_indices);
  }
  PetscHeaderDestroy(mapping);
  *mapping = 0;
  return(0);
}
/*@
   ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
   a new index set using the global numbering defined in an ISLocalToGlobalMapping
   context.

   Not collective

   Input Parameters:
+  mapping - mapping between local and global numbering
-  is - index set in local numbering

   Output Parameter:
.  newis - index set in global numbering

   Level: advanced

   Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
@*/
PetscErrorCode ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
{
  PetscInt       n,*idxout;
  const PetscInt *idxin;

  ISGetLocalSize(is,&n);
  ISGetIndices(is,&idxin);
  PetscMalloc1(n,&idxout);
  ISLocalToGlobalMappingApply(mapping,n,idxin,idxout);
  ISRestoreIndices(is,&idxin);
  ISCreateGeneral(PetscObjectComm((PetscObject)is),n,idxout,PETSC_OWN_POINTER,newis);
  return(0);
}
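
/* Usage sketch (illustrative; the helper name is hypothetical): renumber a local
   IS into the global numbering, then release the new IS. */
static PetscErrorCode ExampleApplyISUsage(ISLocalToGlobalMapping ltog,IS locis)
{
  IS globis;

  ISLocalToGlobalMappingApplyIS(ltog,locis,&globis);
  /* ... use globis in APIs that expect global numbering ... */
  ISDestroy(&globis);
  return(0);
}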
/*@
   ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
   and converts them to the global numbering.

   Not collective

   Input Parameters:
+  mapping - the local to global mapping context
.  N - number of integers
-  in - input indices in local numbering

   Output Parameter:
.  out - indices in global numbering

   Notes:
   The in and out array parameters may be identical.

   Level: advanced

.seealso: ISLocalToGlobalMappingApplyBlock(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingDestroy(),
          ISLocalToGlobalMappingApplyIS(), AOCreateBasic(), AOApplicationToPetsc(),
          AOPetscToApplication(), ISGlobalToLocalMappingApply()

   Concepts: mapping^local to global
@*/
PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
{
  PetscInt i,bs,Nmax;

  bs   = mapping->bs;
  Nmax = bs*mapping->n;
  if (bs == 1) {
    const PetscInt *idx = mapping->indices;
    for (i=0; i<N; i++) {
      if (in[i] < 0) {
        out[i] = in[i];
        continue;
      }
      if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
      out[i] = idx[in[i]];
    }
  } else {
    const PetscInt *idx = mapping->indices;
    for (i=0; i<N; i++) {
      if (in[i] < 0) {
        out[i] = in[i];
        continue;
      }
      if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
      out[i] = idx[in[i]/bs]*bs + (in[i] % bs);
    }
  }
  return(0);
}
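
/* Usage sketch (illustrative, assuming the bs=2, blocks {0,3} mapping sketched
   after ISLocalToGlobalMappingCreate() above): negative entries pass through
   unchanged, and in/out may alias, so the translation can be done in place. */
static PetscErrorCode ExampleApplyUsage(ISLocalToGlobalMapping ltog)
{
  PetscInt idx[4] = {0,1,3,-1};

  ISLocalToGlobalMappingApply(ltog,4,idx,idx); /* -> {0,1,7,-1} for bs=2, blocks {0,3} */
  return(0);
}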
/*@
   ISLocalToGlobalMappingApplyBlock - Takes a list of integers in a local block numbering and converts them to the global block numbering

   Not collective

   Input Parameters:
+  mapping - the local to global mapping context
.  N - number of integers
-  in - input indices in local block numbering

   Output Parameter:
.  out - indices in global block numbering

   Notes:
   The in and out array parameters may be identical.

   Example:
   If the index values are {0,1,6,7}, set with a call to ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,2,2,{0,3},PETSC_COPY_VALUES,&mapping), then the mapping applied to 0
   (the first block) would produce 0 and the mapping applied to 1 (the second block) would produce 3.

   Level: advanced

.seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingDestroy(),
          ISLocalToGlobalMappingApplyIS(), AOCreateBasic(), AOApplicationToPetsc(),
          AOPetscToApplication(), ISGlobalToLocalMappingApply()

   Concepts: mapping^local to global
@*/
PetscErrorCode ISLocalToGlobalMappingApplyBlock(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
{
  {
    PetscInt       i,Nmax = mapping->n;
    const PetscInt *idx = mapping->indices;

    for (i=0; i<N; i++) {
      if (in[i] < 0) {
        out[i] = in[i];
        continue;
      }
      if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local block index %D too large %D (max) at %D",in[i],Nmax-1,i);
      out[i] = idx[in[i]];
    }
  }
  return(0);
}
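
/* Usage sketch (illustrative, same assumed bs=2, blocks {0,3} mapping as above):
   block inputs are translated block-wise, with no scaling by bs. */
static PetscErrorCode ExampleApplyBlockUsage(ISLocalToGlobalMapping ltog)
{
  PetscInt bidx[2] = {0,1};

  ISLocalToGlobalMappingApplyBlock(ltog,2,bidx,bidx); /* -> {0,3} */
  return(0);
}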
/* -----------------------------------------------------------------------------------------*/
/*
    Creates the global fields in the ISLocalToGlobalMapping structure
*/
static PetscErrorCode ISGlobalToLocalMappingSetUp_Private(ISLocalToGlobalMapping mapping)
{
  PetscInt i,*idx = mapping->indices,n = mapping->n,end,start,*globals;

  end   = 0;
  start = PETSC_MAX_INT;

  for (i=0; i<n; i++) {
    if (idx[i] < 0) continue;
    if (idx[i] < start) start = idx[i];
    if (idx[i] > end)   end   = idx[i];
  }
  if (start > end) {start = 0; end = -1;}
  mapping->globalstart = start;
  mapping->globalend   = end;

  PetscMalloc1(end-start+2,&globals);
  mapping->globals = globals;
  for (i=0; i<end-start+1; i++) globals[i] = -1;
  for (i=0; i<n; i++) {
    if (idx[i] < 0) continue;
    globals[idx[i] - start] = i;
  }

  PetscLogObjectMemory((PetscObject)mapping,(end-start+1)*sizeof(PetscInt));
  return(0);
}
/*@
   ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
   specified with a global numbering.

   Not collective

   Input Parameters:
+  mapping - mapping between local and global numbering
.  type - IS_GTOLM_MASK - replaces global indices with no local value with -1
          IS_GTOLM_DROP - drops the indices with no local value from the output list
.  n - number of global indices to map
-  idx - global indices to map

   Output Parameters:
+  nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
-  idxout - local index of each global index; one must pass in an array long enough
            to hold all the indices. You can call ISGlobalToLocalMappingApply() with
            idxout == NULL to determine the required length (returned in nout)
            and then allocate the required space and call ISGlobalToLocalMappingApply()
            a second time to set the values.

   Notes:
   Either nout or idxout may be NULL. idx and idxout may be identical.

   This is not scalable in memory usage. Each processor requires O(Nglobal) size
   array to compute these.

   Level: advanced

   Developer Note: The manual page states that idx and idxout may be identical but the calling
   sequence declares idx as const so it cannot be the same as idxout.

   Concepts: mapping^global to local

.seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingDestroy()
@*/
PetscErrorCode ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type,
                                           PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
{
  PetscInt i,*globals,nf = 0,tmp,start,end,bs;

  if (!mapping->globals) {
    ISGlobalToLocalMappingSetUp_Private(mapping);
  }
  globals = mapping->globals;
  start   = mapping->globalstart;
  end     = mapping->globalend;
  bs      = mapping->bs;

  if (type == IS_GTOLM_MASK) {
    if (idxout) {
      for (i=0; i<n; i++) {
        if (idx[i] < 0) idxout[i] = idx[i];
        else if (idx[i] < bs*start) idxout[i] = -1;
        else if (idx[i] > bs*(end+1)-1) idxout[i] = -1;
        else idxout[i] = bs*globals[idx[i]/bs - start] + (idx[i] % bs);
      }
    }
    if (nout) *nout = n;
  } else {
    if (idxout) {
      for (i=0; i<n; i++) {
        if (idx[i] < 0) continue;
        if (idx[i] < bs*start) continue;
        if (idx[i] > bs*(end+1)-1) continue;
        tmp = bs*globals[idx[i]/bs - start] + (idx[i] % bs);
        if (tmp < 0) continue;
        idxout[nf++] = tmp;
      }
    } else {
      for (i=0; i<n; i++) {
        if (idx[i] < 0) continue;
        if (idx[i] < bs*start) continue;
        if (idx[i] > bs*(end+1)-1) continue;
        tmp = bs*globals[idx[i]/bs - start] + (idx[i] % bs);
        if (tmp < 0) continue;
        nf++;
      }
    }
    if (nout) *nout = nf;
  }
  return(0);
}
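
/* Usage sketch (illustrative; the helper name is hypothetical): the two-pass
   IS_GTOLM_DROP idiom described in the manual page above -- the first call only
   counts the mappable indices, the second call fills the array. */
static PetscErrorCode ExampleGlobalToLocalUsage(ISLocalToGlobalMapping ltog,PetscInt n,const PetscInt gidx[])
{
  PetscInt nout,*lidx;

  ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,n,gidx,&nout,NULL);
  PetscMalloc1(nout,&lidx);
  ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,n,gidx,&nout,lidx);
  /* ... use lidx[0..nout-1] ... */
  PetscFree(lidx);
  return(0);
}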
/*@
   ISGlobalToLocalMappingApplyIS - Creates from an IS in the global numbering
   a new index set using the local numbering defined in an ISLocalToGlobalMapping
   context.

   Not collective

   Input Parameters:
+  mapping - mapping between local and global numbering
.  type - IS_GTOLM_MASK or IS_GTOLM_DROP (see ISGlobalToLocalMappingApply())
-  is - index set in global numbering

   Output Parameter:
.  newis - index set in local numbering

   Level: advanced

   Concepts: mapping^local to global

.seealso: ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingDestroy()
@*/
PetscErrorCode ISGlobalToLocalMappingApplyIS(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type,IS is,IS *newis)
{
  PetscInt       n,nout,*idxout;
  const PetscInt *idxin;

  ISGetLocalSize(is,&n);
  ISGetIndices(is,&idxin);
  if (type == IS_GTOLM_MASK) {
    PetscMalloc1(n,&idxout);
  } else {
    ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,NULL);
    PetscMalloc1(nout,&idxout);
  }
  ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,idxout);
  ISRestoreIndices(is,&idxin);
  ISCreateGeneral(PetscObjectComm((PetscObject)is),nout,idxout,PETSC_OWN_POINTER,newis);
  return(0);
}
/*@
   ISGlobalToLocalMappingApplyBlock - Provides the local block numbering for a list of integers
   specified with a block global numbering.

   Not collective

   Input Parameters:
+  mapping - mapping between local and global numbering
.  type - IS_GTOLM_MASK - replaces global indices with no local value with -1
          IS_GTOLM_DROP - drops the indices with no local value from the output list
.  n - number of global indices to map
-  idx - global indices to map

   Output Parameters:
+  nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
-  idxout - local index of each global index; one must pass in an array long enough
            to hold all the indices. You can call ISGlobalToLocalMappingApplyBlock() with
            idxout == NULL to determine the required length (returned in nout)
            and then allocate the required space and call ISGlobalToLocalMappingApplyBlock()
            a second time to set the values.

   Notes:
   Either nout or idxout may be NULL. idx and idxout may be identical.

   This is not scalable in memory usage. Each processor requires O(Nglobal) size
   array to compute these.

   Level: advanced

   Developer Note: The manual page states that idx and idxout may be identical but the calling
   sequence declares idx as const so it cannot be the same as idxout.

   Concepts: mapping^global to local

.seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingDestroy()
@*/
PetscErrorCode ISGlobalToLocalMappingApplyBlock(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type,
                                                PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
{
  PetscInt i,*globals,nf = 0,tmp,start,end;

  if (!mapping->globals) {
    ISGlobalToLocalMappingSetUp_Private(mapping);
  }
  globals = mapping->globals;
  start   = mapping->globalstart;
  end     = mapping->globalend;

  if (type == IS_GTOLM_MASK) {
    if (idxout) {
      for (i=0; i<n; i++) {
        if (idx[i] < 0) idxout[i] = idx[i];
        else if (idx[i] < start) idxout[i] = -1;
        else if (idx[i] > end) idxout[i] = -1;
        else idxout[i] = globals[idx[i] - start];
      }
    }
    if (nout) *nout = n;
  } else {
    if (idxout) {
      for (i=0; i<n; i++) {
        if (idx[i] < 0) continue;
        if (idx[i] < start) continue;
        if (idx[i] > end) continue;
        tmp = globals[idx[i] - start];
        if (tmp < 0) continue;
        idxout[nf++] = tmp;
      }
    } else {
      for (i=0; i<n; i++) {
        if (idx[i] < 0) continue;
        if (idx[i] < start) continue;
        if (idx[i] > end) continue;
        tmp = globals[idx[i] - start];
        if (tmp < 0) continue;
        nf++;
      }
    }
    if (nout) *nout = nf;
  }
  return(0);
}
/*@C
   ISLocalToGlobalMappingGetBlockInfo - Gets the neighbor information for each processor and
   each index shared by more than one processor

   Collective on ISLocalToGlobalMapping

   Input Parameter:
.  mapping - the mapping from local to global indexing

   Output Parameters:
+  nproc - number of processors that are connected to this one
.  proc - neighboring processors
.  numproc - number of indices for each subdomain (processor)
-  indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)

   Level: advanced

   Concepts: mapping^local to global

   Fortran Usage:
$        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
$        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
$        PetscInt indices[nproc][numprocmax],ierr)
   There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
   indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingRestoreInfo()
@*/
PetscErrorCode ISLocalToGlobalMappingGetBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  if (mapping->info_cached) {
    *nproc    = mapping->info_nproc;
    *procs    = mapping->info_procs;
    *numprocs = mapping->info_numprocs;
    *indices  = mapping->info_indices;
  } else {
    ISLocalToGlobalMappingGetBlockInfo_Private(mapping,nproc,procs,numprocs,indices);
  }
  return(0);
}
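
/* Usage sketch (illustrative; the helper name is hypothetical): list the
   neighbors that share block indices with this process, then hand the
   (possibly cached) arrays back. */
static PetscErrorCode ExampleGetBlockInfoUsage(ISLocalToGlobalMapping ltog)
{
  PetscInt nproc,*procs,*numprocs,**indices,i;

  ISLocalToGlobalMappingGetBlockInfo(ltog,&nproc,&procs,&numprocs,&indices);
  for (i=0; i<nproc; i++) {
    PetscPrintf(PETSC_COMM_SELF,"neighbor %D shares %D block indices\n",procs[i],numprocs[i]);
  }
  ISLocalToGlobalMappingRestoreBlockInfo(ltog,&nproc,&procs,&numprocs,&indices);
  return(0);
}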
static PetscErrorCode ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscMPIInt size,rank,tag1,tag2,tag3,*len,*source,imdex;
  PetscInt    i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
  PetscInt    *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
  PetscInt    cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
  PetscInt    node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
  PetscInt    first_procs,first_numprocs,*first_indices;
  MPI_Request *recv_waits,*send_waits;
  MPI_Status  recv_status,*send_status,*recv_statuses;
  MPI_Comm    comm;
  PetscBool   debug = PETSC_FALSE;

  PetscObjectGetComm((PetscObject)mapping,&comm);
  MPI_Comm_size(comm,&size);
  MPI_Comm_rank(comm,&rank);
  if (size == 1) {
    *nproc = 0;
    *procs = NULL;
    PetscMalloc(sizeof(PetscInt),numprocs);
    (*numprocs)[0] = 0;
    PetscMalloc(sizeof(PetscInt*),indices);
    (*indices)[0] = NULL;
    /* save info for reuse */
    mapping->info_nproc    = *nproc;
    mapping->info_procs    = *procs;
    mapping->info_numprocs = *numprocs;
    mapping->info_indices  = *indices;
    mapping->info_cached   = PETSC_TRUE;
    return(0);
  }

  PetscOptionsGetBool(((PetscObject)mapping)->options,NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,NULL);
  /*
    Notes on ISLocalToGlobalMappingGetBlockInfo

    globally owned node - the nodes that have been assigned to this processor in global
                          numbering, just for this routine.

    nontrivial globally owned node - node assigned to this processor that is on a subdomain
                                     boundary (i.e. it has more than one local owner)

    locally owned node - node that exists on this processor's subdomain

    nontrivial locally owned node - node that is not in the interior (i.e. it belongs to more
                                    than one local subdomain)
  */
  PetscObjectGetNewTag((PetscObject)mapping,&tag1);
  PetscObjectGetNewTag((PetscObject)mapping,&tag2);
  PetscObjectGetNewTag((PetscObject)mapping,&tag3);

  for (i=0; i<n; i++) {
    if (lindices[i] > max) max = lindices[i];
  }
  MPIU_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);
  Ng++;
  MPI_Comm_size(comm,&size);
  MPI_Comm_rank(comm,&rank);
  scale = Ng/size + 1;
  ng    = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
  rstart = scale*rank;

  /* determine ownership ranges of global indices */
  PetscMalloc1(2*size,&nprocs);
  PetscMemzero(nprocs,2*size*sizeof(PetscInt));

  /* determine owners of each local node */
  PetscMalloc1(n,&owner);
  for (i=0; i<n; i++) {
    proc             = lindices[i]/scale; /* processor that globally owns this index */
    nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
    owner[i]         = proc;
    nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
  }
  nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
  PetscInfo1(mapping,"Number of global owners for my local data %D\n",nsends);

  /* inform other processors of number of messages and max length */
  PetscMaxSum(comm,nprocs,&nmax,&nrecvs);
  PetscInfo1(mapping,"Number of local owners for my global data %D\n",nrecvs);

  /* post receives for owned rows */
  PetscMalloc1((2*nrecvs+1)*(nmax+1),&recvs);
  PetscMalloc1(nrecvs+1,&recv_waits);
  for (i=0; i<nrecvs; i++) {
    MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);
  }

  /* pack messages containing lists of local nodes to owners */
  PetscMalloc1(2*n+1,&sends);
  PetscMalloc1(size+1,&starts);
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
  for (i=0; i<n; i++) {
    sends[starts[owner[i]]++] = lindices[i];
    sends[starts[owner[i]]++] = i;
  }
  PetscFree(owner);
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];

  /* send the messages */
  PetscMalloc1(nsends+1,&send_waits);
  PetscMalloc1(nsends+1,&dest);
  cnt = 0;
  for (i=0; i<size; i++) {
    if (nprocs[2*i]) {
      MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);
      dest[cnt] = i;
      cnt++;
    }
  }
  PetscFree(starts);

  /* wait on receives */
  PetscMalloc1(nrecvs+1,&source);
  PetscMalloc1(nrecvs+1,&len);
  cnt = nrecvs;
  PetscMalloc1(ng+1,&nownedsenders);
  PetscMemzero(nownedsenders,ng*sizeof(PetscInt));
  while (cnt) {
    MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);
    /* unpack receives into our local space */
    MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);
    source[imdex] = recv_status.MPI_SOURCE;
    len[imdex]    = len[imdex]/2;
    /* count how many local owners for each of my global owned indices */
    for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
    cnt--;
  }
  PetscFree(recv_waits);

  /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
  nowned  = 0;
  nownedm = 0;
  for (i=0; i<ng; i++) {
    if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
  }

  /* create single array to contain rank of all local owners of each globally owned index */
  PetscMalloc1(nownedm+1,&ownedsenders);
  PetscMalloc1(ng+1,&starts);
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }
  /* for each nontrivial globally owned node list all arriving processors */
  for (i=0; i<nrecvs; i++) {
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
    }
  }

  if (debug) { /* ----------------------------------- */
    starts[0] = 0;
    for (i=1; i<ng; i++) {
      if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
      else starts[i] = starts[i-1];
    }
    for (i=0; i<ng; i++) {
      if (nownedsenders[i] > 1) {
        PetscSynchronizedPrintf(comm,"[%d] global node %D local owner processors: ",rank,i+rstart);
        for (j=0; j<nownedsenders[i]; j++) {
          PetscSynchronizedPrintf(comm,"%D ",ownedsenders[starts[i]+j]);
        }
        PetscSynchronizedPrintf(comm,"\n");
      }
    }
    PetscSynchronizedFlush(comm,PETSC_STDOUT);
  } /* ----------------------------------- */

  /* wait on original sends */
  if (nsends) {
    PetscMalloc1(nsends,&send_status);
    MPI_Waitall(nsends,send_waits,send_status);
    PetscFree(send_status);
  }
  PetscFree(send_waits);
  PetscFree(sends);
  PetscFree(nprocs);
  /* pack messages to send back to local owners */
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }
  nsends2 = nrecvs;
  PetscMalloc1(nsends2+1,&nprocs); /* length of each message */
  for (i=0; i<nrecvs; i++) {
    nprocs[i] = 1;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
    }
  }
  nt = 0;
  for (i=0; i<nsends2; i++) nt += nprocs[i];

  PetscMalloc1(nt+1,&sends2);
  PetscMalloc1(nsends2+1,&starts2);

  starts2[0] = 0;
  for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
  /*
    Each message is 1 + nprocs[i] long, and consists of
      (0) the number of nodes being sent back
      (1) the local node number,
      (2) the number of processors sharing it,
      (3) the processors sharing it
  */
  for (i=0; i<nsends2; i++) {
    cnt = 1;
    sends2[starts2[i]] = 0;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) {
        sends2[starts2[i]]++;
        sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
        sends2[starts2[i]+cnt++] = nownedsenders[node];
        PetscMemcpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]*sizeof(PetscInt));
        cnt += nownedsenders[node];
      }
    }
  }
  /* receive the message lengths */
  nrecvs2 = nsends;
  PetscMalloc1(nrecvs2+1,&lens2);
  PetscMalloc1(nrecvs2+1,&starts3);
  PetscMalloc1(nrecvs2+1,&recv_waits);
  for (i=0; i<nrecvs2; i++) {
    MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);
  }

  /* send the message lengths */
  for (i=0; i<nsends2; i++) {
    MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);
  }

  /* wait on receives of lens */
  if (nrecvs2) {
    PetscMalloc1(nrecvs2,&recv_statuses);
    MPI_Waitall(nrecvs2,recv_waits,recv_statuses);
    PetscFree(recv_statuses);
  }
  PetscFree(recv_waits);

  starts3[0] = 0;
  nt         = 0;
  for (i=0; i<nrecvs2-1; i++) {
    starts3[i+1] = starts3[i] + lens2[i];
    nt          += lens2[i];
  }
  if (nrecvs2) nt += lens2[nrecvs2-1];

  PetscMalloc1(nt+1,&recvs2);
  PetscMalloc1(nrecvs2+1,&recv_waits);
  for (i=0; i<nrecvs2; i++) {
    MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);
  }

  /* send the messages */
  PetscMalloc1(nsends2+1,&send_waits);
  for (i=0; i<nsends2; i++) {
    MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);
  }

  /* wait on receives */
  if (nrecvs2) {
    PetscMalloc1(nrecvs2,&recv_statuses);
    MPI_Waitall(nrecvs2,recv_waits,recv_statuses);
    PetscFree(recv_statuses);
  }
  PetscFree(recv_waits);
  PetscFree(nprocs);
  if (debug) { /* ----------------------------------- */
    cnt = 0;
    for (i=0; i<nrecvs2; i++) {
      nt = recvs2[cnt++];
      for (j=0; j<nt; j++) {
        PetscSynchronizedPrintf(comm,"[%d] local node %D number of subdomains %D: ",rank,recvs2[cnt],recvs2[cnt+1]);
        for (k=0; k<recvs2[cnt+1]; k++) {
          PetscSynchronizedPrintf(comm,"%D ",recvs2[cnt+2+k]);
        }
        cnt += 2 + recvs2[cnt+1];
        PetscSynchronizedPrintf(comm,"\n");
      }
    }
    PetscSynchronizedFlush(comm,PETSC_STDOUT);
  } /* ----------------------------------- */

  /* count the number of subdomains for each local node */
  PetscMalloc1(size,&nprocs);
  PetscMemzero(nprocs,size*sizeof(PetscInt));
  cnt = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) nprocs[recvs2[cnt+2+k]]++;
      cnt += 2 + recvs2[cnt+1];
    }
  }
  nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
  *nproc = nt;
  PetscMalloc1(nt+1,procs);
  PetscMalloc1(nt+1,numprocs);
  PetscMalloc1(nt+1,indices);
  for (i=0; i<nt+1; i++) (*indices)[i] = NULL;
  PetscMalloc1(size,&bprocs);
  cnt = 0;
  for (i=0; i<size; i++) {
    if (nprocs[i] > 0) {
      bprocs[i]        = cnt;
      (*procs)[cnt]    = i;
      (*numprocs)[cnt] = nprocs[i];
      PetscMalloc1(nprocs[i],&(*indices)[cnt]);
      cnt++;
    }
  }
  /* make the list of subdomains for each nontrivial local node */
  PetscMemzero(*numprocs,nt*sizeof(PetscInt));
  cnt = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
      cnt += 2 + recvs2[cnt+1];
    }
  }
  PetscFree(bprocs);
  PetscFree(recvs2);

  /* sort the node indexing by their global numbers */
  nt = *nproc;
  for (i=0; i<nt; i++) {
    PetscMalloc1((*numprocs)[i],&tmp);
    for (j=0; j<(*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
    PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);
    PetscFree(tmp);
  }

  if (debug) { /* ----------------------------------- */
    nt = *nproc;
    for (i=0; i<nt; i++) {
      PetscSynchronizedPrintf(comm,"[%d] subdomain %D number of indices %D: ",rank,(*procs)[i],(*numprocs)[i]);
      for (j=0; j<(*numprocs)[i]; j++) {
        PetscSynchronizedPrintf(comm,"%D ",(*indices)[i][j]);
      }
      PetscSynchronizedPrintf(comm,"\n");
    }
    PetscSynchronizedFlush(comm,PETSC_STDOUT);
  } /* ----------------------------------- */
  /* wait on sends */
  if (nsends2) {
    PetscMalloc1(nsends2,&send_status);
    MPI_Waitall(nsends2,send_waits,send_status);
    PetscFree(send_status);
  }

  PetscFree(starts3);
  PetscFree(dest);
  PetscFree(send_waits);

  PetscFree(nownedsenders);
  PetscFree(ownedsenders);
  PetscFree(starts);
  PetscFree(starts2);
  PetscFree(lens2);

  PetscFree(source);
  PetscFree(len);
  PetscFree(recvs);
  PetscFree(nprocs);
  PetscFree(sends2);

  /* put the information about myself as the first entry in the list */
  first_procs    = (*procs)[0];
  first_numprocs = (*numprocs)[0];
  first_indices  = (*indices)[0];
  for (i=0; i<*nproc; i++) {
    if ((*procs)[i] == rank) {
      (*procs)[0]    = (*procs)[i];
      (*numprocs)[0] = (*numprocs)[i];
      (*indices)[0]  = (*indices)[i];
      (*procs)[i]    = first_procs;
      (*numprocs)[i] = first_numprocs;
      (*indices)[i]  = first_indices;
      break;
    }
  }

  /* save info for reuse */
  mapping->info_nproc    = *nproc;
  mapping->info_procs    = *procs;
  mapping->info_numprocs = *numprocs;
  mapping->info_indices  = *indices;
  mapping->info_cached   = PETSC_TRUE;
  return(0);
}
/*@C
   ISLocalToGlobalMappingRestoreBlockInfo - Frees the memory allocated by ISLocalToGlobalMappingGetBlockInfo()

   Collective on ISLocalToGlobalMapping

   Input Parameter:
.  mapping - the mapping from local to global indexing

   Output Parameters:
+  nproc - number of processors that are connected to this one
.  proc - neighboring processors
.  numproc - number of indices for each processor
-  indices - indices of local nodes shared with neighbor (sorted by global numbering)

   Level: advanced

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingGetInfo()
@*/
PetscErrorCode ISLocalToGlobalMappingRestoreBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  if (mapping->info_free) {
    PetscFree(*numprocs);
    if (*indices) {
      PetscInt i;

      PetscFree((*indices)[0]);
      for (i=1; i<*nproc; i++) {
        PetscFree((*indices)[i]);
      }
      PetscFree(*indices);
    }
  }
  *nproc    = 0;
  *procs    = NULL;
  *numprocs = NULL;
  *indices  = NULL;
  return(0);
}
/*@C
   ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
   each index shared by more than one processor

   Collective on ISLocalToGlobalMapping

   Input Parameter:
.  mapping - the mapping from local to global indexing

   Output Parameters:
+  nproc - number of processors that are connected to this one
.  proc - neighboring processors
.  numproc - number of indices for each subdomain (processor)
-  indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)

   Level: advanced

   Concepts: mapping^local to global

   Fortran Usage:
$        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
$        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
$        PetscInt indices[nproc][numprocmax],ierr)
   There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
   indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingRestoreInfo()
@*/
PetscErrorCode ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscInt **bindices = NULL,*bnumprocs = NULL,bs = mapping->bs,i,j,k;

  ISLocalToGlobalMappingGetBlockInfo(mapping,nproc,procs,&bnumprocs,&bindices);
  if (bs > 1) { /* we need to expand the cached info */
    PetscCalloc1(*nproc,&*indices);
    PetscCalloc1(*nproc,&*numprocs);
    for (i=0; i<*nproc; i++) {
      PetscMalloc1(bs*bnumprocs[i],&(*indices)[i]);
      for (j=0; j<bnumprocs[i]; j++) {
        for (k=0; k<bs; k++) {
          (*indices)[i][j*bs+k] = bs*bindices[i][j] + k;
        }
      }
      (*numprocs)[i] = bnumprocs[i]*bs;
    }
    mapping->info_free = PETSC_TRUE;
  } else {
    *numprocs = bnumprocs;
    *indices  = bindices;
  }
  return(0);
}
/*@C
   ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()

   Collective on ISLocalToGlobalMapping

   Input Parameter:
.  mapping - the mapping from local to global indexing

   Output Parameters:
+  nproc - number of processors that are connected to this one
.  proc - neighboring processors
.  numproc - number of indices for each processor
-  indices - indices of local nodes shared with neighbor (sorted by global numbering)

   Level: advanced

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingGetInfo()
@*/
PetscErrorCode ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  ISLocalToGlobalMappingRestoreBlockInfo(mapping,nproc,procs,numprocs,indices);
  return(0);
}
/*@C
   ISLocalToGlobalMappingGetIndices - Get global indices for every local point that is mapped

   Not Collective

   Input Argument:
.  ltog - local to global mapping

   Output Argument:
.  array - array of indices

   Level: advanced

   Notes: ISLocalToGlobalMappingGetSize() returns the length of this array

.seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices(), ISLocalToGlobalMappingGetBlockIndices(), ISLocalToGlobalMappingRestoreBlockIndices()
@*/
PetscErrorCode ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{
  if (ltog->bs == 1) {
    *array = ltog->indices;
  } else {
    PetscInt       *jj,k,i,j,n = ltog->n,bs = ltog->bs;
    const PetscInt *ii;

    PetscMalloc1(bs*n,&jj);
    *array = jj;
    k      = 0;
    ii     = ltog->indices;
    for (i=0; i<n; i++)
      for (j=0; j<bs; j++)
        jj[k++] = bs*ii[i] + j;
  }
  return(0);
}
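
/* Usage sketch (illustrative; the helper name is hypothetical): read the
   point-wise indices and hand them back; for bs>1 the Get call returns a
   temporary expanded copy, so the Restore call is required to avoid a leak. */
static PetscErrorCode ExampleGetIndicesUsage(ISLocalToGlobalMapping ltog)
{
  const PetscInt *array;
  PetscInt       n;

  ISLocalToGlobalMappingGetSize(ltog,&n);
  ISLocalToGlobalMappingGetIndices(ltog,&array);
  /* ... read array[0..n-1] ... */
  ISLocalToGlobalMappingRestoreIndices(ltog,&array);
  return(0);
}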
/*@C
   ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingGetIndices()

   Not Collective

   Input Arguments:
+  ltog - local to global mapping
-  array - array of indices

   Level: advanced

.seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
@*/
PetscErrorCode ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{
  if (ltog->bs == 1 && *array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");

  if (ltog->bs > 1) {
    PetscFree(*(void**)array);
  }
  return(0);
}
/*@C
   ISLocalToGlobalMappingGetBlockIndices - Get global indices for every local block

   Not Collective

   Input Argument:
.  ltog - local to global mapping

   Output Argument:
.  array - array of indices

   Level: advanced

.seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreBlockIndices()
@*/
PetscErrorCode ISLocalToGlobalMappingGetBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{
  *array = ltog->indices;
  return(0);
}
/*@C
   ISLocalToGlobalMappingRestoreBlockIndices - Restore indices obtained with ISLocalToGlobalMappingGetBlockIndices()

   Not Collective

   Input Arguments:
+  ltog - local to global mapping
-  array - array of indices

   Level: advanced

.seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetBlockIndices()
@*/
PetscErrorCode ISLocalToGlobalMappingRestoreBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{
  if (*array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
  *array = NULL;
  return(0);
}
/*@C
   ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings

   Not Collective

   Input Arguments:
+  comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
.  n - number of mappings to concatenate
-  ltogs - local to global mappings

   Output Argument:
.  ltogcat - new mapping

   Note: this currently always returns a mapping with block size of 1

   Developer Note: If all the input mappings have the same block size we could easily handle that as a special case

   Level: advanced

.seealso: ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat)
{
  PetscInt i,cnt,m,*idx;

  if (n < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %D",n);

  for (cnt=0,i=0; i<n; i++) {
    ISLocalToGlobalMappingGetSize(ltogs[i],&m);
    cnt += m;
  }
  PetscMalloc1(cnt,&idx);
  for (cnt=0,i=0; i<n; i++) {
    const PetscInt *subidx;
    ISLocalToGlobalMappingGetSize(ltogs[i],&m);
    ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx);
    PetscMemcpy(&idx[cnt],subidx,m*sizeof(PetscInt));
    ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx);
    cnt += m;
  }
  ISLocalToGlobalMappingCreate(comm,1,cnt,idx,PETSC_OWN_POINTER,ltogcat);
  return(0);
}
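
/* Usage sketch (illustrative; the helper name is hypothetical): concatenate two
   mappings; as noted above, the result always has block size 1. */
static PetscErrorCode ExampleConcatenateUsage(ISLocalToGlobalMapping a,ISLocalToGlobalMapping b)
{
  ISLocalToGlobalMapping pair[2],cat;

  pair[0] = a; pair[1] = b;
  ISLocalToGlobalMappingConcatenate(PETSC_COMM_SELF,2,pair,&cat);
  ISLocalToGlobalMappingDestroy(&cat);
  return(0);
}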