Actual source code: index.c
petsc-3.11.4 2019-09-28
2: /*
3: Defines the abstract operations on index sets, i.e. the public interface.
4: */
5: #include <petsc/private/isimpl.h>
6: #include <petscviewer.h>
7: #include <petscsf.h>
9: /* Logging support */
10: PetscClassId IS_CLASSID;
12: /*@
13: ISRenumber - Renumbers an index set (with multiplicities) in a contiguous way.
15: Collective on IS
17:    Input Parameters:
18: +  subset - the index set
19: -  subset_mult - the multiplicity of each entry in subset (optional, can be NULL)
21: Output Parameters:
22: +  N - the global size of the new IS, i.e. one more than its largest entry
23: -  subset_n - the new IS
25: Level: intermediate
27: .seealso:
28: @*/
29: PetscErrorCode ISRenumber(IS subset, IS subset_mult, PetscInt *N, IS *subset_n)
30: {
31: PetscSF sf;
32: PetscLayout map;
33: const PetscInt *idxs;
34: PetscInt *leaf_data,*root_data,*gidxs;
35: PetscInt N_n,n,i,lbounds[2],gbounds[2],Nl;
36: PetscInt n_n,nlocals,start,first_index;
37: PetscMPIInt commsize;
38: PetscBool first_found;
42: ISGetLocalSize(subset,&n);
43: if (subset_mult) {
45:     ISGetLocalSize(subset_mult,&i);
46:     if (i != n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Local subset and multiplicity sizes don't match! %D != %D",n,i);
47: }
48: /* create workspace layout for computing global indices of subset */
49: ISGetIndices(subset,&idxs);
50: lbounds[0] = lbounds[1] = 0;
51: for (i=0;i<n;i++) {
52: if (idxs[i] < lbounds[0]) lbounds[0] = idxs[i];
53: else if (idxs[i] > lbounds[1]) lbounds[1] = idxs[i];
54: }
55: lbounds[0] = -lbounds[0];
56: MPIU_Allreduce(lbounds,gbounds,2,MPIU_INT,MPI_MAX,PetscObjectComm((PetscObject)subset));
57: gbounds[0] = -gbounds[0];
58: N_n= gbounds[1] - gbounds[0] + 1;
59: PetscLayoutCreate(PetscObjectComm((PetscObject)subset),&map);
60: PetscLayoutSetBlockSize(map,1);
61: PetscLayoutSetSize(map,N_n);
62: PetscLayoutSetUp(map);
63: PetscLayoutGetLocalSize(map,&Nl);
65: /* create sf : leaf_data == multiplicity of indexes, root data == global index in layout */
66: PetscMalloc2(n,&leaf_data,Nl,&root_data);
67: if (subset_mult) {
68: const PetscInt* idxs_mult;
70: ISGetIndices(subset_mult,&idxs_mult);
71: PetscMemcpy(leaf_data,idxs_mult,n*sizeof(PetscInt));
72: ISRestoreIndices(subset_mult,&idxs_mult);
73: } else {
74: for (i=0;i<n;i++) leaf_data[i] = 1;
75: }
76: /* local size of new subset */
77: n_n = 0;
78: for (i=0;i<n;i++) n_n += leaf_data[i];
80: /* global indexes in layout */
81: PetscMalloc1(n_n,&gidxs); /* allocating possibly extra space in gidxs which will be used later */
82: for (i=0;i<n;i++) gidxs[i] = idxs[i] - gbounds[0];
83: ISRestoreIndices(subset,&idxs);
84: PetscSFCreate(PetscObjectComm((PetscObject)subset),&sf);
85: PetscSFSetGraphLayout(sf,map,n,NULL,PETSC_COPY_VALUES,gidxs);
86: PetscLayoutDestroy(&map);
88: /* reduce from leaves to roots */
89: PetscMemzero(root_data,Nl*sizeof(PetscInt));
90: PetscSFReduceBegin(sf,MPIU_INT,leaf_data,root_data,MPI_MAX);
91: PetscSFReduceEnd(sf,MPIU_INT,leaf_data,root_data,MPI_MAX);
93: /* count indexes in local part of layout */
94: nlocals = 0;
95: first_index = -1;
96: first_found = PETSC_FALSE;
97: for (i=0;i<Nl;i++) {
98: if (!first_found && root_data[i]) {
99: first_found = PETSC_TRUE;
100: first_index = i;
101: }
102: nlocals += root_data[i];
103: }
105: /* cumulative of number of indexes and size of subset without holes */
106: #if defined(PETSC_HAVE_MPI_EXSCAN)
107: start = 0;
108: MPI_Exscan(&nlocals,&start,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)subset));
109: #else
110: MPI_Scan(&nlocals,&start,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)subset));
111: start = start-nlocals;
112: #endif
114: if (N) { /* compute total size of new subset if requested */
115: *N = start + nlocals;
116: MPI_Comm_size(PetscObjectComm((PetscObject)subset),&commsize);
117: MPI_Bcast(N,1,MPIU_INT,commsize-1,PetscObjectComm((PetscObject)subset));
118: }
120: /* adapt root data with cumulative */
121: if (first_found) {
122: PetscInt old_index;
124: root_data[first_index] += start;
125: old_index = first_index;
126: for (i=first_index+1;i<Nl;i++) {
127: if (root_data[i]) {
128: root_data[i] += root_data[old_index];
129: old_index = i;
130: }
131: }
132: }
134: /* from roots to leaves */
135: PetscSFBcastBegin(sf,MPIU_INT,root_data,leaf_data);
136: PetscSFBcastEnd(sf,MPIU_INT,root_data,leaf_data);
137: PetscSFDestroy(&sf);
139: /* create new IS with global indexes without holes */
140: if (subset_mult) {
141: const PetscInt* idxs_mult;
142: PetscInt cum;
144: cum = 0;
145: ISGetIndices(subset_mult,&idxs_mult);
146: for (i=0;i<n;i++) {
147: PetscInt j;
148: for (j=0;j<idxs_mult[i];j++) gidxs[cum++] = leaf_data[i] - idxs_mult[i] + j;
149: }
150: ISRestoreIndices(subset_mult,&idxs_mult);
151: } else {
152: for (i=0;i<n;i++) {
153: gidxs[i] = leaf_data[i]-1;
154: }
155: }
156: ISCreateGeneral(PetscObjectComm((PetscObject)subset),n_n,gidxs,PETSC_OWN_POINTER,subset_n);
157: PetscFree2(leaf_data,root_data);
158: return(0);
159: }
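/*
   A minimal usage sketch for ISRenumber() (not part of the PETSc sources): the helper name,
   communicator, and index values below are illustrative only, and error checking is omitted
   to match the abridged listing above.
*/
static PetscErrorCode ExampleRenumberUsage(MPI_Comm comm)
{
  IS       subset,subset_mult,renumbered;
  PetscInt idx[3]  = {12,40,12};   /* sparse, non-contiguous global indices with a repeated entry */
  PetscInt mult[3] = {1,2,1};      /* multiplicity of each entry */
  PetscInt N;

  ISCreateGeneral(comm,3,idx,PETSC_COPY_VALUES,&subset);
  ISCreateGeneral(comm,3,mult,PETSC_COPY_VALUES,&subset_mult);
  ISRenumber(subset,subset_mult,&N,&renumbered);  /* the new indices lie in [0,N) without holes */
  ISDestroy(&renumbered);
  ISDestroy(&subset_mult);
  ISDestroy(&subset);
  return(0);
}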
162: /*@
163: ISCreateSubIS - Create a sub index set from a global index set selecting some components.
165: Collective on IS
167:    Input Parameters:
168: +  is - the index set
169: -  comps - the components to extract from is
171: Output Parameters:
172: . subis - the new sub index set
174: Level: intermediate
176: Example usage:
177: We have an index set (is) living on 3 processes with the following values:
178: | 4 9 0 | 2 6 7 | 10 11 1|
179: and another index set (comps) used to indicate which components of is we want to take,
180: | 7 5 | 1 2 | 0 4|
181: The output index set (subis) should look like:
182: | 11 7 | 9 0 | 4 6|
184: .seealso: VecGetSubVector(), MatCreateSubMatrix()
185: @*/
186: PetscErrorCode ISCreateSubIS(IS is,IS comps,IS *subis)
187: {
188: PetscSF sf;
189: const PetscInt *is_indices,*comps_indices;
190: PetscInt *subis_indices,nroots,nleaves,*mine,i,owner,lidx;
191: PetscSFNode *remote;
192: PetscErrorCode ierr;
193: MPI_Comm comm;
200: PetscObjectGetComm((PetscObject)is, &comm);
201: ISGetLocalSize(comps,&nleaves);
202: ISGetLocalSize(is,&nroots);
203: PetscMalloc1(nleaves,&remote);
204: PetscMalloc1(nleaves,&mine);
205: ISGetIndices(comps,&comps_indices);
206: /*
207: * Construct a PetscSF in which "is" data serves as roots and "subis" is leaves.
208: * Root data are sent to leaves using PetscSFBcast().
209: * */
210: for (i=0; i<nleaves; i++) {
211: mine[i] = i;
212: /* Connect a remote root with the current leaf. The value on the remote root
213: * will be received by the current local leaf.
214: * */
215: owner = -1;
216: lidx = -1;
217: PetscLayoutFindOwnerIndex(is->map,comps_indices[i],&owner, &lidx);
218: remote[i].rank = owner;
219: remote[i].index = lidx;
220: }
221: ISRestoreIndices(comps,&comps_indices);
222: PetscSFCreate(comm,&sf);
223:   PetscSFSetFromOptions(sf);
224: PetscSFSetGraph(sf,nroots,nleaves,mine,PETSC_OWN_POINTER,remote,PETSC_OWN_POINTER);
226: PetscMalloc1(nleaves,&subis_indices);
227: ISGetIndices(is, &is_indices);
228: PetscSFBcastBegin(sf,MPIU_INT,is_indices,subis_indices);
229: PetscSFBcastEnd(sf,MPIU_INT,is_indices,subis_indices);
230: ISRestoreIndices(is,&is_indices);
231: PetscSFDestroy(&sf);
232: ISCreateGeneral(comm,nleaves,subis_indices,PETSC_OWN_POINTER,subis);
233: return(0);
234: }
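/*
   A minimal usage sketch for ISCreateSubIS() (not part of the PETSc sources): run on a single
   process with arbitrary values, helper name hypothetical, error checking omitted.
*/
static PetscErrorCode ExampleCreateSubISUsage(void)
{
  IS       is,comps,subis;
  PetscInt is_idx[4]    = {4,9,0,2};
  PetscInt comps_idx[2] = {3,1};     /* positions within is, in the global numbering of is */

  ISCreateGeneral(PETSC_COMM_SELF,4,is_idx,PETSC_COPY_VALUES,&is);
  ISCreateGeneral(PETSC_COMM_SELF,2,comps_idx,PETSC_COPY_VALUES,&comps);
  ISCreateSubIS(is,comps,&subis);    /* subis = {2,9} */
  ISDestroy(&subis);
  ISDestroy(&comps);
  ISDestroy(&is);
  return(0);
}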
237: /*@
238: ISIdentity - Determines whether index set is the identity mapping.
240: Collective on IS
242:    Input Parameters:
243: . is - the index set
245: Output Parameters:
246: . ident - PETSC_TRUE if an identity, else PETSC_FALSE
248: Level: intermediate
250: Concepts: identity mapping
251: Concepts: index sets^is identity
253: .seealso: ISSetIdentity()
254: @*/
255: PetscErrorCode ISIdentity(IS is,PetscBool *ident)
256: {
262: *ident = is->isidentity;
263: if (*ident) return(0);
264: if (is->ops->identity) {
265: (*is->ops->identity)(is,ident);
266: }
267: return(0);
268: }
270: /*@
271: ISSetIdentity - Informs the index set that it is an identity.
273: Logically Collective on IS
275:    Input Parameters:
276: . is - the index set
278: Level: intermediate
280: Concepts: identity mapping
281: Concepts: index sets^is identity
283: .seealso: ISIdentity()
284: @*/
285: PetscErrorCode ISSetIdentity(IS is)
286: {
291: is->isidentity = PETSC_TRUE;
292: ISSetPermutation(is);
293: return(0);
294: }
296: /*@
297: ISContiguousLocal - Locates an index set with contiguous range within a global range, if possible
299: Not Collective
301:    Input Parameters:
302: +  is - the index set
303: .  gstart - global start
304: -  gend - global end
306: Output Parameters:
307: + start - start of contiguous block, as an offset from gstart
308: - contig - PETSC_TRUE if the index set refers to contiguous entries on this process, else PETSC_FALSE
310: Level: developer
312: Concepts: index sets^is contiguous
314: .seealso: ISGetLocalSize(), VecGetOwnershipRange()
315: @*/
316: PetscErrorCode ISContiguousLocal(IS is,PetscInt gstart,PetscInt gend,PetscInt *start,PetscBool *contig)
317: {
324: if (is->ops->contiguous) {
325: (*is->ops->contiguous)(is,gstart,gend,start,contig);
326: } else {
327: *start = -1;
328: *contig = PETSC_FALSE;
329: }
330: return(0);
331: }
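/*
   A minimal usage sketch for ISContiguousLocal() (not part of the PETSc sources): a stride-1
   IS is contiguous inside the global range [gstart,gend), and start is reported as an offset
   from gstart. Helper name and values are illustrative; error checking omitted.
*/
static PetscErrorCode ExampleContiguousLocalUsage(void)
{
  IS        is;
  PetscInt  start;
  PetscBool contig;

  ISCreateStride(PETSC_COMM_SELF,5,10,1,&is);  /* indices 10,11,12,13,14 */
  ISContiguousLocal(is,8,20,&start,&contig);   /* contig == PETSC_TRUE, start == 2 */
  ISDestroy(&is);
  return(0);
}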
333: /*@
334: ISPermutation - PETSC_TRUE or PETSC_FALSE depending on whether the
335: index set has been declared to be a permutation.
337: Logically Collective on IS
339:    Input Parameters:
340: . is - the index set
342: Output Parameters:
343: . perm - PETSC_TRUE if a permutation, else PETSC_FALSE
345: Level: intermediate
347: Concepts: permutation
348: Concepts: index sets^is permutation
350: .seealso: ISSetPermutation()
351: @*/
352: PetscErrorCode ISPermutation(IS is,PetscBool *perm)
353: {
357: *perm = (PetscBool) is->isperm;
358: return(0);
359: }
361: /*@
362: ISSetPermutation - Informs the index set that it is a permutation.
364: Logically Collective on IS
366:    Input Parameters:
367: . is - the index set
369: Level: intermediate
371: Concepts: permutation
372: Concepts: index sets^permutation
374: The debug version of the libraries (./configure --with-debugging=1) checks if the
375: index set is actually a permutation. The optimized version just believes you.
377: .seealso: ISPermutation()
378: @*/
379: PetscErrorCode ISSetPermutation(IS is)
380: {
383: #if defined(PETSC_USE_DEBUG)
384: {
385: PetscMPIInt size;
388: MPI_Comm_size(PetscObjectComm((PetscObject)is),&size);
389: if (size == 1) {
390: PetscInt i,n,*idx;
391: const PetscInt *iidx;
393: ISGetSize(is,&n);
394: PetscMalloc1(n,&idx);
395: ISGetIndices(is,&iidx);
396: PetscMemcpy(idx,iidx,n*sizeof(PetscInt));
397: PetscSortInt(n,idx);
398: for (i=0; i<n; i++) {
399: if (idx[i] != i) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Index set is not a permutation");
400: }
401: PetscFree(idx);
402: ISRestoreIndices(is,&iidx);
403: }
404: }
405: #endif
406: is->isperm = PETSC_TRUE;
407: return(0);
408: }
410: /*@
411: ISDestroy - Destroys an index set.
413: Collective on IS
415: Input Parameters:
416: . is - the index set
418: Level: beginner
420: .seealso: ISCreateGeneral(), ISCreateStride(), ISCreateBlocked()
421: @*/
422: PetscErrorCode ISDestroy(IS *is)
423: {
427: if (!*is) return(0);
429: if (--((PetscObject)(*is))->refct > 0) {*is = 0; return(0);}
430: if ((*is)->complement) {
431: PetscInt refcnt;
432: PetscObjectGetReference((PetscObject)((*is)->complement), &refcnt);
433: if (refcnt > 1) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Nonlocal IS has not been restored");
434: ISDestroy(&(*is)->complement);
435: }
436: if ((*is)->ops->destroy) {
437: (*(*is)->ops->destroy)(*is);
438: }
439: PetscLayoutDestroy(&(*is)->map);
440: /* Destroy local representations of offproc data. */
441: PetscFree((*is)->total);
442: PetscFree((*is)->nonlocal);
443: PetscHeaderDestroy(is);
444: return(0);
445: }
447: /*@
448: ISInvertPermutation - Creates a new permutation that is the inverse of
449: a given permutation.
451: Collective on IS
453:    Input Parameters:
454: +  is - the index set
455: -  nlocal - number of indices on this processor in result (ignored for 1 processor) or
456: use PETSC_DECIDE
458: Output Parameter:
459: . isout - the inverse permutation
461: Level: intermediate
463: Notes:
464: For parallel index sets this does the complete parallel permutation, but the
465: code is not efficient for huge index sets (10,000,000 indices).
467: Concepts: inverse permutation
468: Concepts: permutation^inverse
469: Concepts: index sets^inverting
470: @*/
471: PetscErrorCode ISInvertPermutation(IS is,PetscInt nlocal,IS *isout)
472: {
478: if (!is->isperm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Not a permutation, must call ISSetPermutation() on the IS first");
479: if (is->isidentity) {
480: ISDuplicate(is,isout);
481: } else {
482: (*is->ops->invertpermutation)(is,nlocal,isout);
483: ISSetPermutation(*isout);
484: }
485: return(0);
486: }
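/*
   A minimal usage sketch for ISInvertPermutation() (not part of the PETSc sources), sequential,
   with an arbitrary permutation and no error checking; PETSC_DECIDE lets PETSc pick the local
   size of the result.
*/
static PetscErrorCode ExampleInvertPermutationUsage(void)
{
  IS       perm,iperm;
  PetscInt p[4] = {2,0,3,1};

  ISCreateGeneral(PETSC_COMM_SELF,4,p,PETSC_COPY_VALUES,&perm);
  ISSetPermutation(perm);                         /* required before inverting */
  ISInvertPermutation(perm,PETSC_DECIDE,&iperm);  /* iperm = {1,3,0,2} */
  ISDestroy(&iperm);
  ISDestroy(&perm);
  return(0);
}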
488: /*@
489: ISGetSize - Returns the global length of an index set.
491: Not Collective
493: Input Parameter:
494: . is - the index set
496: Output Parameter:
497: . size - the global size
499: Level: beginner
501: Concepts: size^of index set
502: Concepts: index sets^size
504: @*/
505: PetscErrorCode ISGetSize(IS is,PetscInt *size)
506: {
512: (*is->ops->getsize)(is,size);
513: return(0);
514: }
516: /*@
517: ISGetLocalSize - Returns the local (processor) length of an index set.
519: Not Collective
521: Input Parameter:
522: . is - the index set
524: Output Parameter:
525: . size - the local size
527: Level: beginner
529: Concepts: size^of index set
530: Concepts: local size^of index set
531: Concepts: index sets^local size
533: @*/
534: PetscErrorCode ISGetLocalSize(IS is,PetscInt *size)
535: {
541: (*is->ops->getlocalsize)(is,size);
542: return(0);
543: }
545: /*@C
546: ISGetIndices - Returns a pointer to the indices. The user should call
547: ISRestoreIndices() after having looked at the indices. The user should
548: NOT change the indices.
550: Not Collective
552: Input Parameter:
553: . is - the index set
555: Output Parameter:
556: . ptr - the location to put the pointer to the indices
558: Fortran Note:
559:     This routine has two different interfaces from Fortran; the first is not recommended, it does not require Fortran 90
560: $ IS is
561: $ integer is_array(1)
562: $ PetscOffset i_is
563: $ int ierr
564: $ call ISGetIndices(is,is_array,i_is,ierr)
565: $
566: $ Access first local entry in list
567: $ value = is_array(i_is + 1)
568: $
569: $ ...... other code
570: $ call ISRestoreIndices(is,is_array,i_is,ierr)
571: The second Fortran interface is recommended.
572: $ use petscisdef
573: $ PetscInt, pointer :: array(:)
574: $ PetscErrorCode ierr
575: $ IS i
576: $ call ISGetIndicesF90(i,array,ierr)
580: See the Fortran chapter of the users manual and
581: petsc/src/is/examples/[tutorials,tests] for details.
583: Level: intermediate
585: Concepts: index sets^getting indices
586: Concepts: indices of index set
588: .seealso: ISRestoreIndices(), ISGetIndicesF90()
589: @*/
590: PetscErrorCode ISGetIndices(IS is,const PetscInt *ptr[])
591: {
597: (*is->ops->getindices)(is,ptr);
598: return(0);
599: }
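/*
   A minimal usage sketch for ISGetIndices()/ISRestoreIndices() (not part of the PETSc sources):
   read-only access to the local indices. Helper name is illustrative; error checking omitted.
*/
static PetscErrorCode ExampleGetIndicesUsage(IS is)
{
  const PetscInt *idx;
  PetscInt        i,n,sum = 0;

  ISGetLocalSize(is,&n);
  ISGetIndices(is,&idx);
  for (i=0; i<n; i++) sum += idx[i];  /* the indices must not be modified */
  ISRestoreIndices(is,&idx);
  PetscPrintf(PETSC_COMM_SELF,"local sum of indices: %D\n",sum);
  return(0);
}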
601: /*@C
602: ISGetMinMax - Gets the minimum and maximum values in an IS
604: Not Collective
606: Input Parameter:
607: . is - the index set
609: Output Parameter:
610: + min - the minimum value
611: - max - the maximum value
613: Level: intermediate
615: Notes:
616: Empty index sets return min=PETSC_MAX_INT and max=PETSC_MIN_INT.
617: In parallel, it returns the min and max of the local portion of the IS
619: Concepts: index sets^getting indices
620: Concepts: indices of index set
622: .seealso: ISGetIndices(), ISRestoreIndices(), ISGetIndicesF90()
623: @*/
624: PetscErrorCode ISGetMinMax(IS is,PetscInt *min,PetscInt *max)
625: {
628: if (min) *min = is->min;
629: if (max) *max = is->max;
630: return(0);
631: }
633: /*@
634:    ISLocate - Determines the location of an index within the local component of an index set
636:    Not Collective
638:    Input Parameters:
639: + is - the index set
640: - key - the search key
642: Output Parameter:
643: . location - if >= 0, a location within the index set that is equal to the key, otherwise the key is not in the index set
645: Level: intermediate
646: @*/
647: PetscErrorCode ISLocate(IS is, PetscInt key, PetscInt *location)
648: {
652: if (is->ops->locate) {
653: (*is->ops->locate)(is,key,location);
654: } else {
655: PetscInt numIdx;
656: PetscBool sorted;
657: const PetscInt *idx;
659: ISGetLocalSize(is,&numIdx);
660: ISGetIndices(is,&idx);
661: ISSorted(is,&sorted);
662: if (sorted) {
663: PetscFindInt(key,numIdx,idx,location);
664: } else {
665: PetscInt i;
667: *location = -1;
668: for (i = 0; i < numIdx; i++) {
669: if (idx[i] == key) {
670: *location = i;
671: break;
672: }
673: }
674: }
675: ISRestoreIndices(is,&idx);
676: }
677: return(0);
678: }
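/*
   A minimal usage sketch for ISLocate() (not part of the PETSc sources): search the local part
   of an IS for a key. Helper name and values are illustrative; error checking omitted.
*/
static PetscErrorCode ExampleLocateUsage(void)
{
  IS       is;
  PetscInt idx[4] = {7,3,9,5};
  PetscInt location;

  ISCreateGeneral(PETSC_COMM_SELF,4,idx,PETSC_COPY_VALUES,&is);
  ISLocate(is,9,&location);  /* location == 2 */
  ISLocate(is,4,&location);  /* location < 0: 4 is not in the local part */
  ISDestroy(&is);
  return(0);
}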
680: /*@C
681: ISRestoreIndices - Restores an index set to a usable state after a call
682: to ISGetIndices().
684: Not Collective
686: Input Parameters:
687: + is - the index set
688: - ptr - the pointer obtained by ISGetIndices()
690: Fortran Note:
691: This routine is used differently from Fortran
692: $ IS is
693: $ integer is_array(1)
694: $ PetscOffset i_is
695: $ int ierr
696: $ call ISGetIndices(is,is_array,i_is,ierr)
697: $
698: $ Access first local entry in list
699: $ value = is_array(i_is + 1)
700: $
701: $ ...... other code
702: $ call ISRestoreIndices(is,is_array,i_is,ierr)
704: See the Fortran chapter of the users manual and
705: petsc/src/is/examples/[tutorials,tests] for details.
707: Level: intermediate
709: Note:
710:    This routine zeros out ptr. This is to prevent accidental use of the array after it has been restored.
712: .seealso: ISGetIndices(), ISRestoreIndicesF90()
713: @*/
714: PetscErrorCode ISRestoreIndices(IS is,const PetscInt *ptr[])
715: {
721: if (is->ops->restoreindices) {
722: (*is->ops->restoreindices)(is,ptr);
723: }
724: return(0);
725: }
727: static PetscErrorCode ISGatherTotal_Private(IS is)
728: {
730: PetscInt i,n,N;
731: const PetscInt *lindices;
732: MPI_Comm comm;
733: PetscMPIInt rank,size,*sizes = NULL,*offsets = NULL,nn;
738: PetscObjectGetComm((PetscObject)is,&comm);
739: MPI_Comm_size(comm,&size);
740: MPI_Comm_rank(comm,&rank);
741: ISGetLocalSize(is,&n);
742: PetscMalloc2(size,&sizes,size,&offsets);
744: PetscMPIIntCast(n,&nn);
745: MPI_Allgather(&nn,1,MPI_INT,sizes,1,MPI_INT,comm);
746: offsets[0] = 0;
747: for (i=1; i<size; ++i) offsets[i] = offsets[i-1] + sizes[i-1];
748: N = offsets[size-1] + sizes[size-1];
750: PetscMalloc1(N,&(is->total));
751: ISGetIndices(is,&lindices);
752: MPI_Allgatherv((void*)lindices,nn,MPIU_INT,is->total,sizes,offsets,MPIU_INT,comm);
753: ISRestoreIndices(is,&lindices);
754: is->local_offset = offsets[rank];
755: PetscFree2(sizes,offsets);
756: return(0);
757: }
759: /*@C
760: ISGetTotalIndices - Retrieve an array containing all indices across the communicator.
762: Collective on IS
764: Input Parameter:
765: . is - the index set
767: Output Parameter:
768: . indices - total indices with rank 0 indices first, and so on; total array size is
769: the same as returned with ISGetSize().
771: Level: intermediate
773: Notes:
774:     This is potentially nonscalable, but depends on the size of the total index set
775:     and the size of the communicator. This may be feasible for index sets defined on
776:     subcommunicators, such that the set size does not grow with PETSC_COMM_WORLD.
777: Note also that there is no way to tell where the local part of the indices starts
778: (use ISGetIndices() and ISGetNonlocalIndices() to retrieve just the local and just
779: the nonlocal part (complement), respectively).
781: Concepts: index sets^getting nonlocal indices
782: .seealso: ISRestoreTotalIndices(), ISGetNonlocalIndices(), ISGetSize()
783: @*/
784: PetscErrorCode ISGetTotalIndices(IS is, const PetscInt *indices[])
785: {
787: PetscMPIInt size;
792: MPI_Comm_size(PetscObjectComm((PetscObject)is), &size);
793: if (size == 1) {
794: (*is->ops->getindices)(is,indices);
795: } else {
796: if (!is->total) {
797: ISGatherTotal_Private(is);
798: }
799: *indices = is->total;
800: }
801: return(0);
802: }
804: /*@C
805: ISRestoreTotalIndices - Restore the index array obtained with ISGetTotalIndices().
807: Not Collective.
809:    Input Parameters:
810: + is - the index set
811: - indices - index array; must be the array obtained with ISGetTotalIndices()
813: Level: intermediate
815: Concepts: index sets^getting nonlocal indices
816: Concepts: index sets^restoring nonlocal indices
817: .seealso: ISRestoreTotalIndices(), ISGetNonlocalIndices()
818: @*/
819: PetscErrorCode ISRestoreTotalIndices(IS is, const PetscInt *indices[])
820: {
822: PetscMPIInt size;
827: MPI_Comm_size(PetscObjectComm((PetscObject)is), &size);
828: if (size == 1) {
829: (*is->ops->restoreindices)(is,indices);
830: } else {
831: if (is->total != *indices) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Index array pointer being restored does not point to the array obtained from the IS.");
832: }
833: return(0);
834: }
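/*
   A minimal usage sketch for ISGetTotalIndices()/ISRestoreTotalIndices() (not part of the PETSc
   sources): gather all indices of a (possibly parallel) IS on every process. Helper name is
   illustrative; error checking omitted.
*/
static PetscErrorCode ExampleTotalIndicesUsage(IS is)
{
  const PetscInt *total;
  PetscInt        N;

  ISGetSize(is,&N);              /* length of the gathered array */
  ISGetTotalIndices(is,&total);  /* rank 0 indices first, then rank 1, and so on */
  /* ... read-only use of total[0..N-1] ... */
  ISRestoreTotalIndices(is,&total);
  return(0);
}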
835: /*@C
836: ISGetNonlocalIndices - Retrieve an array of indices from remote processors
837: in this communicator.
839: Collective on IS
841: Input Parameter:
842: . is - the index set
844: Output Parameter:
845: .  indices - indices with rank 0 indices first, and so on, omitting
846:              the current rank. The total number of indices is the difference
847:              between the total and local sizes, obtained with ISGetSize() and ISGetLocalSize(),
848:              respectively.
850: Level: intermediate
852: Notes:
853:     Restore the indices using ISRestoreNonlocalIndices().
854:     The same scalability considerations as those for ISGetTotalIndices()
855:     apply here.
857: Concepts: index sets^getting nonlocal indices
858: .seealso: ISGetTotalIndices(), ISRestoreNonlocalIndices(), ISGetSize(), ISGetLocalSize().
859: @*/
860: PetscErrorCode ISGetNonlocalIndices(IS is, const PetscInt *indices[])
861: {
863: PetscMPIInt size;
864: PetscInt n, N;
869: MPI_Comm_size(PetscObjectComm((PetscObject)is), &size);
870: if (size == 1) *indices = NULL;
871: else {
872: if (!is->total) {
873: ISGatherTotal_Private(is);
874: }
875: ISGetLocalSize(is,&n);
876: ISGetSize(is,&N);
877: PetscMalloc1(N-n, &(is->nonlocal));
878: PetscMemcpy(is->nonlocal, is->total, sizeof(PetscInt)*is->local_offset);
879: PetscMemcpy(is->nonlocal+is->local_offset, is->total+is->local_offset+n, sizeof(PetscInt)*(N - is->local_offset - n));
880: *indices = is->nonlocal;
881: }
882: return(0);
883: }
885: /*@C
886:    ISRestoreNonlocalIndices - Restore the index array obtained with ISGetNonlocalIndices().
888: Not Collective.
890:    Input Parameters:
891: + is - the index set
892: - indices - index array; must be the array obtained with ISGetNonlocalIndices()
894: Level: intermediate
896: Concepts: index sets^getting nonlocal indices
897: Concepts: index sets^restoring nonlocal indices
898: .seealso: ISGetTotalIndices(), ISGetNonlocalIndices(), ISRestoreTotalIndices()
899: @*/
900: PetscErrorCode ISRestoreNonlocalIndices(IS is, const PetscInt *indices[])
901: {
905: if (is->nonlocal != *indices) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Index array pointer being restored does not point to the array obtained from the IS.");
906: return(0);
907: }
909: /*@
910: ISGetNonlocalIS - Gather all nonlocal indices for this IS and present
911: them as another sequential index set.
914: Collective on IS
916: Input Parameter:
917: . is - the index set
919: Output Parameter:
920: . complement - sequential IS with indices identical to the result of
921: ISGetNonlocalIndices()
923: Level: intermediate
925: Notes:
926: complement represents the result of ISGetNonlocalIndices as an IS.
927: Therefore scalability issues similar to ISGetNonlocalIndices apply.
928: The resulting IS must be restored using ISRestoreNonlocalIS().
930: Concepts: index sets^getting nonlocal indices
931: .seealso: ISGetNonlocalIndices(), ISRestoreNonlocalIndices(), ISAllGather(), ISGetSize()
932: @*/
933: PetscErrorCode ISGetNonlocalIS(IS is, IS *complement)
934: {
940: /* Check if the complement exists already. */
941: if (is->complement) {
942: *complement = is->complement;
943: PetscObjectReference((PetscObject)(is->complement));
944: } else {
945: PetscInt N, n;
946: const PetscInt *idx;
947: ISGetSize(is, &N);
948: ISGetLocalSize(is,&n);
949: ISGetNonlocalIndices(is, &idx);
950: ISCreateGeneral(PETSC_COMM_SELF, N-n,idx, PETSC_USE_POINTER, &(is->complement));
951: PetscObjectReference((PetscObject)is->complement);
952: *complement = is->complement;
953: }
954: return(0);
955: }
958: /*@
959: ISRestoreNonlocalIS - Restore the IS obtained with ISGetNonlocalIS().
961: Not collective.
963:    Input Parameters:
964: + is - the index set
965: - complement - index set of is's nonlocal indices
967: Level: intermediate
970: Concepts: index sets^getting nonlocal indices
971: Concepts: index sets^restoring nonlocal indices
972: .seealso: ISGetNonlocalIS(), ISGetNonlocalIndices(), ISRestoreNonlocalIndices()
973: @*/
974: PetscErrorCode ISRestoreNonlocalIS(IS is, IS *complement)
975: {
977: PetscInt refcnt;
982: if (*complement != is->complement) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Complement IS being restored was not obtained with ISGetNonlocalIS()");
983: PetscObjectGetReference((PetscObject)(is->complement), &refcnt);
984: if (refcnt <= 1) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Duplicate call to ISRestoreNonlocalIS() detected");
985: PetscObjectDereference((PetscObject)(is->complement));
986: return(0);
987: }
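/*
   A minimal usage sketch for ISGetNonlocalIS()/ISRestoreNonlocalIS() (not part of the PETSc
   sources): view the indices owned by the other processes as a sequential IS. Helper name is
   illustrative; error checking omitted.
*/
static PetscErrorCode ExampleNonlocalISUsage(IS is)
{
  IS complement;

  ISGetNonlocalIS(is,&complement);
  ISView(complement,PETSC_VIEWER_STDOUT_SELF);  /* sequential IS of the off-process indices */
  ISRestoreNonlocalIS(is,&complement);
  return(0);
}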
989: /*@C
990: ISView - Displays an index set.
992: Collective on IS
994: Input Parameters:
995: + is - the index set
996: - viewer - viewer used to display the set, for example PETSC_VIEWER_STDOUT_SELF.
998: Level: intermediate
1000: .seealso: PetscViewerASCIIOpen()
1001: @*/
1002: PetscErrorCode ISView(IS is,PetscViewer viewer)
1003: {
1008: if (!viewer) {
1009: PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)is),&viewer);
1010: }
1014: PetscObjectPrintClassNamePrefixType((PetscObject)is,viewer);
1015: (*is->ops->view)(is,viewer);
1016: return(0);
1017: }
1019: /*@
1020:   ISLoad - Loads an index set that has been stored in binary or HDF5 format with ISView().
1022: Collective on PetscViewer
1024: Input Parameters:
1025: + is - the newly loaded index set; this needs to have been created with ISCreate() or some related function before a call to ISLoad().
1026: - viewer - binary file viewer, obtained from PetscViewerBinaryOpen() or HDF5 file viewer, obtained from PetscViewerHDF5Open()
1028: Level: intermediate
1030: Notes:
1031:   If using HDF5, you must assign the IS the same name as was used for the IS
1032:   that was stored in the file, using PetscObjectSetName(). Otherwise you will
1033: get the error message: "Cannot H5DOpen2() with Vec name NAMEOFOBJECT"
1035: Concepts: index set^loading from file
1037: .seealso: PetscViewerBinaryOpen(), ISView(), MatLoad(), VecLoad()
1038: @*/
1039: PetscErrorCode ISLoad(IS is, PetscViewer viewer)
1040: {
1041: PetscBool isbinary, ishdf5;
1047: PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERBINARY, &isbinary);
1048: PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERHDF5, &ishdf5);
1049: if (!isbinary && !ishdf5) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()");
1050: if (!((PetscObject)is)->type_name) {ISSetType(is, ISGENERAL);}
1051: (*is->ops->load)(is, viewer);
1052: return(0);
1053: }
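/*
   A minimal usage sketch for ISView()/ISLoad() with a binary viewer (not part of the PETSc
   sources): the helper name and file name are illustrative; error checking omitted.
*/
static PetscErrorCode ExampleViewLoadUsage(IS is,MPI_Comm comm)
{
  PetscViewer viewer;
  IS          loaded;

  PetscViewerBinaryOpen(comm,"is.dat",FILE_MODE_WRITE,&viewer);
  ISView(is,viewer);
  PetscViewerDestroy(&viewer);

  ISCreate(comm,&loaded);
  PetscViewerBinaryOpen(comm,"is.dat",FILE_MODE_READ,&viewer);
  ISLoad(loaded,viewer);
  PetscViewerDestroy(&viewer);
  ISDestroy(&loaded);
  return(0);
}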
1055: /*@
1056: ISSort - Sorts the indices of an index set.
1058: Collective on IS
1060: Input Parameters:
1061: . is - the index set
1063: Level: intermediate
1065: Concepts: index sets^sorting
1066: Concepts: sorting^index set
1068: .seealso: ISSortRemoveDups(), ISSorted()
1069: @*/
1070: PetscErrorCode ISSort(IS is)
1071: {
1076: (*is->ops->sort)(is);
1077: return(0);
1078: }
1080: /*@
1081: ISSortRemoveDups - Sorts the indices of an index set, removing duplicates.
1083: Collective on IS
1085: Input Parameters:
1086: . is - the index set
1088: Level: intermediate
1090: Concepts: index sets^sorting
1091: Concepts: sorting^index set
1093: .seealso: ISSort(), ISSorted()
1094: @*/
1095: PetscErrorCode ISSortRemoveDups(IS is)
1096: {
1101: (*is->ops->sortremovedups)(is);
1102: return(0);
1103: }
1105: /*@
1106: ISToGeneral - Converts an IS object of any type to ISGENERAL type
1108: Collective on IS
1110: Input Parameters:
1111: . is - the index set
1113: Level: intermediate
1115: Concepts: index sets^sorting
1116: Concepts: sorting^index set
1118: .seealso: ISSorted()
1119: @*/
1120: PetscErrorCode ISToGeneral(IS is)
1121: {
1126: if (is->ops->togeneral) {
1127: (*is->ops->togeneral)(is);
1128: } else SETERRQ1(PetscObjectComm((PetscObject)is),PETSC_ERR_SUP,"Not written for this type %s",((PetscObject)is)->type_name);
1129: return(0);
1130: }
1132: /*@
1133: ISSorted - Checks the indices to determine whether they have been sorted.
1135: Collective on IS
1137: Input Parameter:
1138: . is - the index set
1140: Output Parameter:
1141: . flg - output flag, either PETSC_TRUE if the index set is sorted,
1142: or PETSC_FALSE otherwise.
1144: Notes:
1145: For parallel IS objects this only indicates if the local part of the IS
1146: is sorted. So some processors may return PETSC_TRUE while others may
1147: return PETSC_FALSE.
1149: Level: intermediate
1151: .seealso: ISSort(), ISSortRemoveDups()
1152: @*/
1153: PetscErrorCode ISSorted(IS is,PetscBool *flg)
1154: {
1160: (*is->ops->sorted)(is,flg);
1161: return(0);
1162: }
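/*
   A minimal usage sketch for ISSortRemoveDups()/ISSorted() (not part of the PETSc sources):
   sort the local indices, drop duplicates, then confirm the result. Helper name and values
   are illustrative; error checking omitted.
*/
static PetscErrorCode ExampleSortUsage(void)
{
  IS        is;
  PetscBool sorted;
  PetscInt  idx[5] = {4,1,4,0,2};

  ISCreateGeneral(PETSC_COMM_SELF,5,idx,PETSC_COPY_VALUES,&is);
  ISSortRemoveDups(is);  /* the local indices become 0,1,2,4 */
  ISSorted(is,&sorted);  /* sorted == PETSC_TRUE */
  ISDestroy(&is);
  return(0);
}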
1164: /*@
1165: ISDuplicate - Creates a duplicate copy of an index set.
1167: Collective on IS
1169:    Input Parameters:
1170: . is - the index set
1172: Output Parameters:
1173: . isnew - the copy of the index set
1175: Level: beginner
1177: Concepts: index sets^duplicating
1179: .seealso: ISCreateGeneral(), ISCopy()
1180: @*/
1181: PetscErrorCode ISDuplicate(IS is,IS *newIS)
1182: {
1188: (*is->ops->duplicate)(is,newIS);
1189: (*newIS)->isidentity = is->isidentity;
1190: (*newIS)->isperm = is->isperm;
1191: return(0);
1192: }
1194: /*@
1195: ISCopy - Copies an index set.
1197: Collective on IS
1199:    Input Parameters:
1200: . is - the index set
1202: Output Parameters:
1203: . isy - the copy of the index set
1205: Level: beginner
1207: Concepts: index sets^copying
1209: .seealso: ISDuplicate()
1210: @*/
1211: PetscErrorCode ISCopy(IS is,IS isy)
1212: {
1219: if (is == isy) return(0);
1220: (*is->ops->copy)(is,isy);
1221: isy->isperm = is->isperm;
1222: isy->max = is->max;
1223: isy->min = is->min;
1224: isy->isidentity = is->isidentity;
1225: return(0);
1226: }
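/*
   A minimal usage sketch for ISDuplicate()/ISCopy() (not part of the PETSc sources):
   ISDuplicate() creates a new IS with the same indices, while ISCopy() overwrites an existing
   one. The helper name is illustrative and the copy is redundant here; error checking omitted.
*/
static PetscErrorCode ExampleDuplicateCopyUsage(IS is)
{
  IS dup;

  ISDuplicate(is,&dup);  /* dup now holds the same indices as is */
  ISCopy(is,dup);        /* copies is into the already existing dup */
  ISDestroy(&dup);
  return(0);
}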
1228: /*@
1229: ISOnComm - Split a parallel IS on subcomms (usually self) or concatenate index sets on subcomms into a parallel index set
1231: Collective on IS and comm
1233: Input Arguments:
1234: + is - index set
1235: . comm - communicator for new index set
1236: - mode - copy semantics, PETSC_USE_POINTER for no-copy if possible, otherwise PETSC_COPY_VALUES
1238: Output Arguments:
1239: . newis - new IS on comm
1241: Level: advanced
1243: Notes:
1244: It is usually desirable to create a parallel IS and look at the local part when necessary.
1246: This function is useful if serial ISs must be created independently, or to view many
1247: logically independent serial ISs.
1249: The input IS must have the same type on every process.
1251: .seealso: ISSplit()
1252: @*/
1253: PetscErrorCode ISOnComm(IS is,MPI_Comm comm,PetscCopyMode mode,IS *newis)
1254: {
1256: PetscMPIInt match;
1261: MPI_Comm_compare(PetscObjectComm((PetscObject)is),comm,&match);
1262: if (mode != PETSC_COPY_VALUES && (match == MPI_IDENT || match == MPI_CONGRUENT)) {
1263: PetscObjectReference((PetscObject)is);
1264: *newis = is;
1265: } else {
1266: (*is->ops->oncomm)(is,comm,mode,newis);
1267: }
1268: return(0);
1269: }
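/*
   A minimal usage sketch for ISOnComm() (not part of the PETSc sources): place the locally
   owned part of a parallel IS on PETSC_COMM_SELF, without copying values when possible.
   Helper name is illustrative; error checking omitted.
*/
static PetscErrorCode ExampleOnCommUsage(IS paris)
{
  IS selfis;

  ISOnComm(paris,PETSC_COMM_SELF,PETSC_USE_POINTER,&selfis);
  /* ... use selfis, whose indices are the locally owned part of paris ... */
  ISDestroy(&selfis);
  return(0);
}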
1271: /*@
1272: ISSetBlockSize - informs an index set that it has a given block size
1274:    Logically Collective on IS
1276: Input Arguments:
1277: + is - index set
1278: - bs - block size
1280: Level: intermediate
1282: .seealso: ISGetBlockSize(), ISCreateBlock()
1283: @*/
1284: PetscErrorCode ISSetBlockSize(IS is,PetscInt bs)
1285: {
1291: if (bs < 1) SETERRQ1(PetscObjectComm((PetscObject)is),PETSC_ERR_ARG_OUTOFRANGE,"Block size %D, must be positive",bs);
1292: (*is->ops->setblocksize)(is,bs);
1293: return(0);
1294: }
1296: /*@
1297: ISGetBlockSize - Returns the number of elements in a block.
1299: Not Collective
1301: Input Parameter:
1302: . is - the index set
1304: Output Parameter:
1305: . size - the number of elements in a block
1307: Level: intermediate
1309: Concepts: IS^block size
1310: Concepts: index sets^block size
1312: .seealso: ISBlockGetSize(), ISGetSize(), ISCreateBlock(), ISSetBlockSize()
1313: @*/
1314: PetscErrorCode ISGetBlockSize(IS is,PetscInt *size)
1315: {
1319: PetscLayoutGetBlockSize(is->map, size);
1320: return(0);
1321: }
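/*
   A minimal usage sketch for ISSetBlockSize()/ISGetBlockSize() (not part of the PETSc sources):
   declare that the indices come in blocks of 2 and query the block size. Helper name and
   values are illustrative; error checking omitted.
*/
static PetscErrorCode ExampleBlockSizeUsage(void)
{
  IS       is;
  PetscInt idx[4] = {6,7,10,11};  /* two blocks of size 2 */
  PetscInt bs;

  ISCreateGeneral(PETSC_COMM_SELF,4,idx,PETSC_COPY_VALUES,&is);
  ISSetBlockSize(is,2);
  ISGetBlockSize(is,&bs);  /* bs == 2 */
  ISDestroy(&is);
  return(0);
}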
1323: PetscErrorCode ISGetIndicesCopy(IS is, PetscInt idx[])
1324: {
1326: PetscInt len,i;
1327: const PetscInt *ptr;
1330: ISGetSize(is,&len);
1331: ISGetIndices(is,&ptr);
1332: for (i=0; i<len; i++) idx[i] = ptr[i];
1333: ISRestoreIndices(is,&ptr);
1334: return(0);
1335: }
1337: /*MC
1338: ISGetIndicesF90 - Accesses the elements of an index set from Fortran90.
1339:     The user should call ISRestoreIndicesF90() after having looked at the
1340: indices. The user should NOT change the indices.
1342: Synopsis:
1343: ISGetIndicesF90(IS x,{integer, pointer :: xx_v(:)},integer ierr)
1345: Not collective
1347: Input Parameter:
1348: . x - index set
1350: Output Parameters:
1351: + xx_v - the Fortran90 pointer to the array
1352: - ierr - error code
1354: Example of Usage:
1355: .vb
1356:     PetscInt, pointer :: xx_v(:)
1357: ....
1358: call ISGetIndicesF90(x,xx_v,ierr)
1359: a = xx_v(3)
1360: call ISRestoreIndicesF90(x,xx_v,ierr)
1361: .ve
1363: Level: intermediate
1365: .seealso: ISRestoreIndicesF90(), ISGetIndices(), ISRestoreIndices()
1367: Concepts: index sets^getting indices in f90
1368: Concepts: indices of index set in f90
1370: M*/
1372: /*MC
1373: ISRestoreIndicesF90 - Restores an index set to a usable state after
1374: a call to ISGetIndicesF90().
1376: Synopsis:
1377: ISRestoreIndicesF90(IS x,{integer, pointer :: xx_v(:)},integer ierr)
1379: Not collective
1381: Input Parameters:
1382: +  x - index set
1383: -  xx_v - the Fortran90 pointer to the array
1385: Output Parameter:
1386: . ierr - error code
1389: Example of Usage:
1390: .vb
1391:     PetscInt, pointer :: xx_v(:)
1392: ....
1393: call ISGetIndicesF90(x,xx_v,ierr)
1394: a = xx_v(3)
1395: call ISRestoreIndicesF90(x,xx_v,ierr)
1396: .ve
1398: Level: intermediate
1400: .seealso: ISGetIndicesF90(), ISGetIndices(), ISRestoreIndices()
1402: M*/
1404: /*MC
1405: ISBlockGetIndicesF90 - Accesses the elements of an index set from Fortran90.
1406:     The user should call ISBlockRestoreIndicesF90() after having looked at the
1407: indices. The user should NOT change the indices.
1409: Synopsis:
1410: ISBlockGetIndicesF90(IS x,{integer, pointer :: xx_v(:)},integer ierr)
1412: Not collective
1414: Input Parameter:
1415: . x - index set
1417: Output Parameters:
1418: + xx_v - the Fortran90 pointer to the array
1419: - ierr - error code
1420: Example of Usage:
1421: .vb
1422:     PetscInt, pointer :: xx_v(:)
1423: ....
1424: call ISBlockGetIndicesF90(x,xx_v,ierr)
1425: a = xx_v(3)
1426: call ISBlockRestoreIndicesF90(x,xx_v,ierr)
1427: .ve
1429: Level: intermediate
1431: .seealso: ISBlockRestoreIndicesF90(), ISGetIndices(), ISRestoreIndices(),
1432: ISRestoreIndices()
1434: Concepts: index sets^getting block indices in f90
1435: Concepts: indices of index set in f90
1436: Concepts: block^ indices of index set in f90
1438: M*/
1440: /*MC
1441: ISBlockRestoreIndicesF90 - Restores an index set to a usable state after
1442: a call to ISBlockGetIndicesF90().
1444: Synopsis:
1445: ISBlockRestoreIndicesF90(IS x,{integer, pointer :: xx_v(:)},integer ierr)
1447: Not Collective
1449: Input Parameters:
1450: + x - index set
1451: - xx_v - the Fortran90 pointer to the array
1453: Output Parameter:
1454: . ierr - error code
1456: Example of Usage:
1457: .vb
1458:     PetscInt, pointer :: xx_v(:)
1459: ....
1460: call ISBlockGetIndicesF90(x,xx_v,ierr)
1461: a = xx_v(3)
1462: call ISBlockRestoreIndicesF90(x,xx_v,ierr)
1463: .ve
1465: Notes:
1466: Not yet supported for all F90 compilers
1468: Level: intermediate
1470: .seealso: ISBlockGetIndicesF90(), ISGetIndices(), ISRestoreIndices(), ISRestoreIndicesF90()
1472: M*/