/* Actual source code: psplit.c (petsc-3.14.6, 2021-03-30) */

#include <petscsys.h>

/*@
    PetscSplitOwnershipBlock - Given a global (or local) length, determines a local
    (or global) length via a simple formula. Splits so that each processor's local size
    is divisible by the block size.

   Collective (if N is PETSC_DECIDE)

   Input Parameters:
+    comm - MPI communicator that shares the object being divided
.    bs - block size
.    n - local length (or PETSC_DECIDE to have it set)
-    N - global length (or PETSC_DECIDE)

   Level: developer

   Notes:
     n and N cannot both be PETSC_DECIDE

     If one processor calls this with N of PETSC_DECIDE then all processors
     must; otherwise the program will hang.

.seealso: PetscSplitOwnership()

@*/
PetscErrorCode PetscSplitOwnershipBlock(MPI_Comm comm,PetscInt bs,PetscInt *n,PetscInt *N)
{
  PetscErrorCode ierr;
  PetscMPIInt    size,rank;

  if (*N == PETSC_DECIDE && *n == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Both n and N cannot be PETSC_DECIDE");

  if (*N == PETSC_DECIDE) {
    if (*n % bs != 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"local size %D not divisible by block size %D",*n,bs);
    ierr = MPIU_Allreduce(n,N,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
  } else if (*n == PETSC_DECIDE) {
    PetscInt Nbs = *N/bs;
    ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
    /* each process gets Nbs/size blocks; the first Nbs%size ranks get one extra block */
    *n   = bs*(Nbs/size + ((Nbs % size) > rank));
  }
  return(0);
}

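/*
   Usage sketch (illustrative only, not part of psplit.c): let PETSc choose a local size
   that is a multiple of the block size for a given global size. The variable names and
   the sizes N = 100, bs = 4 are hypothetical; this assumes PetscInitialize() has already
   been called and the communicator is PETSC_COMM_WORLD.

     PetscErrorCode ierr;
     PetscInt       n = PETSC_DECIDE, N = 100, bs = 4;

     ierr = PetscSplitOwnershipBlock(PETSC_COMM_WORLD,bs,&n,&N);CHKERRQ(ierr);

   On each rank n is now a multiple of bs and the local sizes sum to N = 100. For
   example, with 4 ranks there are Nbs = 100/4 = 25 blocks, so rank 0 gets 7 blocks
   (n = 28) and ranks 1-3 get 6 blocks each (n = 24).
*/
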
/*@
    PetscSplitOwnership - Given a global (or local) length, determines a local
    (or global) length via a simple formula.

   Collective (if n or N is PETSC_DECIDE)

   Input Parameters:
+    comm - MPI communicator that shares the object being divided
.    n - local length (or PETSC_DECIDE to have it set)
-    N - global length (or PETSC_DECIDE)

   Level: developer

   Notes:
     n and N cannot both be PETSC_DECIDE

     If one processor calls this with n or N of PETSC_DECIDE then all processors
     must. Otherwise, an error is thrown in debug mode, while the program will hang
     in optimized (i.e. configured --with-debugging=0) mode.

.seealso: PetscSplitOwnershipBlock()

@*/
PetscErrorCode PetscSplitOwnership(MPI_Comm comm,PetscInt *n,PetscInt *N)
{
  PetscErrorCode ierr;
  PetscMPIInt    size,rank;

  if (*N == PETSC_DECIDE && *n == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Both n and N cannot be PETSC_DECIDE\n likely a call to VecSetSizes() or MatSetSizes() is wrong.\nSee https://www.mcs.anl.gov/petsc/documentation/faq.html#split");
  if (PetscDefined(USE_DEBUG)) {
    /* verify that every rank agrees on which of n and N is PETSC_DECIDE */
    PetscMPIInt l[2],g[2];
    l[0] = (*n == PETSC_DECIDE) ? 1 : 0;
    l[1] = (*N == PETSC_DECIDE) ? 1 : 0;
    ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
    ierr = MPIU_Allreduce(l,g,2,MPI_INT,MPI_SUM,comm);CHKERRQ(ierr);
    if (g[0] && g[0] != size) SETERRQ(comm,PETSC_ERR_ARG_INCOMP,"All processes must supply PETSC_DECIDE for local size");
    if (g[1] && g[1] != size) SETERRQ(comm,PETSC_ERR_ARG_INCOMP,"All processes must supply PETSC_DECIDE for global size");
  }

  if (*N == PETSC_DECIDE) {
    /* sum the local sizes in 64-bit arithmetic so that overflow of PetscInt can be detected */
    PetscInt64 m = *n, M;
    ierr = MPIU_Allreduce(&m,&M,1,MPIU_INT64,MPI_SUM,comm);CHKERRQ(ierr);
    if (M > PETSC_MAX_INT) SETERRQ1(comm,PETSC_ERR_INT_OVERFLOW,"Global size overflow %" PetscInt64_FMT ". You may consider ./configure PETSc with --with-64-bit-indices for the case you are running", M);
    else *N = (PetscInt)M;
  } else if (*n == PETSC_DECIDE) {
    ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
    /* each process gets N/size entries; the first N%size ranks get one extra */
    *n = *N/size + ((*N % size) > rank);
  } else if (PetscDefined(USE_DEBUG)) {
    PetscInt tmp;
    ierr = MPIU_Allreduce(n,&tmp,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
    if (tmp != *N) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Sum of local lengths %D does not equal global length %D, my local length %D\n likely a call to VecSetSizes() or MatSetSizes() is wrong.\nSee https://www.mcs.anl.gov/petsc/documentation/faq.html#split",tmp,*N,*n);
  }
  return(0);
}

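/*
   Usage sketch (illustrative only, not part of psplit.c): the typical pattern behind
   VecSetSizes()/MatSetSizes(), where one of the two sizes is left to PETSc. The variable
   names and the global size N = 10 are hypothetical; assumes PetscInitialize() has been
   called.

     PetscErrorCode ierr;
     PetscInt       n = PETSC_DECIDE, N = 10;

     ierr = PetscSplitOwnership(PETSC_COMM_WORLD,&n,&N);CHKERRQ(ierr);

   With 3 ranks, N = 10 gives 10/3 = 3 entries per rank plus one extra on the first
   10%3 = 1 rank, i.e. local sizes 4,3,3.
*/
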
/*@
    PetscSplitOwnershipEqual - Given a global (or local) length, determines a local
    (or global) length via a simple formula, trying to have all local lengths equal.

   Collective (if n or N is PETSC_DECIDE)

   Input Parameters:
+    comm - MPI communicator that shares the object being divided
.    n - local length (or PETSC_DECIDE to have it set)
-    N - global length (or PETSC_DECIDE)

   Level: developer

   Notes:
     This is intended to be used with MATSCALAPACK, where the local size must
     be equal in all processes (except possibly the last one). For instance,
     the local sizes when splitting N=50 over 6 processes are 9,9,9,9,9,5

     n and N cannot both be PETSC_DECIDE

     If one processor calls this with n or N of PETSC_DECIDE then all processors
     must. Otherwise, an error is thrown in debug mode, while the program will hang
     in optimized (i.e. configured --with-debugging=0) mode.

.seealso: PetscSplitOwnership(), PetscSplitOwnershipBlock()

@*/
PetscErrorCode PetscSplitOwnershipEqual(MPI_Comm comm,PetscInt *n,PetscInt *N)
{
  PetscErrorCode ierr;
  PetscMPIInt    size,rank;

  if (*N == PETSC_DECIDE && *n == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Both n and N cannot be PETSC_DECIDE");
  if (PetscDefined(USE_DEBUG)) {
    /* verify that every rank agrees on which of n and N is PETSC_DECIDE */
    PetscMPIInt l[2],g[2];
    l[0] = (*n == PETSC_DECIDE) ? 1 : 0;
    l[1] = (*N == PETSC_DECIDE) ? 1 : 0;
    ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
    ierr = MPIU_Allreduce(l,g,2,MPI_INT,MPI_SUM,comm);CHKERRQ(ierr);
    if (g[0] && g[0] != size) SETERRQ(comm,PETSC_ERR_ARG_INCOMP,"All processes must supply PETSC_DECIDE for local size");
    if (g[1] && g[1] != size) SETERRQ(comm,PETSC_ERR_ARG_INCOMP,"All processes must supply PETSC_DECIDE for global size");
  }

  if (*N == PETSC_DECIDE) {
    /* sum the local sizes in 64-bit arithmetic so that overflow of PetscInt can be detected */
    PetscInt64 m = *n, M;
    ierr = MPIU_Allreduce(&m,&M,1,MPIU_INT64,MPI_SUM,comm);CHKERRQ(ierr);
    if (M > PETSC_MAX_INT) SETERRQ1(comm,PETSC_ERR_INT_OVERFLOW,"Global size overflow %" PetscInt64_FMT ". You may consider ./configure PETSc with --with-64-bit-indices for the case you are running", M);
    else *N = (PetscInt)M;
  } else if (*n == PETSC_DECIDE) {
    ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
    *n = *N/size;
    if (*N % size) {
      /* leading ranks get ceil(N/size) entries, the next rank gets the remainder, any later ranks get 0 */
      if ((rank+1)*(*n+1) <= *N) *n = *n+1;
      else if (rank*(*n+1) <= *N) *n = *N - rank*(*n+1);
      else *n = 0;
    }
  } else if (PetscDefined(USE_DEBUG)) {
    PetscInt tmp;
    ierr = MPIU_Allreduce(n,&tmp,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
    if (tmp != *N) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Sum of local lengths %D does not equal global length %D, my local length %D",tmp,*N,*n);
  }
  return(0);
}

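/*
   Usage sketch (illustrative only, not part of psplit.c): equal-sized local lengths as
   required by MATSCALAPACK. The variable names and the global size N = 50 are
   hypothetical; assumes PetscInitialize() has been called.

     PetscErrorCode ierr;
     PetscInt       n = PETSC_DECIDE, N = 50;

     ierr = PetscSplitOwnershipEqual(PETSC_COMM_WORLD,&n,&N);CHKERRQ(ierr);

   With 6 ranks, ceil(50/6) = 9, so ranks 0-4 each get 9 entries and rank 5 gets the
   remaining 5, matching the 9,9,9,9,9,5 split described in the manual page above.
*/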