psplit.c
#include <petscsys.h>
/*@
   PetscSplitOwnershipBlock - Given a global (or local) length, determines a local
   (or global) length via a simple formula. Splits so that each processor's local size
   is divisible by the block size.

   Collective (if N is PETSC_DECIDE)

   Input Parameters:
+  comm - MPI communicator that shares the object being divided
.  bs   - block size
.  n    - local length (or PETSC_DECIDE to have it set)
-  N    - global length (or PETSC_DECIDE)

   Level: developer

   Notes:
   n and N cannot both be PETSC_DECIDE

   If one processor calls this with N of PETSC_DECIDE then all processors
   must; otherwise the program will hang.

.seealso: PetscSplitOwnership()
@*/
PetscErrorCode PetscSplitOwnershipBlock(MPI_Comm comm,PetscInt bs,PetscInt *n,PetscInt *N)
{
  PetscErrorCode ierr;
  PetscMPIInt    size,rank;

  PetscFunctionBegin;
  if (*N == PETSC_DECIDE && *n == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Both n and N cannot be PETSC_DECIDE");

  if (*N == PETSC_DECIDE) {
    if (*n % bs != 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"local size %D not divisible by block size %D",*n,bs);
    ierr = MPIU_Allreduce(n,N,1,MPIU_INT,MPI_SUM,comm);CHKERRMPI(ierr);
  } else if (*n == PETSC_DECIDE) {
    PetscInt Nbs = *N/bs;
    ierr = MPI_Comm_size(comm,&size);CHKERRMPI(ierr);
    ierr = MPI_Comm_rank(comm,&rank);CHKERRMPI(ierr);
    /* distribute the blocks as evenly as possible; the first Nbs % size ranks get one extra block */
    *n   = bs*(Nbs/size + ((Nbs % size) > rank));
  }
  PetscFunctionReturn(0);
}
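
As an illustration of the block-aligned split, here is a minimal standalone driver (a hypothetical sketch, not part of psplit.c; the sizes N = 30 and bs = 3 are arbitrary choices). On 4 MPI ranks there are Nbs = 30/3 = 10 blocks, split as 3,3,2,2 blocks per rank, i.e. local sizes 9,9,6,6.

#include <petscsys.h>

int main(int argc,char **argv)
{
  PetscErrorCode ierr;
  PetscInt       n = PETSC_DECIDE, N = 30;

  ierr = PetscInitialize(&argc,&argv,NULL,NULL);if (ierr) return ierr;
  /* ask for a block-aligned split of N = 30 with block size 3 */
  ierr = PetscSplitOwnershipBlock(PETSC_COMM_WORLD,3,&n,&N);CHKERRQ(ierr);
  /* prints the local sizes in rank order, e.g. 9,9,6,6 on 4 ranks */
  ierr = PetscSynchronizedPrintf(PETSC_COMM_WORLD,"local size %D of global size %D\n",n,N);CHKERRQ(ierr);
  ierr = PetscSynchronizedFlush(PETSC_COMM_WORLD,PETSC_STDOUT);CHKERRQ(ierr);
  ierr = PetscFinalize();
  return ierr;
}
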
/*@
   PetscSplitOwnership - Given a global (or local) length, determines a local
   (or global) length via a simple formula.

   Collective (if n or N is PETSC_DECIDE)

   Input Parameters:
+  comm - MPI communicator that shares the object being divided
.  n    - local length (or PETSC_DECIDE to have it set)
-  N    - global length (or PETSC_DECIDE)

   Level: developer

   Notes:
   n and N cannot both be PETSC_DECIDE

   If one processor calls this with n or N of PETSC_DECIDE then all processors
   must. Otherwise, an error is thrown in debug mode, while the program will hang
   in optimized (i.e. configured --with-debugging=0) mode.

.seealso: PetscSplitOwnershipBlock()
@*/
PetscErrorCode PetscSplitOwnership(MPI_Comm comm,PetscInt *n,PetscInt *N)
{
  PetscErrorCode ierr;
  PetscMPIInt    size,rank;

  PetscFunctionBegin;
  if (*N == PETSC_DECIDE && *n == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Both n and N cannot be PETSC_DECIDE\n likely a call to VecSetSizes() or MatSetSizes() is wrong.\nSee https://petsc.org/release/faq/#split-ownership");
  if (PetscDefined(USE_DEBUG)) {
    /* verify that all ranks agree on which of n and N is PETSC_DECIDE */
    PetscMPIInt l[2],g[2];
    l[0] = (*n == PETSC_DECIDE) ? 1 : 0;
    l[1] = (*N == PETSC_DECIDE) ? 1 : 0;
    ierr = MPI_Comm_size(comm,&size);CHKERRMPI(ierr);
    ierr = MPIU_Allreduce(l,g,2,MPI_INT,MPI_SUM,comm);CHKERRMPI(ierr);
    if (g[0] && g[0] != size) SETERRQ(comm,PETSC_ERR_ARG_INCOMP,"All processes must supply PETSC_DECIDE for local size");
    if (g[1] && g[1] != size) SETERRQ(comm,PETSC_ERR_ARG_INCOMP,"All processes must supply PETSC_DECIDE for global size");
  }

  if (*N == PETSC_DECIDE) {
    /* sum the local sizes in 64-bit arithmetic so overflow of PetscInt can be detected */
    PetscInt64 m = *n, M;
    ierr = MPIU_Allreduce(&m,&M,1,MPIU_INT64,MPI_SUM,comm);CHKERRMPI(ierr);
    if (M > PETSC_MAX_INT) SETERRQ1(comm,PETSC_ERR_INT_OVERFLOW,"Global size overflow %" PetscInt64_FMT ". You may consider ./configure PETSc with --with-64-bit-indices for the case you are running", M);
    else *N = (PetscInt)M;
  } else if (*n == PETSC_DECIDE) {
    ierr = MPI_Comm_size(comm,&size);CHKERRMPI(ierr);
    ierr = MPI_Comm_rank(comm,&rank);CHKERRMPI(ierr);
    /* the first N % size ranks each get one extra entry */
    *n   = *N/size + ((*N % size) > rank);
  } else if (PetscDefined(USE_DEBUG)) {
    PetscInt tmp;
    ierr = MPIU_Allreduce(n,&tmp,1,MPIU_INT,MPI_SUM,comm);CHKERRMPI(ierr);
    if (tmp != *N) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Sum of local lengths %D does not equal global length %D, my local length %D\n likely a call to VecSetSizes() or MatSetSizes() is wrong.\nSee https://petsc.org/release/faq/#split-ownership",tmp,*N,*n);
  }
  PetscFunctionReturn(0);
}
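
The ownership formula above hands one extra entry to each of the first N % size ranks. A minimal standalone sketch (hypothetical driver, not part of psplit.c; N = 10 is an arbitrary choice): on 4 ranks, 10/4 = 2 with remainder 2, so the local sizes come out 3,3,2,2.

#include <petscsys.h>

int main(int argc,char **argv)
{
  PetscErrorCode ierr;
  PetscInt       n = PETSC_DECIDE, N = 10;

  ierr = PetscInitialize(&argc,&argv,NULL,NULL);if (ierr) return ierr;
  /* let PETSc choose the local size for a given global size */
  ierr = PetscSplitOwnership(PETSC_COMM_WORLD,&n,&N);CHKERRQ(ierr);
  /* on 4 ranks this prints 3,3,2,2; the sizes always sum to N */
  ierr = PetscSynchronizedPrintf(PETSC_COMM_WORLD,"local size %D of global size %D\n",n,N);CHKERRQ(ierr);
  ierr = PetscSynchronizedFlush(PETSC_COMM_WORLD,PETSC_STDOUT);CHKERRQ(ierr);
  ierr = PetscFinalize();
  return ierr;
}

Passing PETSC_DECIDE for N instead runs the reverse direction: the local sizes are summed (in 64-bit arithmetic, as above) to produce the global size.
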
/*@
   PetscSplitOwnershipEqual - Given a global (or local) length, determines a local
   (or global) length via a simple formula, trying to make all local lengths equal.

   Collective (if n or N is PETSC_DECIDE)

   Input Parameters:
+  comm - MPI communicator that shares the object being divided
.  n    - local length (or PETSC_DECIDE to have it set)
-  N    - global length (or PETSC_DECIDE)

   Level: developer

   Notes:
   This is intended to be used with MATSCALAPACK, where the local size must
   be equal in all processes (except possibly the last one). For instance,
   the local sizes when splitting N=50 with 6 processes are 9,9,9,9,9,5.

   n and N cannot both be PETSC_DECIDE

   If one processor calls this with n or N of PETSC_DECIDE then all processors
   must. Otherwise, an error is thrown in debug mode, while the program will hang
   in optimized (i.e. configured --with-debugging=0) mode.

.seealso: PetscSplitOwnership(), PetscSplitOwnershipBlock()
@*/
PetscErrorCode PetscSplitOwnershipEqual(MPI_Comm comm,PetscInt *n,PetscInt *N)
{
  PetscErrorCode ierr;
  PetscMPIInt    size,rank;

  PetscFunctionBegin;
  if (*N == PETSC_DECIDE && *n == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Both n and N cannot be PETSC_DECIDE");
  if (PetscDefined(USE_DEBUG)) {
    /* verify that all ranks agree on which of n and N is PETSC_DECIDE */
    PetscMPIInt l[2],g[2];
    l[0] = (*n == PETSC_DECIDE) ? 1 : 0;
    l[1] = (*N == PETSC_DECIDE) ? 1 : 0;
    ierr = MPI_Comm_size(comm,&size);CHKERRMPI(ierr);
    ierr = MPIU_Allreduce(l,g,2,MPI_INT,MPI_SUM,comm);CHKERRMPI(ierr);
    if (g[0] && g[0] != size) SETERRQ(comm,PETSC_ERR_ARG_INCOMP,"All processes must supply PETSC_DECIDE for local size");
    if (g[1] && g[1] != size) SETERRQ(comm,PETSC_ERR_ARG_INCOMP,"All processes must supply PETSC_DECIDE for global size");
  }

  if (*N == PETSC_DECIDE) {
    /* sum the local sizes in 64-bit arithmetic so overflow of PetscInt can be detected */
    PetscInt64 m = *n, M;
    ierr = MPIU_Allreduce(&m,&M,1,MPIU_INT64,MPI_SUM,comm);CHKERRMPI(ierr);
    if (M > PETSC_MAX_INT) SETERRQ1(comm,PETSC_ERR_INT_OVERFLOW,"Global size overflow %" PetscInt64_FMT ". You may consider ./configure PETSc with --with-64-bit-indices for the case you are running", M);
    else *N = (PetscInt)M;
  } else if (*n == PETSC_DECIDE) {
    ierr = MPI_Comm_size(comm,&size);CHKERRMPI(ierr);
    ierr = MPI_Comm_rank(comm,&rank);CHKERRMPI(ierr);
    *n   = *N/size;
    if (*N % size) {
      /* early ranks take a full chunk of *n+1 entries, the first rank that cannot takes what remains, later ranks get nothing */
      if ((rank+1)*(*n+1) <= *N)  *n = *n+1;
      else if (rank*(*n+1) <= *N) *n = *N - rank*(*n+1);
      else                        *n = 0;
    }
  } else if (PetscDefined(USE_DEBUG)) {
    PetscInt tmp;
    ierr = MPIU_Allreduce(n,&tmp,1,MPIU_INT,MPI_SUM,comm);CHKERRMPI(ierr);
    if (tmp != *N) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Sum of local lengths %D does not equal global length %D, my local length %D",tmp,*N,*n);
  }
  PetscFunctionReturn(0);
}
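
A standalone sketch (hypothetical, not part of psplit.c) reproducing the documented example: with N = 50 on 6 ranks, each of the first five ranks takes a full chunk of 50/6 + 1 = 9 entries and the last rank takes the remaining 5, giving 9,9,9,9,9,5. Run with mpiexec -n 6.

#include <petscsys.h>

int main(int argc,char **argv)
{
  PetscErrorCode ierr;
  PetscInt       n = PETSC_DECIDE, N = 50;

  ierr = PetscInitialize(&argc,&argv,NULL,NULL);if (ierr) return ierr;
  /* equal-split ownership: all ranks but the last get the same size */
  ierr = PetscSplitOwnershipEqual(PETSC_COMM_WORLD,&n,&N);CHKERRQ(ierr);
  /* on 6 ranks this prints 9,9,9,9,9,5 in rank order */
  ierr = PetscSynchronizedPrintf(PETSC_COMM_WORLD,"local size %D of global size %D\n",n,N);CHKERRQ(ierr);
  ierr = PetscSynchronizedFlush(PETSC_COMM_WORLD,PETSC_STDOUT);CHKERRQ(ierr);
  ierr = PetscFinalize();
  return ierr;
}

Note the contrast with PetscSplitOwnership(), which spreads the remainder one entry at a time across the leading ranks; the equal split instead concentrates the shortfall in the trailing ranks, as ScaLAPACK's layout requires.
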