Actual source code: psplit.c
#include <petscsys.h>
/*@
  PetscSplitOwnershipBlock - Given a global (or local) length, determines a local
  (or global) length via a simple formula. Splits so that each processor's local size
  is divisible by the block size.

  Collective (if N is PETSC_DECIDE)

  Input Parameters:
+ comm - MPI communicator that shares the object being divided
. bs - block size
. n - local length (or PETSC_DECIDE to have it set)
- N - global length (or PETSC_DECIDE)

  Level: developer

  Notes:
  n and N cannot both be PETSC_DECIDE

  If one processor calls this with N of PETSC_DECIDE then all processors
  must; otherwise the program will hang.

.seealso: PetscSplitOwnership()
@*/
PetscErrorCode PetscSplitOwnershipBlock(MPI_Comm comm,PetscInt bs,PetscInt *n,PetscInt *N)
{
  PetscMPIInt size,rank;

  if (*N == PETSC_DECIDE && *n == PETSC_DECIDE) SETERRQ(comm,PETSC_ERR_ARG_WRONGSTATE,"Both n and N cannot be PETSC_DECIDE");

  if (*N == PETSC_DECIDE) {
    if (*n % bs != 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Local size must be divisible by the block size");
    MPIU_Allreduce(n,N,1,MPIU_INT,MPI_SUM,comm);
  } else if (*n == PETSC_DECIDE) {
    PetscInt Nbs = *N/bs;
    MPI_Comm_size(comm,&size);
    MPI_Comm_rank(comm,&rank);
    *n = bs*(Nbs/size + ((Nbs % size) > rank));
  }
  return 0;
}
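
/*
   Illustrative usage sketch (not part of psplit.c): the helper name, the toy
   sizes, and the assumed 3-rank communicator below are made up for this
   example. It shows the common pattern of letting PetscSplitOwnershipBlock()
   pick a block-aligned local size from a known global size.
*/
static PetscErrorCode ExampleSplitOwnershipBlockUsage(void)
{
  PetscInt bs = 2, n = PETSC_DECIDE, N = 10;

  /* on 3 ranks this yields local sizes 4, 4, 2; each is divisible by bs = 2 */
  PetscSplitOwnershipBlock(PETSC_COMM_WORLD,bs,&n,&N);
  return 0;
}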
/*@
  PetscSplitOwnership - Given a global (or local) length, determines a local
  (or global) length via a simple formula

  Collective (if n or N is PETSC_DECIDE)

  Input Parameters:
+ comm - MPI communicator that shares the object being divided
. n - local length (or PETSC_DECIDE to have it set)
- N - global length (or PETSC_DECIDE)

  Level: developer

  Notes:
  n and N cannot both be PETSC_DECIDE

  If one processor calls this with n or N of PETSC_DECIDE then all processors
  must; otherwise an error is thrown in debug mode, while the program will hang
  in optimized (i.e. configured with --with-debugging=0) mode.

.seealso: PetscSplitOwnershipBlock()
@*/
PetscErrorCode PetscSplitOwnership(MPI_Comm comm,PetscInt *n,PetscInt *N)
{
  PetscMPIInt size,rank;

  if (*N == PETSC_DECIDE && *n == PETSC_DECIDE) SETERRQ(comm,PETSC_ERR_ARG_WRONGSTATE,"Both n and N cannot be PETSC_DECIDE");
  if (PetscDefined(USE_DEBUG)) {
    PetscMPIInt l[2],g[2];
    l[0] = (*n == PETSC_DECIDE) ? 1 : 0;
    l[1] = (*N == PETSC_DECIDE) ? 1 : 0;
    MPI_Comm_size(comm,&size);
    MPIU_Allreduce(l,g,2,MPI_INT,MPI_SUM,comm);
    /* every process must agree on which of n and N is PETSC_DECIDE */
    if (g[0] && g[0] != size) SETERRQ(comm,PETSC_ERR_ARG_INCOMP,"All processes must supply PETSC_DECIDE for local size");
    if (g[1] && g[1] != size) SETERRQ(comm,PETSC_ERR_ARG_INCOMP,"All processes must supply PETSC_DECIDE for global size");
  }

  if (*N == PETSC_DECIDE) {
    PetscInt64 m = *n, M;
    MPIU_Allreduce(&m,&M,1,MPIU_INT64,MPI_SUM,comm);
    if (M > PETSC_MAX_INT) SETERRQ(comm,PETSC_ERR_INT_OVERFLOW,"Global length overflows PetscInt; consider configuring PETSc with --with-64-bit-indices");
    else *N = (PetscInt)M;
  } else if (*n == PETSC_DECIDE) {
    MPI_Comm_size(comm,&size);
    MPI_Comm_rank(comm,&rank);
    *n = *N/size + ((*N % size) > rank);
  } else if (PetscDefined(USE_DEBUG)) {
    PetscInt tmp;
    MPIU_Allreduce(n,&tmp,1,MPIU_INT,MPI_SUM,comm);
    if (tmp != *N) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Sum of local lengths does not equal global length");
  }
  return 0;
}
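
/*
   Illustrative usage sketch (not part of psplit.c): the helper name, the global
   size 10, and the assumed 3-rank communicator are made up for this example. It
   shows PetscSplitOwnership() filling in the local length from a global one.
*/
static PetscErrorCode ExampleSplitOwnershipUsage(void)
{
  PetscInt n = PETSC_DECIDE, N = 10;

  /* on 3 ranks: N/size = 3 and N % size = 1, so the local sizes are 4, 3, 3 */
  PetscSplitOwnership(PETSC_COMM_WORLD,&n,&N);
  return 0;
}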
/*@
  PetscSplitOwnershipEqual - Given a global (or local) length, determines a local
  (or global) length via a simple formula, trying to have all local lengths equal

  Collective (if n or N is PETSC_DECIDE)

  Input Parameters:
+ comm - MPI communicator that shares the object being divided
. n - local length (or PETSC_DECIDE to have it set)
- N - global length (or PETSC_DECIDE)

  Level: developer

  Notes:
  This is intended to be used with MATSCALAPACK, where the local size must
  be equal in all processes (except possibly the last one). For instance,
  the local sizes when splitting N=50 with 6 processes are 9,9,9,9,9,5

  n and N cannot both be PETSC_DECIDE

  If one processor calls this with n or N of PETSC_DECIDE then all processors
  must; otherwise an error is thrown in debug mode, while the program will hang
  in optimized (i.e. configured with --with-debugging=0) mode.

.seealso: PetscSplitOwnership(), PetscSplitOwnershipBlock()
@*/
PetscErrorCode PetscSplitOwnershipEqual(MPI_Comm comm,PetscInt *n,PetscInt *N)
{
  PetscMPIInt size,rank;

  if (*N == PETSC_DECIDE && *n == PETSC_DECIDE) SETERRQ(comm,PETSC_ERR_ARG_WRONGSTATE,"Both n and N cannot be PETSC_DECIDE");
  if (PetscDefined(USE_DEBUG)) {
    PetscMPIInt l[2],g[2];
    l[0] = (*n == PETSC_DECIDE) ? 1 : 0;
    l[1] = (*N == PETSC_DECIDE) ? 1 : 0;
    MPI_Comm_size(comm,&size);
    MPIU_Allreduce(l,g,2,MPI_INT,MPI_SUM,comm);
    /* every process must agree on which of n and N is PETSC_DECIDE */
    if (g[0] && g[0] != size) SETERRQ(comm,PETSC_ERR_ARG_INCOMP,"All processes must supply PETSC_DECIDE for local size");
    if (g[1] && g[1] != size) SETERRQ(comm,PETSC_ERR_ARG_INCOMP,"All processes must supply PETSC_DECIDE for global size");
  }

  if (*N == PETSC_DECIDE) {
    PetscInt64 m = *n, M;
    MPIU_Allreduce(&m,&M,1,MPIU_INT64,MPI_SUM,comm);
    if (M > PETSC_MAX_INT) SETERRQ(comm,PETSC_ERR_INT_OVERFLOW,"Global length overflows PetscInt; consider configuring PETSc with --with-64-bit-indices");
    else *N = (PetscInt)M;
  } else if (*n == PETSC_DECIDE) {
    MPI_Comm_size(comm,&size);
    MPI_Comm_rank(comm,&rank);
    *n = *N/size;
    if (*N % size) {
      if ((rank+1)*(*n+1) <= *N) *n = *n+1;
      else if (rank*(*n+1) <= *N) *n = *N - rank*(*n+1);
      else *n = 0;
    }
  } else if (PetscDefined(USE_DEBUG)) {
    PetscInt tmp;
    MPIU_Allreduce(n,&tmp,1,MPIU_INT,MPI_SUM,comm);
    if (tmp != *N) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Sum of local lengths does not equal global length");
  }
  return 0;
}
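
/*
   Illustrative usage sketch (not part of psplit.c): the helper name is made up,
   and the sizes reproduce the N=50, 6-process example from the manual page
   above, where PetscSplitOwnershipEqual() gives local sizes 9,9,9,9,9,5.
*/
static PetscErrorCode ExampleSplitOwnershipEqualUsage(void)
{
  PetscInt n = PETSC_DECIDE, N = 50;

  /* every rank gets 9 entries except the last one, which gets the remaining 5 */
  PetscSplitOwnershipEqual(PETSC_COMM_WORLD,&n,&N);
  return 0;
}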