Actual source code: psplit.c (petsc-3.10.5, 2019-03-28)
#include <petscsys.h>
/*@
    PetscSplitOwnershipBlock - Given a global (or local) length, determines a local
        (or global) length via a simple formula. Splits so that each processor's local size
        is divisible by the block size.

   Collective on MPI_Comm (if N is PETSC_DECIDE)

   Input Parameters:
+    comm - MPI communicator that shares the object being divided
.    bs   - block size
.    n    - local length (or PETSC_DECIDE to have it set)
-    N    - global length (or PETSC_DECIDE)

   Level: developer

   Notes:
     n and N cannot both be PETSC_DECIDE

     If one processor calls this with N of PETSC_DECIDE then all processors
     must; otherwise the program will hang.

.seealso: PetscSplitOwnership()

@*/
PetscErrorCode PetscSplitOwnershipBlock(MPI_Comm comm,PetscInt bs,PetscInt *n,PetscInt *N)
{
  PetscErrorCode ierr;
  PetscMPIInt    size,rank;

  PetscFunctionBegin;
  if (*N == PETSC_DECIDE && *n == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Both n and N cannot be PETSC_DECIDE");

  if (*N == PETSC_DECIDE) {
    /* global length is the sum of the block-aligned local lengths */
    if (*n % bs != 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"local size %D not divisible by block size %D",*n,bs);
    ierr = MPIU_Allreduce(n,N,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
  } else if (*n == PETSC_DECIDE) {
    /* split the N/bs blocks across the ranks; the first (N/bs) % size ranks get one extra block */
    PetscInt Nbs = *N/bs;
    ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
    *n   = bs*(Nbs/size + ((Nbs % size) > rank));
  }
  PetscFunctionReturn(0);
}
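
/*
   A minimal usage sketch, not part of the release psplit.c: it assumes PetscInitialize()
   has already been called and shows the typical pattern of fixing the global length and
   letting PetscSplitOwnershipBlock() choose a block-aligned local length. The helper name
   ExampleSplitOwnershipBlock and the sizes used are illustrative only.
*/
static PetscErrorCode ExampleSplitOwnershipBlock(void)
{
  PetscErrorCode ierr;
  PetscInt       bs = 2,n = PETSC_DECIDE,N = 10;

  PetscFunctionBegin;
  ierr = PetscSplitOwnershipBlock(PETSC_COMM_WORLD,bs,&n,&N);CHKERRQ(ierr);
  /* on two MPI ranks the five blocks are split 3/2, so n becomes 6 on rank 0 and 4 on rank 1 */
  PetscFunctionReturn(0);
}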
/*@
    PetscSplitOwnership - Given a global (or local) length, determines a local
        (or global) length via a simple formula.

   Collective on MPI_Comm (if N is PETSC_DECIDE)

   Input Parameters:
+    comm - MPI communicator that shares the object being divided
.    n    - local length (or PETSC_DECIDE to have it set)
-    N    - global length (or PETSC_DECIDE)

   Level: developer

   Notes:
     n and N cannot both be PETSC_DECIDE

     If one processor calls this with N of PETSC_DECIDE then all processors
     must; otherwise the program will hang.

.seealso: PetscSplitOwnershipBlock()

@*/
PetscErrorCode PetscSplitOwnership(MPI_Comm comm,PetscInt *n,PetscInt *N)
{
  PetscErrorCode ierr;
  PetscMPIInt    size,rank;

  PetscFunctionBegin;
  if (*N == PETSC_DECIDE && *n == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Both n and N cannot be PETSC_DECIDE\n likely a call to VecSetSizes() or MatSetSizes() is wrong.\nSee http://www.mcs.anl.gov/petsc/documentation/faq.html#split");

  if (*N == PETSC_DECIDE) {
    /* global length is the sum of the local lengths */
    ierr = MPIU_Allreduce(n,N,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
  } else if (*n == PETSC_DECIDE) {
    /* even split: the first N % size ranks get one extra entry */
    ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
    *n   = *N/size + ((*N % size) > rank);
#if defined(PETSC_USE_DEBUG)
  } else {
    /* both lengths were given; in debug builds verify that the local lengths sum to the global length */
    PetscInt tmp;
    ierr = MPIU_Allreduce(n,&tmp,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
    if (tmp != *N) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Sum of local lengths %D does not equal global length %D, my local length %D\n likely a call to VecSetSizes() or MatSetSizes() is wrong.\nSee http://www.mcs.anl.gov/petsc/documentation/faq.html#split",tmp,*N,*n);
#endif
  }
  PetscFunctionReturn(0);
}
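
/*
   A minimal usage sketch, not part of the release psplit.c: it assumes PetscInitialize()
   has already been called and shows PetscSplitOwnership() choosing the local length from a
   fixed global length. The helper name ExampleSplitOwnership and the sizes are illustrative only.
*/
static PetscErrorCode ExampleSplitOwnership(void)
{
  PetscErrorCode ierr;
  PetscInt       n = PETSC_DECIDE,N = 10;

  PetscFunctionBegin;
  ierr = PetscSplitOwnership(PETSC_COMM_WORLD,&n,&N);CHKERRQ(ierr);
  /* with four MPI ranks and N = 10, the local lengths are 3,3,2,2: the first N % size ranks get one extra entry */
  PetscFunctionReturn(0);
}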