Actual source code: psplit.c

petsc-3.4.5 2014-06-29
#include <petscsys.h>           /*I    "petscsys.h" I*/

/*@
    PetscSplitOwnershipBlock - Given a global (or local) length, determines a local
        (or global) length via a simple formula. Splits so that each processor's
        local size is divisible by the block size.

   Collective on MPI_Comm (if N is PETSC_DECIDE)

   Input Parameters:
+    comm - MPI communicator that shares the object being divided
.    bs - block size
.    n - local length (or PETSC_DECIDE to have it set)
-    N - global length (or PETSC_DECIDE)

  Level: developer

   Notes:
     n and N cannot both be PETSC_DECIDE

     If one processor calls this with N set to PETSC_DECIDE then all processors
     must; otherwise the program will hang.

.seealso: PetscSplitOwnership()

@*/
PetscErrorCode  PetscSplitOwnershipBlock(MPI_Comm comm,PetscInt bs,PetscInt *n,PetscInt *N)
{
  PetscMPIInt    size,rank;

  if (*N == PETSC_DECIDE && *n == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Both n and N cannot be PETSC_DECIDE");

  if (*N == PETSC_DECIDE) {
    /* Global length is the sum of the (block-aligned) local lengths */
    if (*n % bs != 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"local size %D not divisible by block size %D",*n,bs);
    MPI_Allreduce(n,N,1,MPIU_INT,MPI_SUM,comm);
  } else if (*n == PETSC_DECIDE) {
    /* Split the Nbs blocks evenly; ranks below the remainder get one extra block */
    PetscInt Nbs = *N/bs;
    MPI_Comm_size(comm,&size);
    MPI_Comm_rank(comm,&rank);
    *n   = bs*(Nbs/size + ((Nbs % size) > rank));
  }
  return(0);
}
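
A minimal usage sketch (a hypothetical driver, not part of psplit.c; it assumes a
standard PETSc 3.4 build and an MPI launcher). With bs = 4 and N = 100 there are
25 blocks; on 3 ranks, 25/3 = 8 with remainder 1, so rank 0 gets 9 blocks (36
entries) and ranks 1 and 2 get 8 blocks (32 entries) each.

#include <petscsys.h>

int main(int argc,char **argv)
{
  PetscErrorCode ierr;
  PetscInt       bs = 4,n = PETSC_DECIDE,N = 100;   /* hypothetical example sizes */

  ierr = PetscInitialize(&argc,&argv,NULL,NULL);if (ierr) return ierr;
  ierr = PetscSplitOwnershipBlock(PETSC_COMM_WORLD,bs,&n,&N);CHKERRQ(ierr);
  /* On 3 ranks the 25 blocks split as 9, 8, 8, so n is 36, 32, 32 */
  ierr = PetscPrintf(PETSC_COMM_SELF,"local length %D of global length %D\n",n,N);CHKERRQ(ierr);
  ierr = PetscFinalize();
  return ierr;
}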


/*@
    PetscSplitOwnership - Given a global (or local) length, determines a local
        (or global) length via a simple formula

   Collective on MPI_Comm (if N is PETSC_DECIDE)

   Input Parameters:
+    comm - MPI communicator that shares the object being divided
.    n - local length (or PETSC_DECIDE to have it set)
-    N - global length (or PETSC_DECIDE)

  Level: developer

   Notes:
     n and N cannot both be PETSC_DECIDE

     If one processor calls this with N set to PETSC_DECIDE then all processors
     must; otherwise the program will hang.

.seealso: PetscSplitOwnershipBlock()

@*/
PetscErrorCode  PetscSplitOwnership(MPI_Comm comm,PetscInt *n,PetscInt *N)
{
  PetscMPIInt    size,rank;

  if (*N == PETSC_DECIDE && *n == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Both n and N cannot be PETSC_DECIDE\n  likely a call to VecSetSizes() or MatSetSizes() is wrong.\nSee http://www.mcs.anl.gov/petsc/documentation/faq.html#split");

  if (*N == PETSC_DECIDE) {
    /* Global length is the sum of the local lengths */
    MPI_Allreduce(n,N,1,MPIU_INT,MPI_SUM,comm);
  } else if (*n == PETSC_DECIDE) {
    /* Even split; the first N % size ranks each get one extra entry */
    MPI_Comm_size(comm,&size);
    MPI_Comm_rank(comm,&rank);
    *n   = *N/size + ((*N % size) > rank);
#if defined(PETSC_USE_DEBUG)
  } else {
    /* Both lengths given: in debug builds, verify the local lengths sum to N */
    PetscInt tmp;
    MPI_Allreduce(n,&tmp,1,MPIU_INT,MPI_SUM,comm);
    if (tmp != *N) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Sum of local lengths %D does not equal global length %D, my local length %D\n  likely a call to VecSetSizes() or MatSetSizes() is wrong.\nSee http://www.mcs.anl.gov/petsc/documentation/faq.html#split",tmp,*N,*n);
#endif
  }
  return(0);
}
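
A minimal usage sketch (again a hypothetical driver, not part of psplit.c;
assumes a standard PETSc 3.4 build). With N = 10 on 4 ranks, *N/size = 2 with
remainder 2, so ranks 0 and 1 receive 3 entries while ranks 2 and 3 receive 2.

#include <petscsys.h>

int main(int argc,char **argv)
{
  PetscErrorCode ierr;
  PetscInt       n = PETSC_DECIDE,N = 10;   /* hypothetical global length */

  ierr = PetscInitialize(&argc,&argv,NULL,NULL);if (ierr) return ierr;
  ierr = PetscSplitOwnership(PETSC_COMM_WORLD,&n,&N);CHKERRQ(ierr);
  /* On 4 ranks, N = 10 splits as 3, 3, 2, 2 (the remainder goes to the low ranks) */
  ierr = PetscPrintf(PETSC_COMM_SELF,"local length %D of global length %D\n",n,N);CHKERRQ(ierr);
  ierr = PetscFinalize();
  return ierr;
}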