Actual source code: shvec.c

petsc-3.14.6 2021-03-30

/*
   This file contains routines for parallel vector operations that use shared memory
 */
#include <../src/vec/vec/impls/mpi/pvecimpl.h>

#if defined(PETSC_USE_SHARED_MEMORY)

extern PetscErrorCode PetscSharedMalloc(MPI_Comm,PetscInt,PetscInt,void**);

PetscErrorCode VecDuplicate_Shared(Vec win,Vec *v)
{
  Vec_MPI        *w = (Vec_MPI*)win->data;
  PetscScalar    *array;

  /* first processor allocates entire array and sends its address to the others */
  PetscSharedMalloc(PetscObjectComm((PetscObject)win),win->map->n*sizeof(PetscScalar),win->map->N*sizeof(PetscScalar),(void**)&array);

  VecCreate(PetscObjectComm((PetscObject)win),v);
  VecSetSizes(*v,win->map->n,win->map->N);
  VecCreate_MPI_Private(*v,PETSC_FALSE,w->nghost,array);
  PetscLayoutReference(win->map,&(*v)->map);

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash   = win->stash.donotstash;
  (*v)->stash.ignorenegidx = win->stash.ignorenegidx;

  PetscObjectListDuplicate(((PetscObject)win)->olist,&((PetscObject)*v)->olist);
  PetscFunctionListDuplicate(((PetscObject)win)->qlist,&((PetscObject)*v)->qlist);

  (*v)->ops->duplicate = VecDuplicate_Shared;
  (*v)->bstash.bs      = win->bstash.bs;
  return(0);
}


PETSC_EXTERN PetscErrorCode VecCreate_Shared(Vec vv)
{
  PetscScalar    *array;

  PetscSplitOwnership(PetscObjectComm((PetscObject)vv),&vv->map->n,&vv->map->N);
  PetscSharedMalloc(PetscObjectComm((PetscObject)vv),vv->map->n*sizeof(PetscScalar),vv->map->N*sizeof(PetscScalar),(void**)&array);

  VecCreate_MPI_Private(vv,PETSC_FALSE,0,array);
  vv->ops->duplicate = VecDuplicate_Shared;
  return(0);
}
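
/*
   A hedged note, not from the original file: VecCreate_Shared() is the constructor that
   VecSetType(v,VECSHARED) dispatches to; the association is made elsewhere in the Vec
   registration code with a call roughly of the form

     VecRegister(VECSHARED, VecCreate_Shared);
*/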

/* ----------------------------------------------------------------------------------------
     Code to manage shared memory allocation using standard Unix shared memory
*/
#include <petscsys.h>
#if defined(PETSC_HAVE_PWD_H)
#include <pwd.h>
#endif
#include <ctype.h>
#include <sys/stat.h>
#if defined(PETSC_HAVE_UNISTD_H)
#include <unistd.h>
#endif
#if defined(PETSC_HAVE_SYS_UTSNAME_H)
#include <sys/utsname.h>
#endif
#include <fcntl.h>
#include <time.h>
#if defined(PETSC_HAVE_SYS_SYSTEMINFO_H)
#include <sys/systeminfo.h>
#endif
#include <sys/shm.h>
#include <sys/mman.h>

static PetscMPIInt Petsc_ShmComm_keyval = MPI_KEYVAL_INVALID;

/*
   Private routine to delete internal storage when a communicator is freed.
   This is called by MPI, not by users.

   The binding for the first argument changed from MPI 1.0 to 1.1; in 1.0
   it was MPI_Comm *comm.
*/
static PetscErrorCode Petsc_DeleteShared(MPI_Comm comm,PetscMPIInt keyval,void *attr_val,void *extra_state)
{
  PetscFunctionBegin;
  PetscFree(attr_val);
  PetscFunctionReturn(MPI_SUCCESS);
}
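
/*
   A hedged sketch, not part of the original file: the keyval declared above would typically be
   created once with MPI_Comm_create_keyval() and attached to a communicator so that
   Petsc_DeleteShared() runs when that communicator is freed. "state" is a hypothetical
   attribute value used only for illustration.

     if (Petsc_ShmComm_keyval == MPI_KEYVAL_INVALID) {
       MPI_Comm_create_keyval(MPI_COMM_NULL_COPY_FN,(MPI_Comm_delete_attr_function*)Petsc_DeleteShared,&Petsc_ShmComm_keyval,NULL);
     }
     MPI_Comm_set_attr(comm,Petsc_ShmComm_keyval,state);
*/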

/*
    This routine is still incomplete and needs work.

    For this to work on Apple Mac OS X you will likely need to add something like the following
    to the file /etc/sysctl.conf:

      kern.sysv.shmmax=67108864
      kern.sysv.shmmin=1
      kern.sysv.shmmni=32
      kern.sysv.shmseg=512
      kern.sysv.shmall=1024

    This does not currently free the shared memory after the program runs. Use the Unix command
    ipcs to list the shared memory segments in use and ipcrm to remove them.
*/
PetscErrorCode PetscSharedMalloc(MPI_Comm comm,PetscInt llen,PetscInt len,void **result)
{
  PetscInt       shift;
  PetscMPIInt    rank,flag;
  int            *arena,id,key = 0;
  char           *value;

  *result = 0;

  MPI_Scan(&llen,&shift,1,MPIU_INT,MPI_SUM,comm);
  shift -= llen;

  MPI_Comm_rank(comm,&rank);
  if (!rank) {
    id = shmget(key,len, 0666 |IPC_CREAT);
    if (id == -1) {
      perror("Unable to malloc shared memory");
      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Unable to malloc shared memory");
    }
  } else {
    id = shmget(key,len, 0666);
    if (id == -1) {
      perror("Unable to malloc shared memory");
      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Unable to malloc shared memory");
    }
  }
  value = shmat(id,(void*)0,0);
  if (value == (char*)-1) {
    perror("Unable to access shared memory allocated");
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Unable to access shared memory allocated");
  }
  *result = (void*) (value + shift);
  return(0);
}
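
/*
   A hedged sketch, not in the original code: since the segment obtained above is never released
   (see the note before this routine), a matching cleanup would detach the mapping on every
   process and mark the segment for removal on rank 0. "PetscSharedFree" is a hypothetical name,
   and it assumes the caller kept the address returned by shmat() and the segment id, which
   PetscSharedMalloc() does not currently hand back.

     PetscErrorCode PetscSharedFree(MPI_Comm comm,void *base,int id)
     {
       PetscMPIInt rank;

       MPI_Comm_rank(comm,&rank);
       if (shmdt(base) == -1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Unable to detach shared memory");
       if (!rank && shmctl(id,IPC_RMID,NULL) == -1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Unable to remove shared memory");
       return(0);
     }
*/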

#else

PETSC_EXTERN PetscErrorCode VecCreate_Shared(Vec vv)
{
  PetscMPIInt    size;

  MPI_Comm_size(PetscObjectComm((PetscObject)vv),&size);
  if (size > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP_SYS,"Not supported for shared memory vector objects on this machine");
  VecCreate_Seq(vv);
  return(0);
}

#endif

/*@
   VecCreateShared - Creates a parallel vector that uses shared memory.

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length (or PETSC_DECIDE to have it calculated if N is given)
-  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)

   Output Parameter:
.  vv - the vector

   Collective

   Notes:
   Currently VecCreateShared() is available only on the SGI; otherwise,
   this routine is the same as VecCreateMPI().

   Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
   same type as an existing vector.

   Level: advanced

.seealso: VecCreateSeq(), VecCreate(), VecCreateMPI(), VecDuplicate(), VecDuplicateVecs(),
          VecCreateGhost(), VecCreateMPIWithArray(), VecCreateGhostWithArray()

@*/
PetscErrorCode  VecCreateShared(MPI_Comm comm,PetscInt n,PetscInt N,Vec *v)
{
  VecCreate(comm,v);
  VecSetSizes(*v,n,N);
  VecSetType(*v,VECSHARED);
  return(0);
}
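
/*
   A minimal usage sketch, not part of the original file: create a shared-memory vector of
   global length 100 on PETSC_COMM_WORLD, letting PETSc choose the local sizes, then destroy it.

     Vec x;

     VecCreateShared(PETSC_COMM_WORLD,PETSC_DECIDE,100,&x);
     VecSet(x,1.0);
     VecDestroy(&x);
*/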