Actual source code: shvec.c
/*
   This file contains routines for parallel vector operations that use shared memory
*/
#include "src/vec/impls/mpi/pvecimpl.h"

/*
   Could not get the include files to work properly on the SGI with
   the C++ compiler.
*/
#if defined(PETSC_USE_SHARED_MEMORY) && !defined(__cplusplus)

EXTERN PetscErrorCode PetscSharedMalloc(MPI_Comm,PetscInt,PetscInt,void**);
PetscErrorCode VecDuplicate_Shared(Vec win,Vec *v)
{
  Vec_MPI     *w = (Vec_MPI *)win->data;
  PetscScalar *array;

  /* first process allocates entire array and sends its address to the others */
  PetscSharedMalloc(win->comm,win->n*sizeof(PetscScalar),win->N*sizeof(PetscScalar),(void**)&array);

  VecCreate(win->comm,v);
  VecSetSizes(*v,win->n,win->N);
  VecCreate_MPI_Private(*v,w->nghost,array,win->map);

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash = win->stash.donotstash;

  PetscOListDuplicate(win->olist,&(*v)->olist);
  PetscFListDuplicate(win->qlist,&(*v)->qlist);

  if (win->mapping) {
    (*v)->mapping = win->mapping;
    PetscObjectReference((PetscObject)win->mapping);
  }
  (*v)->ops->duplicate = VecDuplicate_Shared;
  (*v)->bs             = win->bs;
  (*v)->bstash.bs      = win->bstash.bs;
  return(0);
}
PetscErrorCode VecCreate_Shared(Vec vv)
{
  PetscScalar *array;

  PetscSplitOwnership(vv->comm,&vv->n,&vv->N);
  PetscSharedMalloc(vv->comm,vv->n*sizeof(PetscScalar),vv->N*sizeof(PetscScalar),(void**)&array);

  VecCreate_MPI_Private(vv,0,array,PETSC_NULL);
  vv->ops->duplicate = VecDuplicate_Shared;
  return(0);
}
/* ----------------------------------------------------------------------------------------
     Code to manage shared memory allocation under the SGI with MPI

     We associate with a communicator a shared memory "arena" from which memory may be allocated.
*/
#include "petscsys.h"
#include "petscfix.h"
#if defined(PETSC_HAVE_PWD_H)
#include <pwd.h>
#endif
#include <ctype.h>
#include <sys/types.h>
#include <sys/stat.h>
#if defined(PETSC_HAVE_UNISTD_H)
#include <unistd.h>
#endif
#if defined(PETSC_HAVE_STDLIB_H)
#include <stdlib.h>
#endif
#if defined(PETSC_HAVE_SYS_PARAM_H)
#include <sys/param.h>
#endif
#if defined(PETSC_HAVE_SYS_UTSNAME_H)
#include <sys/utsname.h>
#endif
#include <fcntl.h>
#include <time.h>
#if defined(PETSC_HAVE_SYS_SYSTEMINFO_H)
#include <sys/systeminfo.h>
#endif
#include "petscfix.h"

static PetscMPIInt Petsc_Shared_keyval = MPI_KEYVAL_INVALID;
static PetscInt    Petsc_Shared_size   = 100000000;
/*
   Private routine to delete internal storage when a communicator is freed.
   This is called by MPI, not by users.

   The binding for the first argument changed from MPI 1.0 to 1.1; in 1.0
   it was MPI_Comm *comm.
*/
static PetscErrorCode Petsc_DeleteShared(MPI_Comm comm,PetscInt keyval,void* attr_val,void* extra_state)
{
  PetscFree(attr_val);
  PetscFunctionReturn(MPI_SUCCESS);
}
PetscErrorCode PetscSharedMemorySetSize(PetscInt s)
{
  Petsc_Shared_size = s;
  return(0);
}
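
/*
   Illustrative sketch (not part of the original source): the size above is consumed
   the first time an arena is created for a communicator, so it must be set before the
   first shared allocation on that communicator, e.g.

     PetscSharedMemorySetSize(200000000);        // arena size in bytes (example value)
     VecCreateShared(PETSC_COMM_WORLD,n,N,&x);   // first shared allocation picks up the new size

   The same limit can also be set on the command line with -shared_size, which is read in
   PetscSharedInitialize() below.
*/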
#include "petscfix.h"
#include <ulocks.h>
PetscErrorCode PetscSharedInitialize(MPI_Comm comm)
{
  PetscMPIInt rank,flag;
  char        filename[PETSC_MAX_PATH_LEN];
  usptr_t     **arena;

  if (Petsc_Shared_keyval == MPI_KEYVAL_INVALID) {
    /*
       The calling sequence of the 2nd argument to this function changed
       between MPI Standard 1.0 and revision 1.1. Here we match the new
       standard; if you are using an MPI implementation that follows the
       older version you will get a warning message about the next line.
       It is only a warning message and should do no harm.
    */
    MPI_Keyval_create(MPI_NULL_COPY_FN,Petsc_DeleteShared,&Petsc_Shared_keyval,0);
  }

  MPI_Attr_get(comm,Petsc_Shared_keyval,(void**)&arena,&flag);

  if (!flag) {
    /* This communicator does not yet have a shared memory arena */
    PetscMalloc(sizeof(usptr_t*),&arena);

    MPI_Comm_rank(comm,&rank);
    if (!rank) {
      PetscStrcpy(filename,"/tmp/PETScArenaXXXXXX");
#ifdef PETSC_HAVE_MKSTEMP
      if (mkstemp(filename) < 0) {
        SETERRQ1(PETSC_ERR_FILE_OPEN,"Unable to open temporary file %s",filename);
      }
#else
      if (!mktemp(filename)) {
        SETERRQ1(PETSC_ERR_FILE_OPEN,"Unable to open temporary file %s",filename);
      }
#endif
    }
    MPI_Bcast(filename,PETSC_MAX_PATH_LEN,MPI_CHAR,0,comm);
    PetscOptionsGetInt(PETSC_NULL,"-shared_size",&Petsc_Shared_size,&flag);
    usconfig(CONF_INITSIZE,Petsc_Shared_size);
    *arena = usinit(filename);
    MPI_Attr_put(comm,Petsc_Shared_keyval,arena);
  }

  return(0);
}
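
/*
   Illustrative sketch (not part of the original source): explicit initialization is
   optional, since PetscSharedMalloc() below initializes the arena on first use.  Code
   that needs the arena directly can retrieve it from the communicator the same way,
   through the cached attribute:

     usptr_t     **arena;
     PetscMPIInt flag;

     PetscSharedInitialize(comm);                                   // optional; otherwise done lazily
     MPI_Attr_get(comm,Petsc_Shared_keyval,(void**)&arena,&flag);   // flag is set once the arena exists
*/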
PetscErrorCode PetscSharedMalloc(MPI_Comm comm,PetscInt llen,PetscInt len,void **result)
{
  char        *value;
  PetscInt    shift;
  PetscMPIInt rank,flag;
  usptr_t     **arena;

  *result = 0;
  if (Petsc_Shared_keyval == MPI_KEYVAL_INVALID) {
    PetscSharedInitialize(comm);
  }
  MPI_Attr_get(comm,Petsc_Shared_keyval,(void**)&arena,&flag);
  if (!flag) {
    PetscSharedInitialize(comm);
    MPI_Attr_get(comm,Petsc_Shared_keyval,(void**)&arena,&flag);
    if (!flag) SETERRQ(PETSC_ERR_LIB,"Unable to initialize shared memory");
  }

  /* each process's offset into the shared buffer is the sum of the local lengths of the lower ranks */
  MPI_Scan(&llen,&shift,1,MPI_INT,MPI_SUM,comm);
  shift -= llen;

  /* the first process allocates the entire buffer from the arena */
  MPI_Comm_rank(comm,&rank);
  if (!rank) {
    value = (char*)usmalloc((size_t) len,*arena);
    if (!value) {
      (*PetscErrorPrintf)("Unable to allocate shared memory location\n");
      (*PetscErrorPrintf)("Run with option -shared_size <size> \n");
      (*PetscErrorPrintf)("with size > %d \n",(int)(1.2*(Petsc_Shared_size+len)));
      SETERRQ(PETSC_ERR_LIB,"Unable to malloc shared memory");
    }
  }
  /* broadcast the base address and point each process at its own piece of the buffer */
  MPI_Bcast(&value,8,MPI_BYTE,0,comm);
  value  += shift;
  *result = (void*)value;

  return(0);
}
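
/*
   Illustrative sketch (not part of the original source): PetscSharedMalloc() hands every
   process in the communicator a pointer into one contiguous shared buffer, offset by the
   sum of the local lengths of the lower ranks.  Used directly, outside of the vector
   routines above, it might look like

     PetscMPIInt size;
     PetscScalar *a;
     PetscInt    nlocal = 100;

     MPI_Comm_size(comm,&size);
     PetscSharedMalloc(comm,nlocal*sizeof(PetscScalar),size*nlocal*sizeof(PetscScalar),(void**)&a);
     // a[0] through a[nlocal-1] is this process's slice of the shared array

   The memory comes from the arena associated with comm, which is created on first use.
*/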

#else

PetscErrorCode VecCreate_Shared(Vec vv)
{
  PetscMPIInt size;

  MPI_Comm_size(vv->comm,&size);
  if (size > 1) {
    SETERRQ(PETSC_ERR_SUP_SYS,"Not supported for shared memory vector objects on this machine");
  }
  VecCreate_Seq(vv);
  return(0);
}

#endif
/*@C
   VecCreateShared - Creates a parallel vector that uses shared memory.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length (or PETSC_DECIDE to have it calculated if N is given)
-  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)

   Output Parameter:
.  v - the vector

   Notes:
   Currently VecCreateShared() is available only on the SGI; otherwise,
   this routine is the same as VecCreateMPI().

   Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
   same type as an existing vector.

   Level: advanced

   Concepts: vectors^creating with shared memory

.seealso: VecCreateSeq(), VecCreate(), VecCreateMPI(), VecDuplicate(), VecDuplicateVecs(),
          VecCreateGhost(), VecCreateMPIWithArray(), VecCreateGhostWithArray()

@*/
PetscErrorCode VecCreateShared(MPI_Comm comm,PetscInt n,PetscInt N,Vec *v)
{
  VecCreate(comm,v);
  VecSetSizes(*v,n,N);
  VecSetType(*v,VECSHARED);
  return(0);
}
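
/*
   Illustrative usage sketch (not part of the original source): creating a shared-memory
   vector through the public interface and duplicating it; the duplicate picks up
   VecDuplicate_Shared() and therefore also lives in shared memory.

     Vec x,y;

     VecCreateShared(PETSC_COMM_WORLD,PETSC_DECIDE,100,&x);
     VecDuplicate(x,&y);   // y is another shared-memory vector with the same layout

   On machines without the SGI shared memory support, the #else branch above is compiled
   instead: it only supports a single process and creates an ordinary sequential vector.
*/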