Actual source code: mpi.c

petsc-3.8.4 2018-03-24
  1: /*
  2:       This provides a few of the MPI-uni functions that cannot be implemented
  3:     with C macros
  4: */
  5:  #include <petscsys.h>
  6: #if !defined(__MPIUNI_H)   /* guard macro defined by MPIUNI's mpi.h */
  7: #error "Wrong mpi.h included! require mpi.h from MPIUNI"
  8: #endif
  9: #if !defined(PETSC_STDCALL)
 10: #define PETSC_STDCALL
 11: #endif

 13: #define MPI_SUCCESS 0
 14: #define MPI_FAILURE 1

 16: void *MPIUNI_TMP = NULL;

 18: /*
 19:        With MPI Uni there are exactly four distinct communicators:
 20:     MPI_COMM_SELF, MPI_COMM_WORLD, and an MPI_Comm_dup() of each of these (duplicates of duplicates return the same communicator)

 22:     MPI_COMM_SELF and MPI_COMM_WORLD are freed with MPI_Comm_free() in MPI_Finalize(), but in general with PETSc,
 23:      the other communicators are freed once the last PETSc object is freed (before MPI_Finalize()).

 25: */
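/*
       A minimal usage sketch (hypothetical caller code, not part of this file): a communicator is
    just a small integer handle, every communicator reports size 1 and rank 0, and MPI_Comm_free()
    marks its slot inactive so a later MPI_Comm_dup() or MPI_Comm_create() can reuse it.

         MPI_Comm dup;
         int      size,rank;
         MPI_Comm_dup(MPI_COMM_WORLD,&dup);
         MPI_Comm_size(dup,&size);            -- always 1
         MPI_Comm_rank(dup,&rank);            -- always 0
         MPI_Comm_free(&dup);                 -- slot becomes available for reuse
*/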
 26: #define MAX_ATTR 128
 27: #define MAX_COMM 128

 29: static int MaxComm = 2;

 31: typedef struct {
 32:   void *attribute_val;
 33:   int  active;
 34: } MPI_Attr;

 36: typedef struct {
 37:   void                *extra_state;
 38:   MPI_Delete_function *del;
 39: } MPI_Attr_keyval;

 41: static MPI_Attr_keyval attr_keyval[MAX_ATTR];
 42: static MPI_Attr        attr[MAX_COMM][MAX_ATTR];
 43: static int             comm_active[MAX_COMM];
 44: static int             num_attr = 1,mpi_tag_ub = 100000000;
 45: static void*           MPIUNIF_mpi_in_place = 0;

 47: #if defined(__cplusplus)
 48: extern "C" {
 49: #endif

 51: /*
 52:    To avoid problems with prototypes for the system memcpy(), it is duplicated here
 53: */
 54: int MPIUNI_Memcpy(void *a,const void *b,int n)
 55: {
 56:   int  i;
 57:   char *aa= (char*)a;
 58:   char *bb= (char*)b;

 60:   if (a == MPI_IN_PLACE || a == MPIUNIF_mpi_in_place) return MPI_SUCCESS;
 61:   if (b == MPI_IN_PLACE || b == MPIUNIF_mpi_in_place) return MPI_SUCCESS;
 62:   for (i=0; i<n; i++) aa[i] = bb[i];
 63:   return MPI_SUCCESS;
 64: }
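/*
     Usage sketch for the MPI_IN_PLACE short-circuit above (hypothetical caller code): single-process
   "collectives" boil down to copying sendbuf into recvbuf with MPIUNI_Memcpy() (see, e.g.,
   petsc_mpi_scan_() below), so passing MPI_IN_PLACE simply skips the copy.

       int val = 42;
       MPIUNI_Memcpy(&val,MPI_IN_PLACE,sizeof(int));    -- returns MPI_SUCCESS, val untouched
       MPIUNI_Memcpy(MPI_IN_PLACE,&val,sizeof(int));    -- likewise a no-op
*/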

 66: int MPI_Type_get_envelope(MPI_Datatype datatype,int *num_integers,int *num_addresses,int *num_datatypes,int *combiner)
 67: {
 68:   int comb = datatype >> 28;
 69:   switch (comb) {
 70:   case MPI_COMBINER_NAMED:
 71:     *num_integers = 0;
 72:     *num_addresses = 0;
 73:     *num_datatypes = 0;
 74:     *combiner = comb;
 75:     break;
 76:   case MPI_COMBINER_DUP:
 77:     *num_integers = 0;
 78:     *num_addresses = 0;
 79:     *num_datatypes = 1;
 80:     *combiner = comb;
 81:     break;
 82:   case MPI_COMBINER_CONTIGUOUS:
 83:     *num_integers = 1;
 84:     *num_addresses = 0;
 85:     *num_datatypes = 1;
 86:     *combiner = comb;
 87:     break;
 88:   default:
 89:     return MPIUni_Abort(MPI_COMM_SELF,1);
 90:   }
 91:   return MPI_SUCCESS;
 92: }

 94: int MPI_Type_get_contents(MPI_Datatype datatype,int max_integers,int max_addresses,int max_datatypes,int *array_of_integers,MPI_Aint *array_of_addresses,MPI_Datatype *array_of_datatypes)
 95: {
 96:   int comb = datatype >> 28;
 97:   switch (comb) {
 98:   case MPI_COMBINER_NAMED:
 99:     return MPIUni_Abort(MPI_COMM_SELF,1);
100:     break;
101:   case MPI_COMBINER_DUP:
102:     if (max_datatypes < 1) return MPIUni_Abort(MPI_COMM_SELF,1);
103:     array_of_datatypes[0] = datatype & 0x0fffffff;
104:     break;
105:   case MPI_COMBINER_CONTIGUOUS:
106:     if (max_integers < 1 || max_datatypes < 1) return MPIUni_Abort(MPI_COMM_SELF,1);
107:     array_of_integers[0] = (datatype >> 8) & 0xfff; /* count */
108:     array_of_datatypes[0] = (datatype & 0x0ff000ff) | 0x100;  /* basic named type (count=1) from which the contiguous type is derived */
109:     break;
110:   default:
111:     return MPIUni_Abort(MPI_COMM_SELF,1);
112:   }
113:   return MPI_SUCCESS;
114: }
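/*
     How the two routines above decode an MPIUNI datatype handle, as implied by the code (the exact
   bit layout is defined in MPIUNI's mpi.h): bits 28 and up hold the combiner, and for a contiguous
   type bits 8..19 hold the count while the low bits identify the basic named type.  Hypothetical
   caller code, with dtype a previously built datatype:

       int          ni,na,nd,combiner,count;
       MPI_Datatype base;
       MPI_Type_get_envelope(dtype,&ni,&na,&nd,&combiner);        -- combiner = dtype >> 28
       if (combiner == MPI_COMBINER_CONTIGUOUS) {
         MPI_Type_get_contents(dtype,1,0,1,&count,NULL,&base);    -- count = (dtype >> 8) & 0xfff
       }
*/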

116: /*
117:    Used to set the built-in MPI_TAG_UB attribute
118: */
119: static int Keyval_setup(void)
120: {
121:   attr[MPI_COMM_WORLD-1][0].active        = 1;
122:   attr[MPI_COMM_WORLD-1][0].attribute_val = &mpi_tag_ub;
123:   attr[MPI_COMM_SELF-1][0].active         = 1;
124:   attr[MPI_COMM_SELF-1][0].attribute_val  = &mpi_tag_ub;
125:   return MPI_SUCCESS;
126: }

128: int MPI_Keyval_create(MPI_Copy_function *copy_fn,MPI_Delete_function *delete_fn,int *keyval,void *extra_state)
129: {
130:   if (num_attr >= MAX_ATTR) return MPIUni_Abort(MPI_COMM_WORLD,1);

132:   attr_keyval[num_attr].extra_state = extra_state;
133:   attr_keyval[num_attr].del         = delete_fn;
134:   *keyval                           = num_attr++;
135:   return MPI_SUCCESS;
136: }

138: int MPI_Keyval_free(int *keyval)
139: {
140:   attr_keyval[*keyval].extra_state = 0;
141:   attr_keyval[*keyval].del         = 0;

143:   *keyval = 0;
144:   return MPI_SUCCESS;
145: }

147: int MPI_Attr_put(MPI_Comm comm,int keyval,void *attribute_val)
148: {
149:   if (comm-1 < 0 || comm-1 > MaxComm) return MPI_FAILURE;
150:   attr[comm-1][keyval].active        = 1;
151:   attr[comm-1][keyval].attribute_val = attribute_val;
152:   return MPI_SUCCESS;
153: }

155: int MPI_Attr_delete(MPI_Comm comm,int keyval)
156: {
157:   if (comm-1 < 0 || comm-1 > MaxComm) return MPI_FAILURE;
158:   if (attr[comm-1][keyval].active && attr_keyval[keyval].del) {
159:     void *save_attribute_val = attr[comm-1][keyval].attribute_val;
160:     attr[comm-1][keyval].active        = 0;
161:     attr[comm-1][keyval].attribute_val = 0;
162:     (*(attr_keyval[keyval].del))(comm,keyval,save_attribute_val,attr_keyval[keyval].extra_state);
163:   }
164:   return MPI_SUCCESS;
165: }

167: int MPI_Attr_get(MPI_Comm comm,int keyval,void *attribute_val,int *flag)
168: {
169:   if (comm-1 < 0 || comm-1 > MaxComm) return MPI_FAILURE;
170:   if (!keyval) Keyval_setup();
171:   *flag                  = attr[comm-1][keyval].active;
172:   *(void**)attribute_val = attr[comm-1][keyval].attribute_val;
173:   return MPI_SUCCESS;
174: }
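/*
     Sketch of the one built-in attribute (hypothetical caller code): keyval 0 is wired to
   mpi_tag_ub by Keyval_setup(), which MPI_Attr_get() runs on first use, so querying MPI_TAG_UB
   (assumed here to be keyval 0 in MPIUNI's mpi.h) on MPI_COMM_WORLD yields 100000000.

       int *tag_ub,flag;
       MPI_Attr_get(MPI_COMM_WORLD,MPI_TAG_UB,&tag_ub,&flag);     -- flag == 1, *tag_ub == 100000000
*/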

176: int MPI_Comm_create(MPI_Comm comm,MPI_Group group,MPI_Comm *newcomm)
177: {
178:   int j;
179:   if (comm-1 < 0 || comm-1 > MaxComm) return MPI_FAILURE;
180:   for (j=3; j<MaxComm; j++) {
181:     if (!comm_active[j-1]) {
182:       comm_active[j-1] = 1;
183:       *newcomm = j;
184:       return MPI_SUCCESS;
185:     }
186:   }
187:   if (MaxComm > MAX_COMM) return MPI_FAILURE;
188:   *newcomm =  MaxComm++;
189:   comm_active[*newcomm-1] = 1;
190:   return MPI_SUCCESS;
191: }

193: int MPI_Comm_dup(MPI_Comm comm,MPI_Comm *out)
194: {
195:   int j;
196:   if (comm-1 < 0 || comm-1 > MaxComm) return MPI_FAILURE;
197:   for (j=3; j<MaxComm; j++) {
198:     if (!comm_active[j-1]) {
199:       comm_active[j-1] = 1;
200:       *out = j;
201:       return MPI_SUCCESS;
202:     }
203:   }
204:   if (MaxComm > MAX_COMM) return MPI_FAILURE;
205:   *out = MaxComm++;
206:   comm_active[*out-1] = 1;
207:   return MPI_SUCCESS;
208: }

210: int MPI_Comm_free(MPI_Comm *comm)
211: {
212:   int i;

214:   if (*comm-1 < 0 || *comm-1 > MaxComm) return MPI_FAILURE;
215:   for (i=0; i<num_attr; i++) {
216:     if (attr[*comm-1][i].active && attr_keyval[i].del) (*attr_keyval[i].del)(*comm,i,attr[*comm-1][i].attribute_val,attr_keyval[i].extra_state);
217:     attr[*comm-1][i].active        = 0;
218:     attr[*comm-1][i].attribute_val = 0;
219:   }
220:   if (*comm >= 3) comm_active[*comm-1] = 0;
221:   *comm = 0;
222:   return MPI_SUCCESS;
223: }

225: int MPI_Comm_size(MPI_Comm comm, int *size)
226: {
227:   if (comm-1 < 0 || comm-1 > MaxComm) return MPI_FAILURE;
228:   *size=1;
229:   return MPI_SUCCESS;
230: }

232: int MPI_Comm_rank(MPI_Comm comm, int *rank)
233: {
234:   if (comm-1 < 0 || comm-1 > MaxComm) return MPI_FAILURE;
235:   *rank=0;
236:   return MPI_SUCCESS;
237: }

239: int MPIUni_Abort(MPI_Comm comm,int errorcode)
240: {
241:   printf("MPI operation not supported by PETSc's sequential MPI wrappers\n");
242:   return MPI_FAILURE;
243: }

245: int MPI_Abort(MPI_Comm comm,int errorcode)
246: {
247:   abort();
248:   return MPI_SUCCESS;
249: }

251: /* --------------------------------------------------------------------------*/

253: static int MPI_was_initialized = 0;
254: static int MPI_was_finalized   = 0;

256: int MPI_Init(int *argc, char ***argv)
257: {
258:   if (MPI_was_initialized) return MPI_FAILURE;
259:   if (MPI_was_finalized) return MPI_FAILURE;
260:   MPI_was_initialized = 1;
261:   return MPI_SUCCESS;
262: }

264: int MPI_Finalize(void)
265: {
266:   MPI_Comm comm;
267:   if (MPI_was_finalized) return MPI_FAILURE;
268:   if (!MPI_was_initialized) return MPI_FAILURE;
269:   comm = MPI_COMM_WORLD;
270:   MPI_Comm_free(&comm);
271:   comm = MPI_COMM_SELF;
272:   MPI_Comm_free(&comm);
273:   MPI_was_finalized = 1;
274:   return MPI_SUCCESS;
275: }

277: int MPI_Initialized(int *flag)
278: {
279:   *flag = MPI_was_initialized;
280:   return MPI_SUCCESS;
281: }

283: int MPI_Finalized(int *flag)
284: {
285:   *flag = MPI_was_finalized;
286:   return MPI_SUCCESS;
287: }
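/*
     Lifecycle implied by the two flags above (hypothetical caller code): MPI_Init() and
   MPI_Finalize() each succeed exactly once per run, and the pair cannot be restarted.

       MPI_Init(NULL,NULL);       -- MPI_SUCCESS
       MPI_Init(NULL,NULL);       -- MPI_FAILURE, already initialized
       MPI_Finalize();            -- MPI_SUCCESS, frees MPI_COMM_WORLD and MPI_COMM_SELF attributes
       MPI_Init(NULL,NULL);       -- MPI_FAILURE, cannot reinitialize after MPI_Finalize()
*/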

289: /* -------------------     Fortran versions of several routines ------------------ */

291: #if defined(PETSC_HAVE_FORTRAN_CAPS)
292: #define mpiunisetcommonblock_          MPIUNISETCOMMONBLOCK
293: #define mpiunisetfortranbasepointers_  MPIUNISETFORTRANBASEPOINTERS
294: #define petsc_mpi_init_                PETSC_MPI_INIT
295: #define petsc_mpi_finalize_            PETSC_MPI_FINALIZE
296: #define petsc_mpi_comm_size_           PETSC_MPI_COMM_SIZE
297: #define petsc_mpi_comm_rank_           PETSC_MPI_COMM_RANK
298: #define petsc_mpi_abort_               PETSC_MPI_ABORT
299: #define petsc_mpi_reduce_              PETSC_MPI_REDUCE
300: #define petsc_mpi_allreduce_           PETSC_MPI_ALLREDUCE
301: #define petsc_mpi_barrier_             PETSC_MPI_BARRIER
302: #define petsc_mpi_bcast_               PETSC_MPI_BCAST
303: #define petsc_mpi_gather_              PETSC_MPI_GATHER
304: #define petsc_mpi_allgather_           PETSC_MPI_ALLGATHER
305: #define petsc_mpi_comm_split_          PETSC_MPI_COMM_SPLIT
306: #define petsc_mpi_scan_                PETSC_MPI_SCAN
307: #define petsc_mpi_send_                PETSC_MPI_SEND
308: #define petsc_mpi_recv_                PETSC_MPI_RECV
309: #define petsc_mpi_reduce_scatter_      PETSC_MPI_REDUCE_SCATTER
310: #define petsc_mpi_irecv_               PETSC_MPI_IRECV
311: #define petsc_mpi_isend_               PETSC_MPI_ISEND
312: #define petsc_mpi_sendrecv_            PETSC_MPI_SENDRECV
313: #define petsc_mpi_test_                PETSC_MPI_TEST
314: #define petsc_mpi_waitall_             PETSC_MPI_WAITALL
315: #define petsc_mpi_waitany_             PETSC_MPI_WAITANY
316: #define petsc_mpi_allgatherv_          PETSC_MPI_ALLGATHERV
317: #define petsc_mpi_alltoallv_           PETSC_MPI_ALLTOALLV
318: #define petsc_mpi_comm_create_         PETSC_MPI_COMM_CREATE
319: #define petsc_mpi_address_             PETSC_MPI_ADDRESS
320: #define petsc_mpi_pack_                PETSC_MPI_PACK
321: #define petsc_mpi_unpack_              PETSC_MPI_UNPACK
322: #define petsc_mpi_pack_size_           PETSC_MPI_PACK_SIZE
323: #define petsc_mpi_type_struct_         PETSC_MPI_TYPE_STRUCT
324: #define petsc_mpi_type_commit_         PETSC_MPI_TYPE_COMMIT
325: #define petsc_mpi_wtime_               PETSC_MPI_WTIME
326: #define petsc_mpi_cancel_              PETSC_MPI_CANCEL
327: #define petsc_mpi_comm_dup_            PETSC_MPI_COMM_DUP
328: #define petsc_mpi_comm_free_           PETSC_MPI_COMM_FREE
329: #define petsc_mpi_get_count_           PETSC_MPI_GET_COUNT
330: #define petsc_mpi_get_processor_name_  PETSC_MPI_GET_PROCESSOR_NAME
331: #define petsc_mpi_initialized_         PETSC_MPI_INITIALIZED
332: #define petsc_mpi_iprobe_              PETSC_MPI_IPROBE
333: #define petsc_mpi_probe_               PETSC_MPI_PROBE
334: #define petsc_mpi_request_free_        PETSC_MPI_REQUEST_FREE
335: #define petsc_mpi_ssend_               PETSC_MPI_SSEND
336: #define petsc_mpi_wait_                PETSC_MPI_WAIT
337: #define petsc_mpi_comm_group_          PETSC_MPI_COMM_GROUP
338: #define petsc_mpi_exscan_              PETSC_MPI_EXSCAN
339: #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE)
340: #define mpiunisetcommonblock_          mpiunisetcommonblock
341: #define mpiunisetfortranbasepointers_  mpiunisetfortranbasepointers
342: #define petsc_mpi_init_                petsc_mpi_init
343: #define petsc_mpi_finalize_            petsc_mpi_finalize
344: #define petsc_mpi_comm_size_           petsc_mpi_comm_size
345: #define petsc_mpi_comm_rank_           petsc_mpi_comm_rank
346: #define petsc_mpi_abort_               petsc_mpi_abort
347: #define petsc_mpi_reduce_              petsc_mpi_reduce
348: #define petsc_mpi_allreduce_           petsc_mpi_allreduce
349: #define petsc_mpi_barrier_             petsc_mpi_barrier
350: #define petsc_mpi_bcast_               petsc_mpi_bcast
351: #define petsc_mpi_gather_              petsc_mpi_gather
352: #define petsc_mpi_allgather_           petsc_mpi_allgather
353: #define petsc_mpi_comm_split_          petsc_mpi_comm_split
354: #define petsc_mpi_scan_                petsc_mpi_scan
355: #define petsc_mpi_send_                petsc_mpi_send
356: #define petsc_mpi_recv_                petsc_mpi_recv
357: #define petsc_mpi_reduce_scatter_      petsc_mpi_reduce_scatter
358: #define petsc_mpi_irecv_               petsc_mpi_irecv
359: #define petsc_mpi_isend_               petsc_mpi_isend
360: #define petsc_mpi_sendrecv_            petsc_mpi_sendrecv
361: #define petsc_mpi_test_                petsc_mpi_test
362: #define petsc_mpi_waitall_             petsc_mpi_waitall
363: #define petsc_mpi_waitany_             petsc_mpi_waitany
364: #define petsc_mpi_allgatherv_          petsc_mpi_allgatherv
365: #define petsc_mpi_alltoallv_           petsc_mpi_alltoallv
366: #define petsc_mpi_comm_create_         petsc_mpi_comm_create
367: #define petsc_mpi_address_             petsc_mpi_address
368: #define petsc_mpi_pack_                petsc_mpi_pack
369: #define petsc_mpi_unpack_              petsc_mpi_unpack
370: #define petsc_mpi_pack_size_           petsc_mpi_pack_size
371: #define petsc_mpi_type_struct_         petsc_mpi_type_struct
372: #define petsc_mpi_type_commit_         petsc_mpi_type_commit
373: #define petsc_mpi_wtime_               petsc_mpi_wtime
374: #define petsc_mpi_cancel_              petsc_mpi_cancel
375: #define petsc_mpi_comm_dup_            petsc_mpi_comm_dup
376: #define petsc_mpi_comm_free_           petsc_mpi_comm_free
377: #define petsc_mpi_get_count_           petsc_mpi_get_count
378: #define petsc_mpi_get_processor_name_  petsc_mpi_get_processor_name
379: #define petsc_mpi_initialized_         petsc_mpi_initialized
380: #define petsc_mpi_iprobe_              petsc_mpi_iprobe
381: #define petsc_mpi_probe_               petsc_mpi_probe
382: #define petsc_mpi_request_free_        petsc_mpi_request_free
383: #define petsc_mpi_ssend_               petsc_mpi_ssend
384: #define petsc_mpi_wait_                petsc_mpi_wait
385: #define petsc_mpi_comm_group_          petsc_mpi_comm_group
386: #define petsc_mpi_exscan_              petsc_mpi_exscan
387: #endif

389: #if defined(PETSC_HAVE_FORTRAN_UNDERSCORE_UNDERSCORE)
390: #define petsc_mpi_init_                petsc_mpi_init__
391: #define petsc_mpi_finalize_            petsc_mpi_finalize__
392: #define petsc_mpi_comm_size_           petsc_mpi_comm_size__
393: #define petsc_mpi_comm_rank_           petsc_mpi_comm_rank__
394: #define petsc_mpi_abort_               petsc_mpi_abort__
395: #define petsc_mpi_reduce_              petsc_mpi_reduce__
396: #define petsc_mpi_allreduce_           petsc_mpi_allreduce__
397: #define petsc_mpi_barrier_             petsc_mpi_barrier__
398: #define petsc_mpi_bcast_               petsc_mpi_bcast__
399: #define petsc_mpi_gather_              petsc_mpi_gather__
400: #define petsc_mpi_allgather_           petsc_mpi_allgather__
401: #define petsc_mpi_comm_split_          petsc_mpi_comm_split__
402: #define petsc_mpi_scan_                petsc_mpi_scan__
403: #define petsc_mpi_send_                petsc_mpi_send__
404: #define petsc_mpi_recv_                petsc_mpi_recv__
405: #define petsc_mpi_reduce_scatter_      petsc_mpi_reduce_scatter__
406: #define petsc_mpi_irecv_               petsc_mpi_irecv__
407: #define petsc_mpi_isend_               petsc_mpi_isend__
408: #define petsc_mpi_sendrecv_            petsc_mpi_sendrecv__
409: #define petsc_mpi_test_                petsc_mpi_test__
410: #define petsc_mpi_waitall_             petsc_mpi_waitall__
411: #define petsc_mpi_waitany_             petsc_mpi_waitany__
412: #define petsc_mpi_allgatherv_          petsc_mpi_allgatherv__
413: #define petsc_mpi_alltoallv_           petsc_mpi_alltoallv__
414: #define petsc_mpi_comm_create_         petsc_mpi_comm_create__
415: #define petsc_mpi_address_             petsc_mpi_address__
416: #define petsc_mpi_pack_                petsc_mpi_pack__
417: #define petsc_mpi_unpack_              petsc_mpi_unpack__
418: #define petsc_mpi_pack_size_           petsc_mpi_pack_size__
419: #define petsc_mpi_type_struct_         petsc_mpi_type_struct__
420: #define petsc_mpi_type_commit_         petsc_mpi_type_commit__
421: #define petsc_mpi_wtime_               petsc_mpi_wtime__
422: #define petsc_mpi_cancel_              petsc_mpi_cancel__
423: #define petsc_mpi_comm_dup_            petsc_mpi_comm_dup__
424: #define petsc_mpi_comm_free_           petsc_mpi_comm_free__
425: #define petsc_mpi_get_count_           petsc_mpi_get_count__
426: #define petsc_mpi_get_processor_name_  petsc_mpi_get_processor_name__
427: #define petsc_mpi_initialized_         petsc_mpi_initialized__
428: #define petsc_mpi_iprobe_              petsc_mpi_iprobe__
429: #define petsc_mpi_probe_               petsc_mpi_probe__
430: #define petsc_mpi_request_free_        petsc_mpi_request_free__
431: #define petsc_mpi_ssend_               petsc_mpi_ssend__
432: #define petsc_mpi_wait_                petsc_mpi_wait__
433: #define petsc_mpi_comm_group_          petsc_mpi_comm_group__
434: #define petsc_mpi_exscan_              petsc_mpi_exscan__
435: #endif
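/*
     What the three mangling blocks above accomplish, using petsc_mpi_init_ as the example: the
   source always spells the trailing-underscore form, and the macros rewrite it to the symbol
   name the Fortran compiler expects.

       PETSC_HAVE_FORTRAN_CAPS                         ->  PETSC_MPI_INIT
       neither CAPS nor PETSC_HAVE_FORTRAN_UNDERSCORE  ->  petsc_mpi_init
       PETSC_HAVE_FORTRAN_UNDERSCORE_UNDERSCORE        ->  petsc_mpi_init__
       otherwise (single trailing underscore)          ->  petsc_mpi_init_  (unchanged)
*/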

437: /* Do not build the Fortran interface if MPI namespace collisions are to be avoided */
438: #if defined(PETSC_HAVE_FORTRAN)

440: PETSC_EXTERN void PETSC_STDCALL mpiunisetcommonblock_(void);

442: PETSC_EXTERN void PETSC_STDCALL mpiunisetfortranbasepointers_(void *f_mpi_in_place)
443: {
444:   MPIUNIF_mpi_in_place   = f_mpi_in_place;
445: }

447: PETSC_EXTERN void PETSC_STDCALL petsc_mpi_init_(int *ierr)
448: {
449:   mpiunisetcommonblock_();
450:   *ierr = MPI_Init((int*)0, (char***)0);
451: }

453: PETSC_EXTERN void PETSC_STDCALL petsc_mpi_finalize_(int *ierr)
454: {
455:   *ierr = MPI_Finalize();
456: }

458: PETSC_EXTERN void PETSC_STDCALL petsc_mpi_comm_size_(MPI_Comm *comm,int *size,int *ierr)
459: {
460:   *size = 1;
461:   *ierr = 0;
462: }

464: PETSC_EXTERN void PETSC_STDCALL petsc_mpi_comm_rank_(MPI_Comm *comm,int *rank,int *ierr)
465: {
466:   *rank = 0;
467:   *ierr = MPI_SUCCESS;
468: }

470: PETSC_EXTERN void PETSC_STDCALL petsc_mpi_comm_split_(MPI_Comm *comm,int *color,int *key, MPI_Comm *newcomm, int *ierr)
471: {
472:   *newcomm = *comm;
473:   *ierr = MPI_SUCCESS;
474: }

476: PETSC_EXTERN void PETSC_STDCALL petsc_mpi_abort_(MPI_Comm *comm,int *errorcode,int *ierr)
477: {
478:   abort();
479:   *ierr = MPI_SUCCESS;
480: }

482: PETSC_EXTERN void PETSC_STDCALL petsc_mpi_reduce_(void *sendbuf,void *recvbuf,int *count,int *datatype,int *op,int *root,int *comm,int *ierr)
483: {
484:   *ierr = MPI_Reduce(sendbuf,recvbuf,*count,*datatype,*op,*root,*comm);
485: }

487: PETSC_EXTERN void PETSC_STDCALL petsc_mpi_allreduce_(void *sendbuf,void *recvbuf,int *count,int *datatype,int *op,int *comm,int *ierr)
488: {
489:   *ierr = MPI_Allreduce(sendbuf,recvbuf,*count,*datatype,*op,*comm);
490: }

492: PETSC_EXTERN void PETSC_STDCALL petsc_mpi_barrier_(MPI_Comm *comm,int *ierr)
493: {
494:   *ierr = MPI_SUCCESS;
495: }

497: PETSC_EXTERN void PETSC_STDCALL petsc_mpi_bcast_(void *buf,int *count,int *datatype,int *root,int *comm,int *ierr)
498: {
499:   *ierr = MPI_SUCCESS;
500: }

502: PETSC_EXTERN void PETSC_STDCALL petsc_mpi_gather_(void *sendbuf,int *scount,int *sdatatype, void *recvbuf, int *rcount, int *rdatatype, int *root,int *comm,int *ierr)
503: {
504:   *ierr = MPI_Gather(sendbuf,*scount,*sdatatype,recvbuf,rcount,rdatatype,*root,*comm);
505: }

507: PETSC_EXTERN void PETSC_STDCALL petsc_mpi_allgather_(void *sendbuf,int *scount,int *sdatatype, void *recvbuf, int *rcount, int *rdatatype,int *comm,int *ierr)
508: {
509:   *ierr = MPI_Allgather(sendbuf,*scount,*sdatatype,recvbuf,rcount,rdatatype,*comm);
510: }

512: PETSC_EXTERN void PETSC_STDCALL petsc_mpi_scan_(void *sendbuf,void *recvbuf,int *count,int *datatype,int *op,int *comm,int *ierr)
513: {
514:   *ierr = MPIUNI_Memcpy(recvbuf,sendbuf,(*count)*MPI_sizeof(*datatype));
515: }

517: PETSC_EXTERN void PETSC_STDCALL petsc_mpi_send_(void *buf,int *count,int *datatype,int *dest,int *tag,int *comm,int *ierr)
518: {
519:   *ierr = MPIUni_Abort(MPI_COMM_WORLD,0);
520: }

522: PETSC_EXTERN void PETSC_STDCALL petsc_mpi_recv_(void *buf,int *count,int *datatype,int *source,int *tag,int *comm,int status,int *ierr)
523: {
524:   *ierr = MPIUni_Abort(MPI_COMM_WORLD,0);
525: }

527: PETSC_EXTERN void PETSC_STDCALL petsc_mpi_reduce_scatter_(void *sendbuf,void *recvbuf,int *recvcounts,int *datatype,int *op,int *comm,int *ierr)
528: {
529:   *ierr = MPIUni_Abort(MPI_COMM_WORLD,0);
530: }

532: PETSC_EXTERN void PETSC_STDCALL petsc_mpi_irecv_(void *buf,int *count, int *datatype, int *source, int *tag, int *comm, int *request, int *ierr)
533: {
534:   *ierr = MPIUni_Abort(MPI_COMM_WORLD,0);
535: }

537: PETSC_EXTERN void PETSC_STDCALL petsc_mpi_isend_(void *buf,int *count,int *datatype,int *dest,int *tag,int *comm,int *request, int *ierr)
538: {
539:   *ierr = MPIUni_Abort(MPI_COMM_WORLD,0);
540: }

542: PETSC_EXTERN void PETSC_STDCALL petsc_mpi_sendrecv_(void *sendbuf,int *sendcount,int *sendtype,int *dest,int *sendtag,void *recvbuf,int *recvcount,int *recvtype,int *source,int *recvtag,int *comm,int *status,int *ierr)
543: {
544:   *ierr = MPIUNI_Memcpy(recvbuf,sendbuf,(*sendcount)*MPI_sizeof(*sendtype));
545: }

547: PETSC_EXTERN void PETSC_STDCALL petsc_mpi_test_(int *request,int *flag,int *status,int *ierr)
548: {
549:   *ierr = MPIUni_Abort(MPI_COMM_WORLD,0);
550: }

552: PETSC_EXTERN void PETSC_STDCALL petsc_mpi_waitall_(int *count,int *array_of_requests,int *array_of_statuses,int *ierr)
553: {
554:   *ierr = MPI_SUCCESS;
555: }

557: PETSC_EXTERN void PETSC_STDCALL petsc_mpi_waitany_(int *count,int *array_of_requests,int * index, int *status,int *ierr)
558: {
559:   *ierr = MPI_SUCCESS;
560: }

562: PETSC_EXTERN void PETSC_STDCALL petsc_mpi_allgatherv_(void *sendbuf,int *sendcount,int *sendtype,void *recvbuf,int *recvcounts,int *displs,int *recvtype,int *comm,int *ierr)
563: {
564:   *ierr = MPI_Allgatherv(sendbuf,*sendcount,*sendtype,recvbuf,recvcounts,displs,*recvtype,*comm);
565: }

567: PETSC_EXTERN void PETSC_STDCALL petsc_mpi_alltoallv_(void *sendbuf,int *sendcounts,int *sdispls,int *sendtype,void *recvbuf,int *recvcounts,int *rdispls,int *recvtype,int *comm,int *ierr)
568: {
569:   *ierr = MPI_Alltoallv(sendbuf,sendcounts,sdispls,*sendtype,recvbuf,recvcounts,rdispls,*recvtype,*comm);
570: }

572: PETSC_EXTERN void PETSC_STDCALL petsc_mpi_comm_create_(int *comm,int *group,int *newcomm,int *ierr)
573: {
574:   *newcomm =  *comm;
575:   *ierr = MPI_SUCCESS;
576: }

578: PETSC_EXTERN void PETSC_STDCALL petsc_mpi_address_(void *location,MPI_Aint *address,int *ierr)
579: {
580:   *address =  (MPI_Aint) ((char *)location);
581:   *ierr = MPI_SUCCESS;
582: }

584: PETSC_EXTERN void PETSC_STDCALL petsc_mpi_pack_(void *inbuf,int *incount,int *datatype,void *outbuf,int *outsize,int *position,int *comm,int *ierr)
585: {
586:   *ierr = MPIUni_Abort(MPI_COMM_WORLD,0);
587: }

589: PETSC_EXTERN void PETSC_STDCALL petsc_mpi_unpack_(void *inbuf,int *insize,int *position,void *outbuf,int *outcount,int *datatype,int *comm,int *ierr)
590: {
591:   *ierr = MPIUni_Abort(MPI_COMM_WORLD,0);
592: }

594: PETSC_EXTERN void PETSC_STDCALL petsc_mpi_pack_size_(int *incount,int *datatype,int *comm,int *size,int *ierr)
595: {
596:   *ierr = MPIUni_Abort(MPI_COMM_WORLD,0);
597: }

599: PETSC_EXTERN void PETSC_STDCALL petsc_mpi_type_struct_(int *count,int *array_of_blocklengths,int * array_of_displacements,int *array_of_types,int *newtype,int *ierr)
600: {
601:   *ierr = MPIUni_Abort(MPI_COMM_WORLD,0);
602: }

604: PETSC_EXTERN void PETSC_STDCALL petsc_mpi_type_commit_(int *datatype,int *ierr)
605: {
606:   *ierr = MPI_SUCCESS;
607: }

609: double PETSC_STDCALL petsc_mpi_wtime_(void)
610: {
611:   return 0.0;
612: }

614: PETSC_EXTERN void PETSC_STDCALL petsc_mpi_cancel_(int *request,int *ierr)
615: {
616:   *ierr = MPI_SUCCESS;
617: }

619: PETSC_EXTERN void PETSC_STDCALL petsc_mpi_comm_dup_(int *comm,int *out,int *ierr)
620: {
621:   *out  = *comm;
622:   *ierr = MPI_SUCCESS;
623: }

625: PETSC_EXTERN void PETSC_STDCALL petsc_mpi_comm_free_(int *comm,int *ierr)
626: {
627:   *ierr = MPI_SUCCESS;
628: }

630: PETSC_EXTERN void PETSC_STDCALL petsc_mpi_get_count_(int *status,int *datatype,int *count,int *ierr)
631: {
632:   *ierr = MPIUni_Abort(MPI_COMM_WORLD,0);
633: }

635: /* duplicate from fortranimpl.h */
636: #if defined(PETSC_HAVE_FORTRAN_MIXED_STR_ARG)
637: #define PETSC_MIXED_LEN(len) ,int len
638: #define PETSC_END_LEN(len)
639: #else
640: #define PETSC_MIXED_LEN(len)
641: #define PETSC_END_LEN(len)   ,int len
642: #endif
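/*
     Effect of the two macros above on the prototype that follows: Fortran compilers pass the
   length of a CHARACTER argument as an extra hidden int, either immediately after the string
   (PETSC_HAVE_FORTRAN_MIXED_STR_ARG) or appended at the end of the argument list.

       mixed:     petsc_mpi_get_processor_name_(char *name,int len,int *result_len,int *ierr)
       trailing:  petsc_mpi_get_processor_name_(char *name,int *result_len,int *ierr,int len)
*/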

644: PETSC_EXTERN void PETSC_STDCALL petsc_mpi_get_processor_name_(char *name PETSC_MIXED_LEN(len),int *result_len,int *ierr PETSC_END_LEN(len))
645: {
646:   MPIUNI_Memcpy(name,"localhost",9*sizeof(char));
647:   *result_len = 9;
648:   *ierr = MPI_SUCCESS;
649: }

651: PETSC_EXTERN void PETSC_STDCALL petsc_mpi_initialized_(int *flag,int *ierr)
652: {
653:   *flag = MPI_was_initialized;
654:   *ierr = MPI_SUCCESS;
655: }

657: PETSC_EXTERN void PETSC_STDCALL petsc_mpi_iprobe_(int *source,int *tag,int *comm,int *flag,int *status,int *ierr)
658: {
659:   *ierr = MPI_SUCCESS;
660: }

662: PETSC_EXTERN void PETSC_STDCALL petsc_mpi_probe_(int *source,int *tag,int *comm,int *flag,int *status,int *ierr)
663: {
664:   *ierr = MPI_SUCCESS;
665: }

667: PETSC_EXTERN void PETSC_STDCALL petsc_mpi_request_free_(int *request,int *ierr)
668: {
669:   *ierr = MPI_SUCCESS;
670: }

672: PETSC_EXTERN void PETSC_STDCALL petsc_mpi_ssend_(void *buf,int *count,int *datatype,int *dest,int *tag,int *comm,int *ierr)
673: {
674:   *ierr = MPIUni_Abort(MPI_COMM_WORLD,0);
675: }

677: PETSC_EXTERN void PETSC_STDCALL petsc_mpi_wait_(int *request,int *status,int *ierr)
678: {
679:   *ierr = MPI_SUCCESS;
680: }

682: PETSC_EXTERN void PETSC_STDCALL petsc_mpi_comm_group_(int *comm,int *group,int *ierr)
683: {
684:   *ierr = MPI_SUCCESS;
685: }

687: PETSC_EXTERN void PETSC_STDCALL petsc_mpi_exscan_(void *sendbuf,void *recvbuf,int *count,int *datatype,int *op,int *comm,int *ierr)
688: {
689:   *ierr = MPI_SUCCESS;
690: }

692: #endif /* PETSC_HAVE_FORTRAN */

694: #if defined(__cplusplus)
695: }
696: #endif