Actual source code: zstart.c
/*
  This file contains Fortran stubs for PetscInitialize and Finalize.
*/
/*
  This is to prevent the Cray T3D version of MPI (University of Edinburgh)
  from stupidly redefining MPI_INIT(). They put this in to detect errors
  in C code, but here I do want to be calling the Fortran version from a
  C subroutine.
*/
#define T3DMPI_FORTRAN
#define T3EMPI_FORTRAN
#include "src/fortran/custom/zpetsc.h"
#include "petscsys.h"
#ifdef PETSC_HAVE_FORTRAN_CAPS
#define petscinitialize_              PETSCINITIALIZE
#define petscfinalize_                PETSCFINALIZE
#define petscend_                     PETSCEND
#define petscsetcommworld_            PETSCSETCOMMWORLD
#define iargc_                        IARGC
#define getarg_                       GETARG
#define mpi_init_                     MPI_INIT
#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE)
#define petscinitialize_              petscinitialize
#define petscfinalize_                petscfinalize
#define petscend_                     petscend
#define petscsetcommworld_            petscsetcommworld
#define mpi_init_                     mpi_init
#define iargc_                        iargc
#define getarg_                       getarg
#endif
#if defined(PETSC_HAVE_NAGF90)
#undef iargc_
#undef getarg_
#define iargc_  f90_unix_MP_iargc
#define getarg_ f90_unix_MP_getarg
#endif
#if defined(PETSC_USE_NARGS) /* Digital Fortran */
#undef iargc_
#undef getarg_
#define iargc_  NARGS
#define getarg_ GETARG
#elif defined(PETSC_HAVE_PXFGETARG_NEW) /* Cray X1 */
#undef iargc_
#undef getarg_
#define iargc_  ipxfargc_
#define getarg_ pxfgetarg_
#endif
#if defined(PETSC_HAVE_FORTRAN_IARGC_UNDERSCORE) /* HPUX + no underscore */
#undef iargc_
#undef getarg_
#define iargc  iargc_
#define getarg getarg_
#endif
/*
  The extra _ is because the f2c compiler puts an
  extra _ at the end if the original routine name
  contained any _.
*/
#if defined(PETSC_HAVE_FORTRAN_UNDERSCORE_UNDERSCORE)
#define mpi_init_ mpi_init__
#endif
/*
  Different Fortran compilers handle command lines in different ways
*/
#if defined(PETSC_USE_NARGS)
#elif defined(PETSC_HAVE_FORTRAN_STDCALL)
#elif defined(PETSC_HAVE_PXFGETARG_NEW)
#else
/*
  The Cray T3D/T3E use the PXFGETARG() function
*/
#if defined(PETSC_HAVE_PXFGETARG)
#endif
#endif

#if defined(PETSC_USE_COMPLEX)
#endif
EXTERN PetscErrorCode PetscOptionsCheckInitial_Private(void);
EXTERN PetscErrorCode PetscOptionsCheckInitial_Components(void);
EXTERN PetscErrorCode PetscInitialize_DynamicLibraries(void);
EXTERN PetscErrorCode PetscLogBegin_Private(void);
/*
  Reads in the Fortran command line arguments, broadcasts them to
  all processors, and adds them to the options database.
*/
PetscErrorCode PETScParseFortranArgs_Private(int *argc,char ***argv)
{
#if defined(PETSC_USE_NARGS)
  short       i,flg;
#else
  int         i;
#endif
  int         warg = 256;
  PetscMPIInt rank;
  char        *p;
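
  /* Each argument is copied into a fixed-width slot of warg characters so the
     whole argument list can later be broadcast as one contiguous buffer. */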
  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
  if (!rank) {
#if defined(PETSC_HAVE_IARG_COUNT_PROGNAME)
    *argc = iargc_();
#else
    /* most compilers do not count the program name for argv[0] */
    *argc = 1 + iargc_();
#endif
  }
  MPI_Bcast(argc,1,MPI_INT,0,PETSC_COMM_WORLD);

  PetscMalloc((*argc+1)*(warg*sizeof(char)+sizeof(char*)),argv);
  (*argv)[0] = (char*)(*argv + *argc + 1);
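  /* The single PetscMalloc() above holds both the argv pointer array (argc+1
     entries) and the argument text itself: argc fixed-width slots of warg
     characters placed immediately after the pointers, with (*argv)[0]
     pointing at the first slot. */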

  if (!rank) {
    PetscMemzero((*argv)[0],(*argc)*warg*sizeof(char));
    for (i=0; i<*argc; i++) {
      (*argv)[i+1] = (*argv)[i] + warg;
#if defined(PETSC_HAVE_PXFGETARG)
      {char *tmp = (*argv)[i];
       int  ierr,ilen;
       PXFGETARG(&i,_cptofcd(tmp,warg),&ilen,&ierr);
       tmp[ilen] = 0;
      }
#elif defined(PETSC_HAVE_PXFGETARG_NEW)
      {char *tmp = (*argv)[i];
       int  ierr,ilen;
       getarg_(&i,tmp,&ilen,&ierr,warg);
       tmp[ilen] = 0;
      }
#elif defined(PETSC_USE_NARGS)
      GETARG(&i,(*argv)[i],warg,&flg);
#else
      getarg_(&i,(*argv)[i],warg);
#endif
      /* zero out garbage at end of each argument */
      p = (*argv)[i] + warg-1;
      while (p > (*argv)[i]) {
        if (*p == ' ') *p = 0;
        p--;
      }
    }
  }
  MPI_Bcast((*argv)[0],*argc*warg,MPI_CHAR,0,PETSC_COMM_WORLD);
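  /* Only the character buffer is broadcast; the non-root ranks rebuild the
     interior argv pointers locally since addresses are not meaningful across
     processes. */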
  if (rank) {
    for (i=0; i<*argc; i++) {
      (*argv)[i+1] = (*argv)[i] + warg;
    }
  }
  return 0;
}
/* -----------------------------------------------------------------------------------------------*/
/*
    petscinitialize - Version called from Fortran.

    Notes:
    Since this is called from Fortran it does not return error codes.
*/
void PETSC_STDCALL petscinitialize_(CHAR filename PETSC_MIXED_LEN(len),PetscErrorCode *ierr PETSC_END_LEN(len))
{
#if defined(PETSC_USE_NARGS)
  short       flg,i;
#else
  int         i,j;
#endif
  int         flag,argc = 0;
  PetscMPIInt dummy_tag,size;
  char        **args = 0,*t1,name[256],hostname[64];

  *ierr = 1;
  *ierr = PetscMemzero(name,256); if (*ierr) return;
  if (PetscInitializeCalled) {*ierr = 0; return;}

  *ierr = PetscOptionsCreate();
  if (*ierr) return;
  i = 0;
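  /* Argument 0 is the program name; fetch it with whichever getarg variant
     this Fortran compiler provides. */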
#if defined(PETSC_HAVE_PXFGETARG)
  { int ilen,sierr;
    PXFGETARG(&i,_cptofcd(name,256),&ilen,&sierr);
    if (sierr) {
      *ierr = sierr;
      return;
    }
    name[ilen] = 0;
  }
#elif defined(PETSC_HAVE_PXFGETARG_NEW)
  { int ilen,sierr;
    getarg_(&i,name,&ilen,&sierr,256);
    if (sierr) {
      *ierr = sierr;
      return;
    }
    name[ilen] = 0;
  }
#elif defined(PETSC_USE_NARGS)
  GETARG(&i,name,256,&flg);
#else
  getarg_(&i,name,256);
  /* Eliminate spaces at the end of the string */
  for (j=254; j>=0; j--) {
    if (name[j] != ' ') {
      name[j+1] = 0;
      break;
    }
  }
#endif
  *ierr = PetscSetProgramName(name);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize: Calling PetscSetProgramName()");return;}

  MPI_Initialized(&flag);
  if (!flag) {
    PetscMPIInt mierr;
    mpi_init_(&mierr);
    if (mierr) {
      *ierr = mierr;
      (*PetscErrorPrintf)("PetscInitialize: Calling Fortran MPI_Init()");
      return;
    }
    PetscBeganMPI = PETSC_TRUE;
  }
  PetscInitializeCalled = PETSC_TRUE;

  *ierr = PetscErrorPrintfInitialize();
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize: Calling PetscErrorPrintfInitialize()");return;}
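
  /* PETSC_COMM_WORLD defaults to MPI_COMM_WORLD unless the user has already
     set it, for example via petscsetcommworld_() below. */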
  if (!PETSC_COMM_WORLD) {
    PETSC_COMM_WORLD = MPI_COMM_WORLD;
  }

  *ierr = MPI_Comm_rank(MPI_COMM_WORLD,&PetscGlobalRank);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize: Setting PetscGlobalRank");return;}
  *ierr = MPI_Comm_size(MPI_COMM_WORLD,&PetscGlobalSize);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize: Setting PetscGlobalSize");return;}

#if defined(PETSC_USE_COMPLEX)
  /*
    Initialize the global variable; this is because with
    shared libraries the constructors for global variables
    are not called; at least on IRIX.
  */
  {
    PetscScalar ic(0.0,1.0);
    PETSC_i = ic;
  }
  *ierr = MPI_Type_contiguous(2,MPIU_REAL,&MPIU_COMPLEX);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types");return;}
  *ierr = MPI_Type_commit(&MPIU_COMPLEX);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types");return;}
  *ierr = MPI_Op_create(PetscSum_Local,1,&PetscSum_Op);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI ops");return;}
#endif

  /*
    Create the PETSc MPI reduction operator that sums the first
    half of the entries and maxes the second half.
  */
  *ierr = MPI_Op_create(PetscMaxSum_Local,1,&PetscMaxSum_Op);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI ops");return;}

  *ierr = MPI_Type_contiguous(2,MPIU_SCALAR,&MPIU_2SCALAR);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types");return;}
  *ierr = MPI_Type_commit(&MPIU_2SCALAR);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types");return;}
  *ierr = MPI_Type_contiguous(2,MPIU_INT,&MPIU_2INT);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types");return;}
  *ierr = MPI_Type_commit(&MPIU_2INT);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI types");return;}
  *ierr = MPI_Op_create(PetscADMax_Local,1,&PetscADMax_Op);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI ops");return;}
  *ierr = MPI_Op_create(PetscADMin_Local,1,&PetscADMin_Op);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating MPI ops");return;}

  /*
    PetscInitializeFortran() is called twice. Here it initializes
    PETSC_NULLCHARACTER_Fortran. Below it initializes the PETSC_VIEWERs.
    The PETSC_VIEWERs have not been created yet, so they must be initialized
    below.
  */
  PetscInitializeFortran();

  PETScParseFortranArgs_Private(&argc,&args);
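  /* FIXCHAR() copies the blank-padded Fortran character argument into a
     null-terminated C string t1 (when needed); FREECHAR() releases that
     temporary copy after the options have been inserted. */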
  FIXCHAR(filename,len,t1);
  *ierr = PetscOptionsInsert(&argc,&args,t1);
  FREECHAR(filename,t1);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Creating options database");return;}
  *ierr = PetscFree(args);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Freeing args");return;}
  *ierr = PetscOptionsCheckInitial_Private();
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Checking initial options");return;}
  *ierr = PetscLogBegin_Private();
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Initializing logging");return;}
  /*
    Initialize PETSC_COMM_SELF as an MPI_Comm with the PETSc attribute.
  */
  *ierr = PetscCommDuplicate(MPI_COMM_SELF,&PETSC_COMM_SELF,&dummy_tag);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Setting up PETSC_COMM_SELF");return;}
  *ierr = PetscCommDuplicate(PETSC_COMM_WORLD,&PETSC_COMM_WORLD,&dummy_tag);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Setting up PETSC_COMM_WORLD");return;}
  *ierr = PetscInitialize_DynamicLibraries();
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Initializing dynamic libraries");return;}

  *ierr = PetscInitializeFortran();
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Setting up common block");return;}

  *ierr = MPI_Comm_size(PETSC_COMM_WORLD,&size);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Getting MPI_Comm_size()");return;}
  PetscLogInfo(0,"PetscInitialize(Fortran):PETSc successfully started: procs %d\n",size);
  *ierr = PetscGetHostName(hostname,64);
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Getting hostname");return;}
  PetscLogInfo(0,"Running on machine: %s\n",hostname);

  *ierr = PetscOptionsCheckInitial_Components();
  if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:Checking initial options");return;}
}
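
/*
   A minimal usage sketch (assuming the standard PETSc Fortran bindings): after
   the name-mangling #defines at the top of this file, a Fortran call such as

       call PetscInitialize(PETSC_NULL_CHARACTER,ierr)

   resolves to the petscinitialize_() stub above.
*/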
void PETSC_STDCALL petscfinalize_(PetscErrorCode *ierr)
{
#if defined(PETSC_HAVE_SUNMATHPRO)
  standard_arithmetic();
#endif

  *ierr = PetscFinalize();
}

void PETSC_STDCALL petscend_(PetscErrorCode *ierr)
{
#if defined(PETSC_HAVE_SUNMATHPRO)
  standard_arithmetic();
#endif

  *ierr = PetscEnd();
}

void PETSC_STDCALL petscsetcommworld_(MPI_Comm *comm,PetscErrorCode *ierr)
{
  *ierr = PetscSetCommWorld((MPI_Comm)PetscToPointerComm(*comm));
}