Actual source code: zstart.c
/*$Id: zstart.c,v 1.84 2001/08/10 03:35:41 bsmith Exp $*/

/*
  This file contains Fortran stubs for PetscInitialize and Finalize.
*/

/*
    This is to prevent the Cray T3D version of MPI (University of Edinburgh)
  from stupidly redefining MPI_INIT(). They put this in to detect errors
  in C code, but here I do want to be calling the Fortran version from a
  C subroutine.
*/
#define T3DMPI_FORTRAN
#define T3EMPI_FORTRAN

#include "src/fortran/custom/zpetsc.h"
#include "petscsys.h"

extern PetscTruth PetscBeganMPI;
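
/*
   Map the Fortran stub names used below onto the symbol-name convention of the
   Fortran compiler in use: all upper case, lower case with no trailing
   underscore, or (the default spelling used in this file) lower case with a
   trailing underscore.
*/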
#ifdef PETSC_HAVE_FORTRAN_CAPS
#define petscinitialize_    PETSCINITIALIZE
#define petscfinalize_      PETSCFINALIZE
#define petscend_           PETSCEND
#define petscsetcommworld_  PETSCSETCOMMWORLD
#define iargc_              IARGC
#define getarg_             GETARG
#define mpi_init_           MPI_INIT
#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE)
#define petscinitialize_    petscinitialize
#define petscfinalize_      petscfinalize
#define petscend_           petscend
#define petscsetcommworld_  petscsetcommworld
#define mpi_init_           mpi_init
#define iargc_              iargc
#define getarg_             getarg
#endif

#if defined(PETSC_HAVE_NAGF90)
#undef iargc_
#undef getarg_
#define iargc_  f90_unix_MP_iargc
#define getarg_ f90_unix_MP_getarg
#endif
#if defined(PETSC_USE_NARGS) /* Digital Fortran */
#undef iargc_
#undef getarg_
#define iargc_  NARGS
#define getarg_ GETARG
#endif
#if defined(PETSC_HAVE_FORTRAN_IARGC_UNDERSCORE) /* HPUX + no underscore */
#undef iargc_
#undef getarg_
#define iargc  iargc_
#define getarg getarg_
#endif

/*
    The extra _ is needed because the f2c compiler appends an extra _ to the
  name when the original routine name contains an _.
*/
#if defined(PETSC_HAVE_FORTRAN_UNDERSCORE_UNDERSCORE)
#define mpi_init_ mpi_init__
#endif

EXTERN_C_BEGIN
extern void PETSC_STDCALL mpi_init_(int*);

/*
    Different Fortran compilers handle command lines in different ways
*/
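/*
   Most Unix Fortran compilers supply iargc_()/getarg_(); Digital Fortran
   (PETSC_USE_NARGS) supplies the __stdcall NARGS()/GETARG() pair; the Cray
   T3D/T3E supply PXFGETARG().
*/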
#if defined(PETSC_USE_NARGS)
extern short __stdcall NARGS();
extern void  __stdcall GETARG(short*,char*,int,short *);

#else
extern int  iargc_();
extern void getarg_(int*,char*,int);
/*
      The Cray T3D/T3E use the PXFGETARG() function
*/
#if defined(PETSC_HAVE_PXFGETARG)
extern void PXFGETARG(int *,_fcd,int*,int*);
#endif
#endif
EXTERN_C_END
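
/*
   PETSc's user-defined MPI reduction routines and the operators built from
   them; they are defined elsewhere in the PETSc sources and are attached with
   MPI_Op_create() during initialization below.
*/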
#if defined(PETSC_USE_COMPLEX)
extern MPI_Op PetscSum_Op;

EXTERN_C_BEGIN
extern void PetscSum_Local(void *,void *,int *,MPI_Datatype *);
EXTERN_C_END
#endif
extern MPI_Op PetscMaxSum_Op;

EXTERN_C_BEGIN
extern void PetscMaxSum_Local(void *,void *,int *,MPI_Datatype *);
EXTERN_C_END

EXTERN int PetscOptionsCheckInitial(void);
EXTERN int PetscOptionsCheckInitial_Components(void);
EXTERN int PetscInitialize_DynamicLibraries(void);
EXTERN int PetscLogBegin_Private(void);

/*
    Reads in the Fortran command line arguments, broadcasts them to
  all processors, and adds them to the options database.
*/
int PETScParseFortranArgs_Private(int *argc,char ***argv)
{
#if defined (PETSC_USE_NARGS)
  short i,flg;
#else
  int   i;
#endif
  int   warg = 256,rank,ierr;
  char  *p;

  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
  if (!rank) {
#if defined (PETSC_HAVE_IARG_COUNT_PROGNAME)
    *argc = iargc_();
#else
    /* most compilers do not count the program name for argv[0] */
    *argc = 1 + iargc_();
#endif
  }
  MPI_Bcast(argc,1,MPI_INT,0,PETSC_COMM_WORLD);
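
  /*
     A single allocation holds both the argv pointer array (*argc+1 pointers)
     and the argument strings themselves, each in a fixed-width slot of warg
     characters; (*argv)[0] is set to point just past the pointer array.
  */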
  PetscMalloc((*argc+1)*(warg*sizeof(char)+sizeof(char*)),argv);
  (*argv)[0] = (char*)(*argv + *argc + 1);

  if (!rank) {
    PetscMemzero((*argv)[0],(*argc)*warg*sizeof(char));
    for (i=0; i<*argc; i++) {
      (*argv)[i+1] = (*argv)[i] + warg;
#if defined(PETSC_HAVE_PXFGETARG)
      {char *tmp = (*argv)[i];
       int  ierr,ilen;
       PXFGETARG(&i,_cptofcd(tmp,warg),&ilen,&ierr);
       tmp[ilen] = 0;
      }
#elif defined (PETSC_USE_NARGS)
      GETARG(&i,(*argv)[i],warg,&flg);
#else
      getarg_(&i,(*argv)[i],warg);
#endif
      /* zero out garbage at end of each argument */
      p = (*argv)[i] + warg-1;
      while (p > (*argv)[i]) {
        if (*p == ' ') *p = 0;
        p--;
      }
    }
  }
  MPI_Bcast((*argv)[0],*argc*warg,MPI_CHAR,0,PETSC_COMM_WORLD);
  if (rank) {
    for (i=0; i<*argc; i++) {
      (*argv)[i+1] = (*argv)[i] + warg;
    }
  }
  return 0;
}

/* -----------------------------------------------------------------------------------------------*/

EXTERN_C_BEGIN
/*
    petscinitialize - Version called from Fortran.

    Notes:
    Since this is called from Fortran it does not return error codes.
*/
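/*
   Illustrative Fortran usage (the compiler's name mangling maps the call onto
   the stub below):

       call PetscInitialize(PETSC_NULL_CHARACTER,ierr)

   where the first argument names an optional options file and ierr receives
   the error code.
*/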
void PETSC_STDCALL petscinitialize_(CHAR filename PETSC_MIXED_LEN(len),int *ierr PETSC_END_LEN(len))
{
#if defined (PETSC_USE_NARGS)
  short flg,i;
#else
  int   i;
#endif
  int   j,flag,argc = 0,dummy_tag,size;
  char  **args = 0,*t1,name[256],hostname[64];

  *ierr = 1;
  *ierr = PetscMemzero(name,256); if (*ierr) return;
  if (PetscInitializeCalled) {*ierr = 0; return;}

  *ierr = PetscOptionsCreate();
  if (*ierr) return;
  i = 0;
#if defined(PETSC_HAVE_PXFGETARG)
  { int ilen;
    PXFGETARG(&i,_cptofcd(name,256),&ilen,ierr);
    if (*ierr) return;
    name[ilen] = 0;
  }
#elif defined (PETSC_USE_NARGS)
  GETARG(&i,name,256,&flg);
#else
  getarg_(&i,name,256);
  /* Eliminate spaces at the end of the string */
  for (j=254; j>=0; j--) {
    if (name[j] != ' ') {
      name[j+1] = 0;
      break;
    }
  }
#endif
  *ierr = PetscSetProgramName(name);
  if (*ierr) {(*PetscErrorPrintf)("PETSC ERROR: PetscInitialize: Calling PetscSetProgramName()");return;}
  *ierr = PetscSetInitialDate();
  if (*ierr) {(*PetscErrorPrintf)("PETSC ERROR: PetscInitialize: Calling PetscSetInitialDate()");return;}

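  /*
     If MPI has not yet been initialized, start it with the Fortran MPI_Init()
     (mpi_init_, see the comment at the top of this file) and record that PETSc
     started MPI so that PetscFinalize() will also call MPI_Finalize().
  */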
  MPI_Initialized(&flag);
  if (!flag) {
    mpi_init_(ierr);
    if (*ierr) {(*PetscErrorPrintf)("PetscInitialize:");return;}
    PetscBeganMPI = PETSC_TRUE;
  }
  PetscInitializeCalled = PETSC_TRUE;

  if (!PETSC_COMM_WORLD) {
    PETSC_COMM_WORLD = MPI_COMM_WORLD;
  }

  *ierr = MPI_Comm_rank(MPI_COMM_WORLD,&PetscGlobalRank);
  if (*ierr) {(*PetscErrorPrintf)("PETSC ERROR: PetscInitialize: Setting PetscGlobalRank");return;}
  *ierr = MPI_Comm_size(MPI_COMM_WORLD,&PetscGlobalSize);
  if (*ierr) {(*PetscErrorPrintf)("PETSC ERROR: PetscInitialize: Setting PetscGlobalSize");return;}

#if defined(PETSC_USE_COMPLEX)
  /*
      Initialize the global variable; this is needed because with
    shared libraries the constructors for global variables
    are not called, at least on IRIX.
  */
  {
    PetscScalar ic(0.0,1.0);
    PETSC_i = ic;
  }
  MPI_Type_contiguous(2,MPIU_REAL,&MPIU_COMPLEX);
  MPI_Type_commit(&MPIU_COMPLEX);
  *ierr = MPI_Op_create(PetscSum_Local,1,&PetscSum_Op);
  if (*ierr) {(*PetscErrorPrintf)("PETSC ERROR: PetscInitialize:Creating MPI ops");return;}
#endif

  /*
       Create the PETSc MPI reduction operator that sums the first
     half of the entries and maxes the second half.
  */
  *ierr = MPI_Op_create(PetscMaxSum_Local,1,&PetscMaxSum_Op);
  if (*ierr) {(*PetscErrorPrintf)("PETSC ERROR: PetscInitialize:Creating MPI ops");return;}

  /*
       PetscInitializeFortran() is called twice. Here it initializes
     PETSC_NULLCHARACTER_Fortran. Below it initializes the PETSC_VIEWERs.
     The PETSC_VIEWERs have not been created yet, so they must be initialized
     below.
  */
  PetscInitializeFortran();

  PETScParseFortranArgs_Private(&argc,&args);
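  /*
     FIXCHAR/FREECHAR convert the Fortran CHARACTER filename (naming an
     optional options file) to and from a temporary null-terminated C string;
     the Fortran command line arguments gathered above are inserted into the
     options database together with that file.
  */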
  FIXCHAR(filename,len,t1);
  *ierr = PetscOptionsInsert(&argc,&args,t1);
  FREECHAR(filename,t1);
  if (*ierr) {(*PetscErrorPrintf)("PETSC ERROR: PetscInitialize:Creating options database");return;}
  *ierr = PetscFree(args);
  if (*ierr) {(*PetscErrorPrintf)("PETSC ERROR: PetscInitialize:Freeing args");return;}
  *ierr = PetscOptionsCheckInitial();
  if (*ierr) {(*PetscErrorPrintf)("PETSC ERROR: PetscInitialize:Checking initial options");return;}
  *ierr = PetscLogBegin_Private();
  if (*ierr) {(*PetscErrorPrintf)("PETSC ERROR: PetscInitialize: initializing logging");return;}
  /*
       Initialize PETSC_COMM_SELF as an MPI_Comm with the PETSc attribute.
  */
  *ierr = PetscCommDuplicate_Private(MPI_COMM_SELF,&PETSC_COMM_SELF,&dummy_tag);
  if (*ierr) {(*PetscErrorPrintf)("PETSC ERROR: PetscInitialize:Setting up PETSC_COMM_SELF");return;}
  *ierr = PetscCommDuplicate_Private(PETSC_COMM_WORLD,&PETSC_COMM_WORLD,&dummy_tag);
  if (*ierr) {(*PetscErrorPrintf)("PETSC ERROR: PetscInitialize:Setting up PETSC_COMM_WORLD");return;}
  *ierr = PetscInitialize_DynamicLibraries();
  if (*ierr) {(*PetscErrorPrintf)("PETSC ERROR: PetscInitialize:Initializing dynamic libraries");return;}

  *ierr = PetscInitializeFortran();
  if (*ierr) {(*PetscErrorPrintf)("PETSC ERROR: PetscInitialize:Setting up common block");return;}

  *ierr = MPI_Comm_size(PETSC_COMM_WORLD,&size);
  if (*ierr) {(*PetscErrorPrintf)("PETSC ERROR: PetscInitialize:Getting MPI_Comm_size()");return;}
  PetscLogInfo(0,"PetscInitialize(Fortran):PETSc successfully started: procs %d\n",size);
  *ierr = PetscGetHostName(hostname,64);
  if (*ierr) {(*PetscErrorPrintf)("PETSC ERROR: PetscInitialize:Getting hostname");return;}
  PetscLogInfo(0,"Running on machine: %s\n",hostname);

  *ierr = PetscOptionsCheckInitial_Components();
  if (*ierr) {(*PetscErrorPrintf)("PETSC ERROR: PetscInitialize:Checking initial options");return;}
}

void PETSC_STDCALL petscfinalize_(int *ierr)
{
#if defined(PETSC_HAVE_SUNMATHPRO)
  extern void standard_arithmetic();
  standard_arithmetic();
#endif

  *ierr = PetscFinalize();
}
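
/*
   petscend - Fortran stub for PetscEnd(), which finalizes PETSc (and MPI, if
   PETSc started it) and then terminates the program; intended as a clean
   replacement for STOP deep inside a Fortran program.
*/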
void PETSC_STDCALL petscend_(int *ierr)
{
#if defined(PETSC_HAVE_SUNMATHPRO)
  extern void standard_arithmetic();
  standard_arithmetic();
#endif

  *ierr = PetscEnd();
}

void PETSC_STDCALL petscsetcommworld_(MPI_Comm *comm,int *ierr)
{
  *ierr = PetscSetCommWorld((MPI_Comm)PetscToPointerComm(*comm));
}
EXTERN_C_END