Actual source code: ex10.c
petsc-3.13.6 2020-09-29
/*
   Include "petscsnes.h" so that we can use SNES solvers.  Note that this
   file automatically includes:
     petscsys.h    - base PETSc routines
     petscvec.h    - vectors
     petscmat.h    - matrices
     petscis.h     - index sets
     petscksp.h    - Krylov subspace methods and linear solvers
     petscviewer.h - viewers
     petscpc.h     - preconditioners
*/
#include <petscsnes.h>
#include <petscao.h>

static char help[] = "An Unstructured Grid Example.\n\
This example demonstrates how to solve a nonlinear system in parallel\n\
with SNES for an unstructured mesh. The mesh and partitioning information\n\
is read in an application-defined ordering, which is later transformed\n\
into another convenient ordering (called the local ordering). The local\n\
ordering, apart from being efficient in CPU cycles and memory, allows\n\
the use of the SPMD model of parallel programming. After partitioning\n\
is done, scatters are created between local (sequential) and global\n\
(distributed) vectors. Finally, we set up the nonlinear solver context\n\
in the usual way, as for a structured grid (see\n\
petsc/src/snes/tutorials/ex5.c).\n\
This example also illustrates the use of parallel matrix coloring.\n\
The command line options include:\n\
  -vert <Nv>, where Nv is the global number of nodes\n\
  -elem <Ne>, where Ne is the global number of elements\n\
  -nl_par <lambda>, where lambda is the multiplier for the nonlinear term (u*u)\n\
  -lin_par <alpha>, where alpha is the multiplier for the linear term (u)\n\
  -fd_jacobian_coloring -mat_coloring_type lf\n";
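
/*
   Typical invocation (taken from the TEST block at the end of this file):

      mpiexec -n 2 ./ex10 -snes_monitor_short

   with options.inf and adj.in present in the run directory.
*/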
/*T
   Concepts: SNES^unstructured grid
   Concepts: AO^application to PETSc ordering or vice versa;
   Concepts: VecScatter^using vector scatter operations;
   Processors: n
T*/

/* ------------------------------------------------------------------------
   PDE Solved : L(u) + lambda*u*u + alpha*u = 0, where L(u) is the Laplacian.

   The Laplacian is approximated as a graph Laplacian: each edge is given a
   weight of one, so the diagonal entry for a node equals the degree of the
   node and each off-diagonal entry is -1.
  ------------------------------------------------------------------------- */
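
/*
   Concretely (a restatement for clarity, not part of the original text): for
   a vertex i with neighbour set adj(i) of size deg(i),

      L(u)_i = deg(i)*u_i - sum over j in adj(i) of u_j

   so each component of the discrete operator couples a vertex only to its
   graph neighbours.
*/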
#define MAX_ELEM      500 /* Maximum number of elements  */
#define MAX_VERT      100 /* Maximum number of vertices  */
#define MAX_VERT_ELEM   3 /* Vertices per element        */

/*
  Application-defined context for problem-specific data
*/
typedef struct {
  PetscInt   Nvglobal,Nvlocal;             /* global and local number of vertices */
  PetscInt   Neglobal,Nelocal;             /* global and local number of elements */
  PetscInt   AdjM[MAX_VERT][50];           /* adjacency list of a vertex */
  PetscInt   itot[MAX_VERT];               /* total number of neighbors for a vertex */
  PetscInt   icv[MAX_ELEM][MAX_VERT_ELEM]; /* vertices belonging to an element */
  PetscInt   v2p[MAX_VERT];                /* processor number for a vertex */
  PetscInt   *locInd,*gloInd;              /* local and global orderings for a node */
  Vec        localX,localF;                /* local solution (u) and f(u) vectors */
  PetscReal  non_lin_param;                /* nonlinear parameter for the PDE */
  PetscReal  lin_param;                    /* linear parameter for the PDE */
  VecScatter scatter;                      /* scatter context for the local and
                                              distributed vectors */
} AppCtx;

/*
  User-defined routines
*/
PetscErrorCode FormJacobian(SNES,Vec,Mat,Mat,void*);
PetscErrorCode FormFunction(SNES,Vec,Vec,void*);
PetscErrorCode FormInitialGuess(AppCtx*,Vec);

int main(int argc,char **argv)
{
  SNES                   snes;                /* SNES context */
  SNESType               type = SNESNEWTONLS; /* default nonlinear solution method */
  Vec                    x,r;                 /* solution, residual vectors */
  Mat                    Jac;                 /* Jacobian matrix */
  AppCtx                 user;                /* user-defined application context */
  AO                     ao;                  /* Application Ordering object */
  IS                     isglobal,islocal;    /* global and local index sets */
  PetscMPIInt            rank,size;           /* rank of a process, number of processors */
  PetscInt               rstart;              /* starting index of PETSc ordering for a processor */
  PetscInt               nfails;              /* number of unsuccessful Newton steps */
  PetscInt               bs = 1;              /* block size for multicomponent systems */
  PetscInt               nvertices;           /* number of local plus ghost nodes of a processor */
  PetscInt               *pordering;          /* PETSc ordering */
  PetscInt               *vertices;           /* list of all vertices (incl. ghost ones) on a processor */
  PetscInt               *verticesmask;
  PetscInt               *tmp;
  PetscInt               i,j,jstart,inode,nb,nbrs,Nvneighborstotal = 0;
  PetscErrorCode         ierr;
  PetscInt               its,N;
  PetscScalar            *xx;
  char                   str[256],form[256],part_name[256];
  FILE                   *fptr,*fptr1;
  ISLocalToGlobalMapping isl2g;
  int                    dtmp;
#if defined(UNUSED_VARIABLES)
  PetscDraw   draw;                           /* drawing context */
  PetscScalar *ff,*gg;
  PetscReal   tiny = 1.0e-10,zero = 0.0,one = 1.0,big = 1.0e+10;
  PetscInt    *tmp1,*tmp2;
#endif
  MatFDColoring matfdcoloring        = 0;
  PetscBool     fd_jacobian_coloring = PETSC_FALSE;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  ierr = PetscInitialize(&argc,&argv,"options.inf",help);if (ierr) return ierr;
  MPI_Comm_rank(MPI_COMM_WORLD,&rank);
  MPI_Comm_size(MPI_COMM_WORLD,&size);

  /* The current input file options.inf is for a 2-processor run only */
  if (size != 2) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_WRONG_MPI_SIZE,"This example currently runs on 2 procs only.");

  /*
     Initialize problem parameters
  */
  user.Nvglobal = 16; /* Global # of vertices */
  user.Neglobal = 18; /* Global # of elements */

  PetscOptionsGetInt(NULL,NULL,"-vert",&user.Nvglobal,NULL);
  PetscOptionsGetInt(NULL,NULL,"-elem",&user.Neglobal,NULL);

  user.non_lin_param = 0.06;

  PetscOptionsGetReal(NULL,NULL,"-nl_par",&user.non_lin_param,NULL);

  user.lin_param = -1.0;

  PetscOptionsGetReal(NULL,NULL,"-lin_par",&user.lin_param,NULL);

  user.Nvlocal = 0;
  user.Nelocal = 0;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Read the mesh and partitioning information
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Read the mesh and partitioning information from 'adj.in'.
     The file format is as follows.
     For each line the first entry is the processor rank where the
     current node belongs. The second entry is the number of
     neighbors of a node. The rest of the line is the adjacency
     list of a node. Currently this file is set up to work on two
     processors.

     This is not a very good example of reading input. In the future,
     we will put an example that shows the style that should be
     used in a real application, where partitioning will be done
     dynamically by calling partitioning routines (at present, we have
     a ready interface to ParMeTiS).
   */
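  /*
     Illustrative (hypothetical) line in that format: "0 3 1 4 5" would place
     a node on rank 0 and give it the 3 neighbours 1, 4 and 5. The actual
     adj.in shipped with the example encodes the 16-vertex mesh.
  */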
  fptr = fopen("adj.in","r");
  if (!fptr) SETERRQ(PETSC_COMM_SELF,0,"Could not open adj.in");

  /*
     Each processor writes to the file output.<rank> where rank is the
     processor's rank.
  */
  sprintf(part_name,"output.%d",rank);
  fptr1 = fopen(part_name,"w");
  if (!fptr1) SETERRQ(PETSC_COMM_SELF,0,"Could not open output file");
  PetscMalloc1(user.Nvglobal,&user.gloInd);
  PetscFPrintf(PETSC_COMM_SELF,fptr1,"Rank is %d\n",rank);
  for (inode = 0; inode < user.Nvglobal; inode++) {
    if (!fgets(str,256,fptr)) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_FILE_READ,"fgets read failed");
    sscanf(str,"%d",&dtmp);user.v2p[inode] = dtmp;
    if (user.v2p[inode] == rank) {
      PetscFPrintf(PETSC_COMM_SELF,fptr1,"Node %D belongs to processor %D\n",inode,user.v2p[inode]);

      user.gloInd[user.Nvlocal] = inode;
      sscanf(str,"%*d %d",&dtmp);
      nbrs = dtmp;
      PetscFPrintf(PETSC_COMM_SELF,fptr1,"Number of neighbors for the vertex %D is %D\n",inode,nbrs);

      user.itot[user.Nvlocal] = nbrs;
      Nvneighborstotal       += nbrs;
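      /*
         Build a scanf format that skips the processor rank, the neighbour
         count, and the i neighbours already read, then reads the next
         neighbour; e.g. for i = 0 the format string becomes "%*d %*d %d".
      */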
      for (i = 0; i < user.itot[user.Nvlocal]; i++) {
        form[0] = '\0';
        for (j=0; j < i+2; j++) {
          PetscStrlcat(form,"%*d ",sizeof(form));
        }
        PetscStrlcat(form,"%d",sizeof(form));

        sscanf(str,form,&dtmp);
        user.AdjM[user.Nvlocal][i] = dtmp;

        PetscFPrintf(PETSC_COMM_SELF,fptr1,"%D ",user.AdjM[user.Nvlocal][i]);
      }
      PetscFPrintf(PETSC_COMM_SELF,fptr1,"\n");
      user.Nvlocal++;
    }
  }
  PetscFPrintf(PETSC_COMM_SELF,fptr1,"Total # of Local Vertices is %D \n",user.Nvlocal);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create different orderings
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
    Create the local ordering list for vertices. First a list using the PETSc
    global ordering is created. Then we use the AO object to get the
    PETSc-to-application and application-to-PETSc mappings. Each vertex also
    gets a local index (stored in the locInd array).
  */
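  /*
     MPI_Scan returns the inclusive prefix sum of Nvlocal across the ranks;
     subtracting this rank's own count turns it into the rank's starting
     offset in the contiguous PETSc ordering.
  */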
  MPI_Scan(&user.Nvlocal,&rstart,1,MPIU_INT,MPI_SUM,PETSC_COMM_WORLD);
  rstart -= user.Nvlocal;
  PetscMalloc1(user.Nvlocal,&pordering);

  for (i=0; i < user.Nvlocal; i++) pordering[i] = rstart + i;

  /*
     Create the AO object
  */
  AOCreateBasic(MPI_COMM_WORLD,user.Nvlocal,user.gloInd,pordering,&ao);
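  /*
     The AO now maps application index gloInd[i] to PETSc index pordering[i]
     and back; AOApplicationToPetsc() below uses exactly this mapping.
  */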
  PetscFree(pordering);

  /*
     Keep the global indices for later use
  */
  PetscMalloc1(user.Nvlocal,&user.locInd);
  PetscMalloc1(Nvneighborstotal,&tmp);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Demonstrate the use of AO functionality
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscFPrintf(PETSC_COMM_SELF,fptr1,"Before AOApplicationToPetsc, local indices are : \n");
  for (i=0; i < user.Nvlocal; i++) {
    PetscFPrintf(PETSC_COMM_SELF,fptr1," %D ",user.gloInd[i]);

    user.locInd[i] = user.gloInd[i];
  }
  PetscFPrintf(PETSC_COMM_SELF,fptr1,"\n");
  jstart = 0;
  for (i=0; i < user.Nvlocal; i++) {
    PetscFPrintf(PETSC_COMM_SELF,fptr1,"Neighbors of local vertex %D are : ",user.gloInd[i]);
    for (j=0; j < user.itot[i]; j++) {
      PetscFPrintf(PETSC_COMM_SELF,fptr1,"%D ",user.AdjM[i][j]);

      tmp[j + jstart] = user.AdjM[i][j];
    }
    jstart += user.itot[i];
    PetscFPrintf(PETSC_COMM_SELF,fptr1,"\n");
  }

  /*
     Now map the local vertex list and the neighbor lists to the PETSc ordering
  */
  AOApplicationToPetsc(ao,user.Nvlocal,user.locInd);
  AOApplicationToPetsc(ao,Nvneighborstotal,tmp);
  AODestroy(&ao);

  PetscFPrintf(PETSC_COMM_SELF,fptr1,"After AOApplicationToPetsc, local indices are : \n");
  for (i=0; i < user.Nvlocal; i++) {
    PetscFPrintf(PETSC_COMM_SELF,fptr1," %D ",user.locInd[i]);
  }
  PetscFPrintf(PETSC_COMM_SELF,fptr1,"\n");

  jstart = 0;
  for (i=0; i < user.Nvlocal; i++) {
    PetscFPrintf(PETSC_COMM_SELF,fptr1,"Neighbors of local vertex %D are : ",user.locInd[i]);
    for (j=0; j < user.itot[i]; j++) {
      user.AdjM[i][j] = tmp[j+jstart];

      PetscFPrintf(PETSC_COMM_SELF,fptr1,"%D ",user.AdjM[i][j]);
    }
    jstart += user.itot[i];
    PetscFPrintf(PETSC_COMM_SELF,fptr1,"\n");
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Extract the ghost vertex information for each processor
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Next, we need to generate a list of vertices required for this processor
     and a local numbering scheme for all vertices required on this processor.

       vertices     - integer array of all vertices needed on this processor
                      in PETSc global numbering; this list consists of first
                      the "locally owned" vertices followed by the ghost
                      vertices.
       verticesmask - integer array that for each global vertex lists its
                      local vertex number (in vertices) + 1. If the global
                      vertex is not represented on this processor, then the
                      corresponding entry in verticesmask is zero.

     Note: vertices and verticesmask are both Nvglobal in length; this may
     sound terribly non-scalable, but in fact is not so bad for a reasonable
     number of processors. Importantly, it allows us to use NO SEARCHING
     in setting up the data structures.
  */
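  /*
     Tiny illustration (not from the actual run): if this rank owns PETSc
     vertices {3,7} and vertex 2 is a ghost, then vertices = {3,7,2},
     nvertices = 3, and verticesmask[3] = 1, verticesmask[7] = 2,
     verticesmask[2] = 3, with every other entry zero.
  */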
  PetscMalloc1(user.Nvglobal,&vertices);
  PetscCalloc1(user.Nvglobal,&verticesmask);
  nvertices = 0;

  /*
     First load "owned vertices" into list
  */
  for (i=0; i < user.Nvlocal; i++) {
    vertices[nvertices++]        = user.locInd[i];
    verticesmask[user.locInd[i]] = nvertices;
  }

  /*
     Now load ghost vertices into list
  */
  for (i=0; i < user.Nvlocal; i++) {
    for (j=0; j < user.itot[i]; j++) {
      nb = user.AdjM[i][j];
      if (!verticesmask[nb]) {
        vertices[nvertices++] = nb;
        verticesmask[nb]      = nvertices;
      }
    }
  }

  PetscFPrintf(PETSC_COMM_SELF,fptr1,"\n");
  PetscFPrintf(PETSC_COMM_SELF,fptr1,"The array vertices is :\n");
  for (i=0; i < nvertices; i++) {
    PetscFPrintf(PETSC_COMM_SELF,fptr1,"%D ",vertices[i]);
  }
  PetscFPrintf(PETSC_COMM_SELF,fptr1,"\n");

  /*
     Map the neighbor lists from the global ordering they initially contained
     to the local contiguous numbering.
  */
  PetscFPrintf(PETSC_COMM_SELF,fptr1,"\n");
  PetscFPrintf(PETSC_COMM_SELF,fptr1,"After mapping neighbors in the local contiguous ordering\n");
  for (i=0; i<user.Nvlocal; i++) {
    PetscFPrintf(PETSC_COMM_SELF,fptr1,"Neighbors of local vertex %D are :\n",i);
    for (j = 0; j < user.itot[i]; j++) {
      nb              = user.AdjM[i][j];
      user.AdjM[i][j] = verticesmask[nb] - 1;

      PetscFPrintf(PETSC_COMM_SELF,fptr1,"%D ",user.AdjM[i][j]);
    }
    PetscFPrintf(PETSC_COMM_SELF,fptr1,"\n");
  }

  N = user.Nvglobal;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create vector and matrix data structures
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
    Create vector data structures
  */
  VecCreate(MPI_COMM_WORLD,&x);
  VecSetSizes(x,user.Nvlocal,N);
  VecSetFromOptions(x);
  VecDuplicate(x,&r);
  VecCreateSeq(MPI_COMM_SELF,bs*nvertices,&user.localX);
  VecDuplicate(user.localX,&user.localF);

  /*
    Create the scatter between the global representation and the
    local representation
  */
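  /*
     islocal is the contiguous range 0..bs*nvertices-1 of local positions;
     isglobal holds the (block) global indices stored in vertices. The
     scatter thus gathers each owned and ghost value of x into localX.
  */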
  ISCreateStride(MPI_COMM_SELF,bs*nvertices,0,1,&islocal);
  ISCreateBlock(MPI_COMM_SELF,bs,nvertices,vertices,PETSC_COPY_VALUES,&isglobal);
  VecScatterCreate(x,isglobal,user.localX,islocal,&user.scatter);
  ISDestroy(&isglobal);
  ISDestroy(&islocal);

  /*
     Create matrix data structure; just to keep the example simple, we have not
     done any preallocation of memory for the matrix. In real application code
     with big matrices, preallocation should always be done to expedite the
     matrix creation.
  */
  MatCreate(MPI_COMM_WORLD,&Jac);
  MatSetSizes(Jac,PETSC_DECIDE,PETSC_DECIDE,N,N);
  MatSetFromOptions(Jac);
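#if defined(SKETCH_PREALLOCATION) /* hypothetical guard, off by default so the example is unchanged */
  /*
     Minimal preallocation sketch (an addition, not in the original example):
     each row couples a vertex to itself plus at most 50 neighbours (the
     second dimension of AdjM), so a conservative scalar upper bound suffices;
     a bound larger than the actual row length merely over-allocates.
     Preallocation calls that do not match the actual matrix type are no-ops,
     so issuing both variants is safe.
  */
  MatSeqAIJSetPreallocation(Jac,51,NULL);
  MatMPIAIJSetPreallocation(Jac,51,NULL,50,NULL);
#endif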
  MatSetUp(Jac);

  /*
    The following routine allows us to set the matrix values in local ordering
  */
  ISLocalToGlobalMappingCreate(MPI_COMM_SELF,bs,nvertices,vertices,PETSC_COPY_VALUES,&isl2g);
  MatSetLocalToGlobalMapping(Jac,isl2g,isl2g);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create nonlinear solver context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  SNESCreate(MPI_COMM_WORLD,&snes);
  SNESSetType(snes,type);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set routines for function and Jacobian evaluation
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  SNESSetFunction(snes,r,FormFunction,(void*)&user);

  PetscOptionsGetBool(NULL,NULL,"-fd_jacobian_coloring",&fd_jacobian_coloring,NULL);
  if (!fd_jacobian_coloring) {
    SNESSetJacobian(snes,Jac,Jac,FormJacobian,(void*)&user);
  } else { /* Use matfdcoloring */
    ISColoring  iscoloring;
    MatColoring mc;

    /* Get the data structure of Jac */
    FormJacobian(snes,x,Jac,Jac,&user);
    /* Create coloring context */
    MatColoringCreate(Jac,&mc);
    MatColoringSetType(mc,MATCOLORINGSL);
    MatColoringSetFromOptions(mc);
    MatColoringApply(mc,&iscoloring);
    MatColoringDestroy(&mc);
    MatFDColoringCreate(Jac,iscoloring,&matfdcoloring);
    MatFDColoringSetFunction(matfdcoloring,(PetscErrorCode (*)(void))FormFunction,&user);
    MatFDColoringSetFromOptions(matfdcoloring);
    MatFDColoringSetUp(Jac,iscoloring,matfdcoloring);
    /* MatFDColoringView(matfdcoloring,PETSC_VIEWER_STDOUT_WORLD); */
    SNESSetJacobian(snes,Jac,Jac,SNESComputeJacobianDefaultColor,matfdcoloring);
    ISColoringDestroy(&iscoloring);
  }
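  /*
     With coloring, SNESComputeJacobianDefaultColor approximates the Jacobian
     by finite-differencing FormFunction once per color rather than once per
     column: structurally orthogonal columns share a color and are perturbed
     together.
  */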

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize nonlinear solver; set runtime options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  SNESSetFromOptions(snes);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Evaluate initial guess; then solve nonlinear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Note: The user should initialize the vector, x, with the initial guess
     for the nonlinear solver prior to calling SNESSolve(). In particular,
     to employ an initial guess of zero, the user should explicitly set
     this vector to zero by calling VecSet().
  */
  FormInitialGuess(&user,x);

  /*
     Print the initial guess
  */
  VecGetArray(x,&xx);
  for (inode = 0; inode < user.Nvlocal; inode++) {
    PetscFPrintf(PETSC_COMM_SELF,fptr1,"Initial Solution at node %D is %f \n",inode,xx[inode]);
  }
  VecRestoreArray(x,&xx);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Now solve the nonlinear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  SNESSolve(snes,NULL,x);
  SNESGetIterationNumber(snes,&its);
  SNESGetNonlinearStepFailures(snes,&nfails);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Print the output : solution vector and other information
     Each processor writes to the file output.<rank> where rank is the
     processor's rank.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  VecGetArray(x,&xx);
  for (inode = 0; inode < user.Nvlocal; inode++) {
    PetscFPrintf(PETSC_COMM_SELF,fptr1,"Solution at node %D is %f \n",inode,xx[inode]);
  }
  VecRestoreArray(x,&xx);
  fclose(fptr1);
  PetscPrintf(MPI_COMM_WORLD,"number of SNES iterations = %D, ",its);
  PetscPrintf(MPI_COMM_WORLD,"number of unsuccessful steps = %D\n",nfails);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscFree(user.gloInd);
  PetscFree(user.locInd);
  PetscFree(vertices);
  PetscFree(verticesmask);
  PetscFree(tmp);
  VecScatterDestroy(&user.scatter);
  ISLocalToGlobalMappingDestroy(&isl2g);
  VecDestroy(&x);
  VecDestroy(&r);
  VecDestroy(&user.localX);
  VecDestroy(&user.localF);
  MatDestroy(&Jac);
  SNESDestroy(&snes);
  /* PetscDrawDestroy(draw); */
  if (fd_jacobian_coloring) {
    MatFDColoringDestroy(&matfdcoloring);
  }
  ierr = PetscFinalize();
  return ierr;
}
/* -------------------- Form initial approximation ----------------- */
/*
   FormInitialGuess - Forms initial approximation.

   Input Parameters:
   user - user-defined application context
   X - vector

   Output Parameter:
   X - vector
*/
PetscErrorCode FormInitialGuess(AppCtx *user,Vec X)
{
  PetscInt    i,Nvlocal;
  PetscInt    *gloInd;
  PetscScalar *x;
#if defined(UNUSED_VARIABLES)
  PetscReal temp1,temp,hx,hy,hxdhy,hydhx,sc;
  PetscInt  Neglobal,Nvglobal,j,row;
  PetscReal alpha,lambda;

  Nvglobal = user->Nvglobal;
  Neglobal = user->Neglobal;
  lambda   = user->non_lin_param;
  alpha    = user->lin_param;
#endif

  Nvlocal = user->Nvlocal;
  gloInd  = user->gloInd;

  /*
     Get a pointer to vector data.
       - For default PETSc vectors, VecGetArray() returns a pointer to
         the data array. Otherwise, the routine is implementation dependent.
       - You MUST call VecRestoreArray() when you no longer need access to
         the array.
  */
  VecGetArray(X,&x);

  /*
     Compute initial guess over the locally owned part of the grid
  */
  for (i=0; i < Nvlocal; i++) x[i] = (PetscReal)gloInd[i];

  /*
     Restore vector
  */
  VecRestoreArray(X,&x);
  return 0;
}
/* -------------------- Evaluate Function F(x) --------------------- */
/*
   FormFunction - Evaluates nonlinear function, F(x).

   Input Parameters:
.  snes - the SNES context
.  X - input vector
.  ptr - optional user-defined context, as set by SNESSetFunction()

   Output Parameter:
.  F - function vector
*/
PetscErrorCode FormFunction(SNES snes,Vec X,Vec F,void *ptr)
{
  AppCtx      *user = (AppCtx*)ptr;
  PetscInt    i,j,Nvlocal;
  PetscReal   alpha,lambda;
  PetscScalar *x,*f;
  VecScatter  scatter;
  Vec         localX = user->localX;
#if defined(UNUSED_VARIABLES)
  PetscScalar ut,ub,ul,ur,u,*g,sc,uyy,uxx;
  PetscReal   hx,hy,hxdhy,hydhx;
  PetscReal   two = 2.0,one = 1.0;
  PetscInt    Nvglobal,Neglobal,row;
  PetscInt    *gloInd;

  Nvglobal = user->Nvglobal;
  Neglobal = user->Neglobal;
  gloInd   = user->gloInd;
#endif

  Nvlocal = user->Nvlocal;
  lambda  = user->non_lin_param;
  alpha   = user->lin_param;
  scatter = user->scatter;

  /*
     PDE : L(u) + lambda*u*u + alpha*u = 0, where L(u) is the approximate
     Laplacian described at the beginning of this code.

     First scatter the distributed vector X into the local vector localX (which
     includes values for ghost nodes). If we wish, we can put some other work
     between VecScatterBegin() and VecScatterEnd() to overlap communication
     with computation.
  */
  VecScatterBegin(scatter,X,localX,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterEnd(scatter,X,localX,INSERT_VALUES,SCATTER_FORWARD);

  /*
     Get pointers to vector data
  */
  VecGetArray(localX,&x);
  VecGetArray(F,&f);

  /*
     Now compute f(x). As mentioned earlier, the computed Laplacian is just an
     approximate one, chosen for illustrative purposes only. Another point to
     notice is that this is a local (completely parallel) calculation. In
     practical application codes, function evaluation often dominates the
     overall execution time.
  */
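  /*
     Explicitly (a restatement of the loop below, not original text): with
     deg(i) = itot[i],

        f_i = (deg(i) - alpha)*u_i - lambda*u_i*u_i - sum over j in adj(i) of u_j

     i.e. the graph Laplacian term deg(i)*u_i - sum u_j combined with the
     alpha and lambda contributions.
  */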
  for (i=0; i < Nvlocal; i++) {
    f[i] = (user->itot[i] - alpha)*x[i] - lambda*x[i]*x[i];
    for (j = 0; j < user->itot[i]; j++) f[i] -= x[user->AdjM[i][j]];
  }

  /*
     Restore vectors
  */
  VecRestoreArray(localX,&x);
  VecRestoreArray(F,&f);
  /* VecView(F,PETSC_VIEWER_STDOUT_WORLD); */

  return 0;
}

/* -------------------- Evaluate Jacobian F'(x) -------------------- */
/*
   FormJacobian - Evaluates Jacobian matrix.

   Input Parameters:
.  snes - the SNES context
.  X - input vector
.  ptr - optional user-defined context, as set by SNESSetJacobian()

   Output Parameters:
.  J - Jacobian matrix
.  jac - optionally different matrix from which the preconditioner is built
*/
PetscErrorCode FormJacobian(SNES snes,Vec X,Mat J,Mat jac,void *ptr)
{
  AppCtx      *user = (AppCtx*)ptr;
  PetscInt    i,j,Nvlocal,col[51];          /* 1 diagonal + at most 50 neighbours per row */
  PetscScalar alpha,lambda,value[51];
  Vec         localX = user->localX;
  VecScatter  scatter;
  PetscScalar *x;
#if defined(UNUSED_VARIABLES)
  PetscScalar two = 2.0,one = 1.0;
  PetscInt    row,Nvglobal,Neglobal;
  PetscInt    *gloInd;

  Nvglobal = user->Nvglobal;
  Neglobal = user->Neglobal;
  gloInd   = user->gloInd;
#endif

  /* printf("Entering FormJacobian\n"); */
  Nvlocal = user->Nvlocal;
  lambda  = user->non_lin_param;
  alpha   = user->lin_param;
  scatter = user->scatter;

  /*
     PDE : L(u) + lambda*u*u + alpha*u = 0, where L(u) is the approximate
     Laplacian described at the beginning of this code.

     First scatter the distributed vector X into the local vector localX (which
     includes values for ghost nodes). If we wish, we can put some other work
     between VecScatterBegin() and VecScatterEnd() to overlap communication
     with computation.
  */
  VecScatterBegin(scatter,X,localX,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterEnd(scatter,X,localX,INSERT_VALUES,SCATTER_FORWARD);

  /*
     Get pointer to vector data
  */
  VecGetArray(localX,&x);
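  /*
     Differentiating the residual from FormFunction (a restatement, not
     original text): for row i,

        dF_i/du_i = deg(i) - alpha - 2*lambda*u_i      (diagonal)
        dF_i/du_j = -1 for each neighbour j in adj(i)  (off-diagonal)

     which is exactly what the loop below inserts.
  */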
  for (i=0; i < Nvlocal; i++) {
    col[0]   = i;
    value[0] = user->itot[i] - 2.0*lambda*x[i] - alpha;
    for (j = 0; j < user->itot[i]; j++) {
      col[j+1]   = user->AdjM[i][j];
      value[j+1] = -1.0;
    }

    /*
       Set the matrix values in the local ordering. Note that in order to use
       this feature we must call the routine MatSetLocalToGlobalMapping()
       after the matrix has been created.
    */
    MatSetValuesLocal(jac,1,&i,1+user->itot[i],col,value,INSERT_VALUES);
  }

  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd().
     Between these two calls, the pointer to vector data is restored to
     demonstrate the overlap of communication with computation.
  */
  MatAssemblyBegin(jac,MAT_FINAL_ASSEMBLY);
  VecRestoreArray(localX,&x);
  MatAssemblyEnd(jac,MAT_FINAL_ASSEMBLY);

  /*
     Tell the matrix we will never add a new nonzero location to the
     matrix. If we do, it will generate an error.
  */
  MatSetOption(jac,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);
  /* MatView(jac,PETSC_VIEWER_STDOUT_SELF); */
  return 0;
}

/*TEST

   build:
      requires: !complex

   test:
      nsize: 2
      args: -snes_monitor_short
      localrunfiles: options.inf adj.in

   test:
      suffix: 2
      nsize: 2
      args: -snes_monitor_short -fd_jacobian_coloring
      localrunfiles: options.inf adj.in

TEST*/