Actual source code: ex10.c
petsc-3.7.3 2016-08-01
2: /*
3: Include "petscsnes.h" so that we can use SNES solvers. Note that this
4: file automatically includes:
5: petscsys.h - base PETSc routines        petscvec.h - vectors
6: petscmat.h - matrices
7: petscis.h - index sets                  petscksp.h - Krylov subspace methods
8: petscviewer.h - viewers                 petscpc.h - preconditioners
9: petscksp.h - linear solvers
10: */
11: #include <petscsnes.h>
12: #include <petscao.h>
14: #if !defined(PETSC_USE_COMPLEX)
16: static char help[] = "An Unstructured Grid Example.\n\
17: This example demonstrates how to solve a nonlinear system in parallel\n\
18: with SNES for an unstructured mesh. The mesh and partitioning information\n\
19: is read in an application-defined ordering, which is later transformed\n\
20: into another convenient ordering (called the local ordering). The local\n\
21: ordering, apart from being efficient in CPU cycles and memory, allows\n\
22: the use of the SPMD model of parallel programming. After partitioning\n\
23: is done, scatters are created between local (sequential) and global\n\
24: (distributed) vectors. Finally, we set up the nonlinear solver context\n\
25: in the usual way, as for a structured grid (see\n\
26: petsc/src/snes/examples/tutorials/ex5.c).\n\
27: This example also illustrates the use of parallel matrix coloring.\n\
28: The command line options include:\n\
29: -vert <Nv>, where Nv is the global number of nodes\n\
30: -elem <Ne>, where Ne is the global number of elements\n\
31: -nl_par <lambda>, where lambda is the multiplier for the nonlinear term (u*u)\n\
32: -lin_par <alpha>, where alpha is the multiplier for the linear term (u)\n\
33: -fd_jacobian_coloring -mat_coloring_type lf\n";
35: /*T
36: Concepts: SNES^unstructured grid
37: Concepts: AO^application to PETSc ordering or vice versa;
38: Concepts: VecScatter^using vector scatter operations;
39: Processors: n
40: T*/
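/*
   An illustrative invocation (a sketch only; it assumes ex10 has been built and that the
   adj.in and options.inf files from this directory are present in the working directory,
   since the example currently requires exactly 2 MPI processes):

       mpiexec -n 2 ./ex10 -fd_jacobian_coloring -mat_coloring_type lf

   Each process then writes its portion of the output to output.<rank>.
*/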
42: /* ------------------------------------------------------------------------
44: PDE Solved : L(u) + lambda*u*u + alpha*u = 0 where L(u) is the Laplacian.
46: The Laplacian is approximated in the following way: each edge is given a weight
47: of one, so the diagonal term has a weight equal to the degree of the node
48: and each off-diagonal term has a weight of -1.
50: -----------------------------------------------------------------------*/
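/*
   Written out, the approximation above gives, at a vertex i with neighbor set adj(i),

       L(u)_i = deg(i)*u_i - sum_{j in adj(i)} u_j

   where deg(i) is the number of neighbors of vertex i (stored below in itot[i]).
*/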
53: #define MAX_ELEM 500 /* Maximum number of elements */
54: #define MAX_VERT 100 /* Maximum number of vertices */
55: #define MAX_VERT_ELEM 3 /* Vertices per element */
57: /*
58: Application-defined context for problem specific data
59: */
60: typedef struct {
61: PetscInt Nvglobal,Nvlocal; /* global and local number of vertices */
62: PetscInt Neglobal,Nelocal; /* global and local number of elements */
63: PetscInt AdjM[MAX_VERT][50]; /* adjacency list of a vertex */
64: PetscInt itot[MAX_VERT]; /* total number of neighbors for a vertex */
65: PetscInt icv[MAX_ELEM][MAX_VERT_ELEM]; /* vertices belonging to an element */
66: PetscInt v2p[MAX_VERT]; /* processor number for a vertex */
67: PetscInt *locInd,*gloInd; /* local and global orderings for a node */
68: Vec localX,localF; /* local solution (u) and f(u) vectors */
69: PetscReal non_lin_param; /* nonlinear parameter for the PDE */
70: PetscReal lin_param; /* linear parameter for the PDE */
71: VecScatter scatter; /* scatter context for the local and
72: distributed vectors */
73: } AppCtx;
75: /*
76: User-defined routines
77: */
78: PetscErrorCode FormJacobian(SNES,Vec,Mat,Mat,void*);
79: PetscErrorCode FormFunction(SNES,Vec,Vec,void*);
80: PetscErrorCode FormInitialGuess(AppCtx*,Vec);
84: int main(int argc,char **argv)
85: {
86: SNES snes; /* SNES context */
87: SNESType type = SNESNEWTONLS; /* default nonlinear solution method */
88: Vec x,r; /* solution, residual vectors */
89: Mat Jac; /* Jacobian matrix */
90: AppCtx user; /* user-defined application context */
91: AO ao; /* Application Ordering object */
92: IS isglobal,islocal; /* global and local index sets */
93: PetscMPIInt rank,size; /* rank of a process, number of processors */
94: PetscInt rstart; /* starting index of PETSc ordering for a processor */
95: PetscInt nfails; /* number of unsuccessful Newton steps */
96: PetscInt bs = 1; /* block size for multicomponent systems */
97: PetscInt nvertices; /* number of local plus ghost nodes of a processor */
98: PetscInt *pordering; /* PETSc ordering */
99: PetscInt *vertices; /* list of all vertices (incl. ghost ones) on a processor */
100: PetscInt *verticesmask;
101: PetscInt *tmp;
102: PetscInt i,j,jstart,inode,nb,nbrs,Nvneighborstotal = 0;
103: PetscErrorCode ierr;
104: PetscInt its,N;
105: PetscScalar *xx;
106: char str[256],form[256],part_name[256];
107: FILE *fptr,*fptr1;
108: ISLocalToGlobalMapping isl2g;
109: int dtmp;
110: #if defined(UNUSED_VARIABLES)
111: PetscDraw draw; /* drawing context */
112: PetscScalar *ff,*gg;
113: PetscReal tiny = 1.0e-10,zero = 0.0,one = 1.0,big = 1.0e+10;
114: PetscInt *tmp1,*tmp2;
115: #endif
116: MatFDColoring matfdcoloring = 0;
117: PetscBool fd_jacobian_coloring = PETSC_FALSE;
119: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
120: Initialize program
121: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
123: PetscInitialize(&argc,&argv,"options.inf",help);
124: MPI_Comm_rank(MPI_COMM_WORLD,&rank);
125: MPI_Comm_size(MPI_COMM_WORLD,&size);
127: /* The current input file options.inf is for a 2-processor run only */
128: if (size != 2) SETERRQ(PETSC_COMM_SELF,1,"This example currently runs on 2 processors only.");
130: /*
131: Initialize problem parameters
132: */
133: user.Nvglobal = 16; /* Global # of vertices */
134: user.Neglobal = 18; /* Global # of elements */
136: PetscOptionsGetInt(NULL,NULL,"-vert",&user.Nvglobal,NULL);
137: PetscOptionsGetInt(NULL,NULL,"-elem",&user.Neglobal,NULL);
139: user.non_lin_param = 0.06;
141: PetscOptionsGetReal(NULL,NULL,"-nl_par",&user.non_lin_param,NULL);
143: user.lin_param = -1.0;
145: PetscOptionsGetReal(NULL,NULL,"-lin_par",&user.lin_param,NULL);
147: user.Nvlocal = 0;
148: user.Nelocal = 0;
150: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
151: Read the mesh and partitioning information
152: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
154: /*
155: Read the mesh and partitioning information from 'adj.in'.
156: The file format is as follows.
157: For each line the first entry is the processor rank where the
158: current node belongs. The second entry is the number of
159: neighbors of a node. The rest of the line is the adjacency
160: list of a node. Currently this file is set up to work on two
161: processors.
163: This is not a very good example of reading input. In the future,
164: we will add an example that shows the style that should be
165: used in a real application, where partitioning is done
166: dynamically by calling partitioning routines (at present, we have
167: a ready interface to ParMeTiS).
168: */
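/*
   For instance (a hypothetical line, not necessarily taken from adj.in), the line

       1 3 2 5 9

   would mean: this vertex is owned by processor 1, it has 3 neighbors, and those
   neighbors are vertices 2, 5 and 9 in the application ordering.
*/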
169: fptr = fopen("adj.in","r");
170: if (!fptr) SETERRQ(PETSC_COMM_SELF,0,"Could not open adj.in");
172: /*
173: Each processor writes to the file output.<rank> where rank is the
174: processor's rank.
175: */
176: sprintf(part_name,"output.%d",rank);
177: fptr1 = fopen(part_name,"w");
178: if (!fptr1) SETERRQ(PETSC_COMM_SELF,0,"Could not open output file");
179: PetscMalloc1(user.Nvglobal,&user.gloInd);
180: PetscFPrintf(PETSC_COMM_SELF,fptr1,"Rank is %D\n",rank);
181: for (inode = 0; inode < user.Nvglobal; inode++) {
182: if (!fgets(str,256,fptr)) SETERRQ(PETSC_COMM_SELF,1,"fgets read failed");
183: sscanf(str,"%d",&dtmp);user.v2p[inode] = dtmp;
184: if (user.v2p[inode] == rank) {
185: PetscFPrintf(PETSC_COMM_SELF,fptr1,"Node %D belongs to processor %D\n",inode,user.v2p[inode]);
187: user.gloInd[user.Nvlocal] = inode;
188: sscanf(str,"%*d %d",&dtmp);
189: nbrs = dtmp;
190: PetscFPrintf(PETSC_COMM_SELF,fptr1,"Number of neighbors for the vertex %D is %D\n",inode,nbrs);
192: user.itot[user.Nvlocal] = nbrs;
193: Nvneighborstotal += nbrs;
194: for (i = 0; i < user.itot[user.Nvlocal]; i++) {
195: form[0]='\0';
196: for (j=0; j < i+2; j++) {
197: PetscStrcat(form,"%*d ");
198: }
199: PetscStrcat(form,"%d");
201: sscanf(str,form,&dtmp);
202: user.AdjM[user.Nvlocal][i] = dtmp;
204: PetscFPrintf(PETSC_COMM_SELF,fptr1,"%D ",user.AdjM[user.Nvlocal][i]);
205: }
206: PetscFPrintf(PETSC_COMM_SELF,fptr1,"\n");
207: user.Nvlocal++;
208: }
209: }
210: PetscFPrintf(PETSC_COMM_SELF,fptr1,"Total # of Local Vertices is %D \n",user.Nvlocal);
212: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
213: Create different orderings
214: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
216: /*
217: Create the local ordering list for vertices. First a list using the PETSc global
218: ordering is created. Then we use the AO object to get the PETSc-to-application and
219: application-to-PETSc mappings. Each vertex also gets a local index (stored in the
220: locInd array).
221: */
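/*
   As an illustration (hypothetical ownership, not the actual adj.in data): if rank 0
   owns application vertices {0,2,5} and rank 1 owns {1,3,4}, the contiguous PETSc
   ordering built below assigns PETSc indices {0,1,2} to rank 0 and {3,4,5} to rank 1,
   and AOApplicationToPetsc() then translates, e.g., application index 2 into PETSc index 1.
*/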
222: MPI_Scan(&user.Nvlocal,&rstart,1,MPIU_INT,MPI_SUM,PETSC_COMM_WORLD);
223: rstart -= user.Nvlocal;
224: PetscMalloc1(user.Nvlocal,&pordering);
226: for (i=0; i < user.Nvlocal; i++) pordering[i] = rstart + i;
228: /*
229: Create the AO object
230: */
231: AOCreateBasic(MPI_COMM_WORLD,user.Nvlocal,user.gloInd,pordering,&ao);
232: PetscFree(pordering);
234: /*
235: Keep the global indices for later use
236: */
237: PetscMalloc1(user.Nvlocal,&user.locInd);
238: PetscMalloc1(Nvneighborstotal,&tmp);
240: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
241: Demonstrate the use of AO functionality
242: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
244: PetscFPrintf(PETSC_COMM_SELF,fptr1,"Before AOApplicationToPetsc, local indices are : \n");
245: for (i=0; i < user.Nvlocal; i++) {
246: PetscFPrintf(PETSC_COMM_SELF,fptr1," %D ",user.gloInd[i]);
248: user.locInd[i] = user.gloInd[i];
249: }
250: PetscFPrintf(PETSC_COMM_SELF,fptr1,"\n");
251: jstart = 0;
252: for (i=0; i < user.Nvlocal; i++) {
253: PetscFPrintf(PETSC_COMM_SELF,fptr1,"Neghbors of local vertex %D are : ",user.gloInd[i]);
254: for (j=0; j < user.itot[i]; j++) {
255: PetscFPrintf(PETSC_COMM_SELF,fptr1,"%D ",user.AdjM[i][j]);
257: tmp[j + jstart] = user.AdjM[i][j];
258: }
259: jstart += user.itot[i];
260: PetscFPrintf(PETSC_COMM_SELF,fptr1,"\n");
261: }
263: /*
264: Now map the local vertex list and the neighbor lists to the PETSc ordering
265: */
266: AOApplicationToPetsc(ao,user.Nvlocal,user.locInd);
267: AOApplicationToPetsc(ao,Nvneighborstotal,tmp);
268: AODestroy(&ao);
270: PetscFPrintf(PETSC_COMM_SELF,fptr1,"After AOApplicationToPetsc, local indices are : \n");
271: for (i=0; i < user.Nvlocal; i++) {
272: PetscFPrintf(PETSC_COMM_SELF,fptr1," %D ",user.locInd[i]);
273: }
274: PetscFPrintf(PETSC_COMM_SELF,fptr1,"\n");
276: jstart = 0;
277: for (i=0; i < user.Nvlocal; i++) {
278: PetscFPrintf(PETSC_COMM_SELF,fptr1,"Neghbors of local vertex %D are : ",user.locInd[i]);
279: for (j=0; j < user.itot[i]; j++) {
280: user.AdjM[i][j] = tmp[j+jstart];
282: PetscFPrintf(PETSC_COMM_SELF,fptr1,"%D ",user.AdjM[i][j]);
283: }
284: jstart += user.itot[i];
285: PetscFPrintf(PETSC_COMM_SELF,fptr1,"\n");
286: }
288: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
289: Extract the ghost vertex information for each processor
290: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
291: /*
292: Next, we need to generate a list of vertices required for this processor
293: and a local numbering scheme for all vertices required on this processor.
294: vertices - integer array of all vertices needed on this processor in PETSc
295: global numbering; this list consists of first the "locally owned"
296: vertices followed by the ghost vertices.
297: verticesmask - integer array that for each global vertex lists its local
298: vertex number (in vertices) + 1. If the global vertex is not
299: represented on this processor, then the corresponding
300: entry in verticesmask is zero
302: Note: vertices and verticesmask are both Nvglobal in length; this may
303: sound terribly non-scalable, but in fact is not so bad for a reasonable
304: number of processors. Importantly, it allows us to use NO SEARCHING
305: in setting up the data structures.
306: */
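/*
   A small illustration (hypothetical data): if vertices = {7, 2, 9} on this processor,
   then verticesmask[7] = 1, verticesmask[2] = 2, verticesmask[9] = 3, and every other
   entry of verticesmask is 0, so "verticesmask[v] - 1" recovers the local index of a
   global vertex v without any searching.
*/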
307: PetscMalloc1(user.Nvglobal,&vertices);
308: PetscMalloc1(user.Nvglobal,&verticesmask);
309: PetscMemzero(verticesmask,user.Nvglobal*sizeof(PetscInt));
310: nvertices = 0;
312: /*
313: First load "owned vertices" into list
314: */
315: for (i=0; i < user.Nvlocal; i++) {
316: vertices[nvertices++] = user.locInd[i];
317: verticesmask[user.locInd[i]] = nvertices;
318: }
320: /*
321: Now load ghost vertices into list
322: */
323: for (i=0; i < user.Nvlocal; i++) {
324: for (j=0; j < user.itot[i]; j++) {
325: nb = user.AdjM[i][j];
326: if (!verticesmask[nb]) {
327: vertices[nvertices++] = nb;
328: verticesmask[nb] = nvertices;
329: }
330: }
331: }
333: PetscFPrintf(PETSC_COMM_SELF,fptr1,"\n");
334: PetscFPrintf(PETSC_COMM_SELF,fptr1,"The array vertices is :\n");
335: for (i=0; i < nvertices; i++) {
336: PetscFPrintf(PETSC_COMM_SELF,fptr1,"%D ",vertices[i]);
337: }
338: PetscFPrintf(PETSC_COMM_SELF,fptr1,"\n");
340: /*
341: Map the vertices listed in the adjacency lists from the global (PETSc)
342: ordering they have held so far to the local contiguous numbering.
343: */
344: PetscFPrintf(PETSC_COMM_SELF,fptr1,"\n");
345: PetscFPrintf(PETSC_COMM_SELF,fptr1,"After mapping neighbors in the local contiguous ordering\n");
346: for (i=0; i<user.Nvlocal; i++) {
347: PetscFPrintf(PETSC_COMM_SELF,fptr1,"Neghbors of local vertex %D are :\n",i);
348: for (j = 0; j < user.itot[i]; j++) {
349: nb = user.AdjM[i][j];
350: user.AdjM[i][j] = verticesmask[nb] - 1;
352: PetscFPrintf(PETSC_COMM_SELF,fptr1,"%D ",user.AdjM[i][j]);
353: }
354: PetscFPrintf(PETSC_COMM_SELF,fptr1,"\n");
355: }
357: N = user.Nvglobal;
359: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
360: Create vector and matrix data structures
361: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
363: /*
364: Create vector data structures
365: */
366: VecCreate(MPI_COMM_WORLD,&x);
367: VecSetSizes(x,user.Nvlocal,N);
368: VecSetFromOptions(x);
369: VecDuplicate(x,&r);
370: VecCreateSeq(MPI_COMM_SELF,bs*nvertices,&user.localX);
371: VecDuplicate(user.localX,&user.localF);
373: /*
374: Create the scatter between the global representation and the
375: local representation
376: */
377: ISCreateStride(MPI_COMM_SELF,bs*nvertices,0,1,&islocal);
378: ISCreateBlock(MPI_COMM_SELF,bs,nvertices,vertices,PETSC_COPY_VALUES,&isglobal);
379: VecScatterCreate(x,isglobal,user.localX,islocal,&user.scatter);
380: ISDestroy(&isglobal);
381: ISDestroy(&islocal);
383: /*
384: Create matrix data structure; just to keep the example simple, we have not done any
385: preallocation of memory for the matrix. In real application codes with large matrices,
386: preallocation should always be done to expedite matrix creation.
387: */
388: MatCreate(MPI_COMM_WORLD,&Jac);
389: MatSetSizes(Jac,PETSC_DECIDE,PETSC_DECIDE,N,N);
390: MatSetFromOptions(Jac);
391: MatSetUp(Jac);
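/*
   A sketch (illustration only, not part of the original example) of the preallocation a
   production code could do right after MatSetFromOptions(), assuming the default AIJ
   format; the row for local vertex i needs 1 + user.itot[i] nonzeros:

       MatSeqAIJSetPreallocation(Jac,0,nnz);            (uniprocessor case)
       MatMPIAIJSetPreallocation(Jac,0,d_nnz,0,o_nnz);  (parallel case)

   where nnz, d_nnz and o_nnz are hypothetical user-computed arrays of per-row counts
   (d_nnz for columns inside this process's diagonal block, o_nnz for the rest).
*/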
393: /*
394: The following routine allows us to set the matrix values in local ordering
395: */
396: ISLocalToGlobalMappingCreate(MPI_COMM_SELF,bs,nvertices,vertices,PETSC_COPY_VALUES,&isl2g);
397: MatSetLocalToGlobalMapping(Jac,isl2g,isl2g);
399: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
400: Create nonlinear solver context
401: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
403: SNESCreate(MPI_COMM_WORLD,&snes);
404: SNESSetType(snes,type);
406: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
407: Set routines for function and Jacobian evaluation
408: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
409: SNESSetFunction(snes,r,FormFunction,(void*)&user);
411: PetscOptionsGetBool(NULL,NULL,"-fd_jacobian_coloring",&fd_jacobian_coloring,0);
412: if (!fd_jacobian_coloring) {
413: SNESSetJacobian(snes,Jac,Jac,FormJacobian,(void*)&user);
414: } else { /* Use matfdcoloring */
415: ISColoring iscoloring;
416: MatColoring mc;
418: /* Get the data structure of Jac */
419: FormJacobian(snes,x,Jac,Jac,&user);
420: /* Create coloring context */
421: MatColoringCreate(Jac,&mc);
422: MatColoringSetType(mc,MATCOLORINGSL);
423: MatColoringSetFromOptions(mc);
424: MatColoringApply(mc,&iscoloring);
425: MatColoringDestroy(&mc);
426: MatFDColoringCreate(Jac,iscoloring,&matfdcoloring);
427: MatFDColoringSetFunction(matfdcoloring,(PetscErrorCode (*)(void))FormFunction,&user);
428: MatFDColoringSetFromOptions(matfdcoloring);
429: MatFDColoringSetUp(Jac,iscoloring,matfdcoloring);
430: /* MatFDColoringView(matfdcoloring,PETSC_VIEWER_STDOUT_WORLD); */
431: SNESSetJacobian(snes,Jac,Jac,SNESComputeJacobianDefaultColor,matfdcoloring);
432: ISColoringDestroy(&iscoloring);
433: }
435: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
436: Customize nonlinear solver; set runtime options
437: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
439: SNESSetFromOptions(snes);
441: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
442: Evaluate initial guess; then solve nonlinear system
443: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
445: /*
446: Note: The user should initialize the vector, x, with the initial guess
447: for the nonlinear solver prior to calling SNESSolve(). In particular,
448: to employ an initial guess of zero, the user should explicitly set
449: this vector to zero by calling VecSet().
450: */
451: FormInitialGuess(&user,x);
453: /*
454: Print the initial guess
455: */
456: VecGetArray(x,&xx);
457: for (inode = 0; inode < user.Nvlocal; inode++) {
458: PetscFPrintf(PETSC_COMM_SELF,fptr1,"Initial Solution at node %D is %f \n",inode,xx[inode]);
459: }
460: VecRestoreArray(x,&xx);
462: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
463: Now solve the nonlinear system
464: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
466: SNESSolve(snes,NULL,x);
467: SNESGetIterationNumber(snes,&its);
468: SNESGetNonlinearStepFailures(snes,&nfails);
470: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
471: Print the output : solution vector and other information
472: Each processor writes to the file output.<rank> where rank is the
473: processor's rank.
474: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
476: VecGetArray(x,&xx);
477: for (inode = 0; inode < user.Nvlocal; inode++) {
478: PetscFPrintf(PETSC_COMM_SELF,fptr1,"Solution at node %D is %f \n",inode,xx[inode]);
479: }
480: VecRestoreArray(x,&xx);
481: fclose(fptr1);
482: PetscPrintf(MPI_COMM_WORLD,"number of SNES iterations = %D, ",its);
483: PetscPrintf(MPI_COMM_WORLD,"number of unsuccessful steps = %D\n",nfails);
485: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
486: Free work space. All PETSc objects should be destroyed when they
487: are no longer needed.
488: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
489: PetscFree(user.gloInd);
490: PetscFree(user.locInd);
491: PetscFree(vertices);
492: PetscFree(verticesmask);
493: PetscFree(tmp);
494: VecScatterDestroy(&user.scatter);
495: ISLocalToGlobalMappingDestroy(&isl2g);
496: VecDestroy(&x);
497: VecDestroy(&r);
498: VecDestroy(&user.localX);
499: VecDestroy(&user.localF);
500: MatDestroy(&Jac); SNESDestroy(&snes);
501: /*PetscDrawDestroy(draw);*/
502: if (fd_jacobian_coloring) {
503: MatFDColoringDestroy(&matfdcoloring);
504: }
505: PetscFinalize();
507: return 0;
508: }
511: /* -------------------- Form initial approximation ----------------- */
513: /*
514: FormInitialGuess - Forms initial approximation.
516: Input Parameters:
517: user - user-defined application context
518: X - vector
520: Output Parameter:
521: X - vector
522: */
523: PetscErrorCode FormInitialGuess(AppCtx *user,Vec X)
524: {
525: PetscInt i,Nvlocal,ierr;
526: PetscInt *gloInd;
527: PetscScalar *x;
528: #if defined(UNUSED_VARIABLES)
529: PetscReal temp1,temp,hx,hy,hxdhy,hydhx,sc;
530: PetscInt Neglobal,Nvglobal,j,row;
531: PetscReal alpha,lambda;
533: Nvglobal = user->Nvglobal;
534: Neglobal = user->Neglobal;
535: lambda = user->non_lin_param;
536: alpha = user->lin_param;
537: #endif
539: Nvlocal = user->Nvlocal;
540: gloInd = user->gloInd;
542: /*
543: Get a pointer to vector data.
544: - For default PETSc vectors, VecGetArray() returns a pointer to
545: the data array. Otherwise, the routine is implementation dependent.
546: - You MUST call VecRestoreArray() when you no longer need access to
547: the array.
548: */
549: VecGetArray(X,&x);
551: /*
552: Compute initial guess over the locally owned part of the grid
553: */
554: for (i=0; i < Nvlocal; i++) x[i] = (PetscReal)gloInd[i];
556: /*
557: Restore vector
558: */
559: VecRestoreArray(X,&x);
560: return 0;
561: }
564: /* -------------------- Evaluate Function F(x) --------------------- */
565: /*
566: FormFunction - Evaluates nonlinear function, F(x).
568: Input Parameters:
569: . snes - the SNES context
570: . X - input vector
571: . ptr - optional user-defined context, as set by SNESSetFunction()
573: Output Parameter:
574: . F - function vector
575: */
576: PetscErrorCode FormFunction(SNES snes,Vec X,Vec F,void *ptr)
577: {
578: AppCtx *user = (AppCtx*)ptr;
580: PetscInt i,j,Nvlocal;
581: PetscReal alpha,lambda;
582: PetscScalar *x,*f;
583: VecScatter scatter;
584: Vec localX = user->localX;
585: #if defined(UNUSED_VARIABLES)
586: PetscScalar ut,ub,ul,ur,u,*g,sc,uyy,uxx;
587: PetscReal hx,hy,hxdhy,hydhx;
588: PetscReal two = 2.0,one = 1.0;
589: PetscInt Nvglobal,Neglobal,row;
590: PetscInt *gloInd;
592: Nvglobal = user->Nvglobal;
593: Neglobal = user->Neglobal;
594: gloInd = user->gloInd;
595: #endif
597: Nvlocal = user->Nvlocal;
598: lambda = user->non_lin_param;
599: alpha = user->lin_param;
600: scatter = user->scatter;
602: /*
603: PDE : L(u) + lambda*u*u + alpha*u = 0 where L(u) is the approximate Laplacian as
604: described at the beginning of this code.
606: First scatter the distributed vector X into the local vector localX (which includes
607: values for the ghost nodes). If we wish, we can put some other work between
608: VecScatterBegin() and VecScatterEnd() to overlap the communication with
609: computation.
610: */
611: VecScatterBegin(scatter,X,localX,INSERT_VALUES,SCATTER_FORWARD);
612: VecScatterEnd(scatter,X,localX,INSERT_VALUES,SCATTER_FORWARD);
614: /*
615: Get pointers to vector data
616: */
617: VecGetArray(localX,&x);
618: VecGetArray(F,&f);
620: /*
621: Now compute f(x). As mentioned earlier, the computed Laplacian is just an
622: approximate one chosen for illustrative purposes only. Another point to notice
623: is that this is a local (completely parallel) calculation. In practical application
624: codes, function evaluation time is a dominant portion of the overall execution time.
625: */
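/*
   Written out (this restates the loop below; it is not an additional assumption), each
   residual entry is

       f_i = (itot[i] - alpha)*x_i - lambda*x_i*x_i - sum_{j in adj(i)} x_j

   where itot[i] is the number of neighbors (the degree) of local vertex i.
*/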
626: for (i=0; i < Nvlocal; i++) {
627: f[i] = (user->itot[i] - alpha)*x[i] - lambda*x[i]*x[i];
628: for (j = 0; j < user->itot[i]; j++) f[i] -= x[user->AdjM[i][j]];
629: }
631: /*
632: Restore vectors
633: */
634: VecRestoreArray(localX,&x);
635: VecRestoreArray(F,&f);
636: /*VecView(F,PETSC_VIEWER_STDOUT_WORLD);*/
638: return 0;
639: }
643: /* -------------------- Evaluate Jacobian F'(x) -------------------- */
644: /*
645: FormJacobian - Evaluates Jacobian matrix.
647: Input Parameters:
648: . snes - the SNES context
649: . X - input vector
650: . ptr - optional user-defined context, as set by SNESSetJacobian()
652: Output Parameters:
653: . J - Jacobian matrix
654: . jac - optionally different matrix from which the preconditioner is constructed
657: */
658: PetscErrorCode FormJacobian(SNES snes,Vec X,Mat J,Mat jac,void *ptr)
659: {
660: AppCtx *user = (AppCtx*)ptr;
661: PetscInt i,j,Nvlocal,col[50],ierr;
662: PetscScalar alpha,lambda,value[50];
663: Vec localX = user->localX;
664: VecScatter scatter;
665: PetscScalar *x;
666: #if defined(UNUSED_VARIABLES)
667: PetscScalar two = 2.0,one = 1.0;
668: PetscInt row,Nvglobal,Neglobal;
669: PetscInt *gloInd;
671: Nvglobal = user->Nvglobal;
672: Neglobal = user->Neglobal;
673: gloInd = user->gloInd;
674: #endif
676: /*printf("Entering into FormJacobian \n");*/
677: Nvlocal = user->Nvlocal;
678: lambda = user->non_lin_param;
679: alpha = user->lin_param;
680: scatter = user->scatter;
682: /*
683: PDE : L(u) + lambda*u*u + alpha*u = 0 where L(u) is the approximate Laplacian as
684: described at the beginning of this code.
686: First scatter the distributed vector X into the local vector localX (which includes
687: values for the ghost nodes). If we wish, we can put some other work between
688: VecScatterBegin() and VecScatterEnd() to overlap the communication with
689: computation.
690: */
691: VecScatterBegin(scatter,X,localX,INSERT_VALUES,SCATTER_FORWARD);
692: VecScatterEnd(scatter,X,localX,INSERT_VALUES,SCATTER_FORWARD);
694: /*
695: Get pointer to vector data
696: */
697: VecGetArray(localX,&x);
699: for (i=0; i < Nvlocal; i++) {
700: col[0] = i;
701: value[0] = user->itot[i] - 2.0*lambda*x[i] - alpha;
702: for (j = 0; j < user->itot[i]; j++) {
703: col[j+1] = user->AdjM[i][j];
704: value[j+1] = -1.0;
705: }
707: /*
708: Set the matrix values in the local ordering. Note that in order to use this
709: feature we must call the routine MatSetLocalToGlobalMapping() after the
710: matrix has been created.
711: */
712: MatSetValuesLocal(jac,1,&i,1+user->itot[i],col,value,INSERT_VALUES);
713: }
715: /*
716: Assemble matrix, using the 2-step process:
717: MatAssemblyBegin(), MatAssemblyEnd().
718: Between these two calls, the pointer to vector data has been restored to
719: demonstrate the use of overlapping communication with computation.
720: */
721: MatAssemblyBegin(jac,MAT_FINAL_ASSEMBLY);
722: VecRestoreArray(localX,&x);
723: MatAssemblyEnd(jac,MAT_FINAL_ASSEMBLY);
725: /*
726: Tell the matrix we will never add a new nonzero location to the
727: matrix. If we do, it will generate an error.
728: */
729: MatSetOption(jac,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);
730: /* MatView(jac,PETSC_VIEWER_STDOUT_SELF); */
731: return 0;
732: }
733: #else
735: int main(int argc,char **args)
736: {
737: fprintf(stdout,"This example does not work for complex numbers.\n");
738: return 0;
739: }
740: #endif