Actual source code: ex2f.F
petsc-3.3-p7 2013-05-11
!
!/*T
!  Concepts: TS^time-dependent nonlinear problems
!  Processors: n
!T*/
!
! ------------------------------------------------------------------------
!
!  This program solves a simple time-dependent nonlinear PDE using implicit
!  timestepping:
!                   u * u_xx
!             u_t = ---------
!                   2*(t+1)^2
!
!             u(0,x) = 1 + x*x;  u(t,0) = t + 1;  u(t,1) = 2*t + 2
!
!  The exact solution is u(t,x) = (1 + x*x) * (1 + t).
!
!  Note that since the solution is linear in time and quadratic in x,
!  the finite difference scheme actually computes the "exact" solution.
!
!  We use the backward Euler method.
!
! --------------------------------------------------------------------------
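!
!  Example usage (illustrative only; the MPI launcher, executable name, and
!  process count depend on your installation.  The -m, -debug, -monitor,
!  and -fdjac options are read by this example; -ts_max_steps and
!  -ts_final_time are standard TS runtime options):
!
!      mpiexec -n 2 ./ex2f -m 60 -monitor
!      mpiexec -n 4 ./ex2f -m 120 -ts_max_steps 100 -ts_final_time 2.0
!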
      program main
      implicit none

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                    Include files
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  Each routine within this program uses the include file 'ex2f.h',
!  which itself includes the various PETSc include files as well as
!  problem-specific data in several common blocks.
!
!  This program uses CPP for preprocessing, as indicated by the use of
!  PETSc include files in the directory petsc/include/finclude.  This
!  convention enables use of the CPP preprocessor, which allows the use
!  of the #include statements that define PETSc objects and variables.
!
!  Use of the conventional Fortran include statements is also supported.
!  In this case, the PETSc include files are located in the directory
!  petsc/include/foldinclude.
!
!  Since one must be very careful to include each file no more than once
!  in a Fortran routine, application programmers must explicitly list
!  each file needed for the various PETSc components within their
!  program (unlike the C/C++ interface).
!
!  See the Fortran section of the PETSc users manual for details.

#include "ex2f.h"

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                    Variable declarations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  Variables:
!     ts             - timestepping solver
!     A              - Jacobian matrix context
!     u_local        - local vector
!     u              - approximate solution vector
!     ftime          - final time
!     time_total_max - default total max time
!     time_steps_max - default max timesteps
!
!  Note that vectors are declared as PETSc "Vec" objects.  These vectors
!  are mathematical objects that contain more than just an array of
!  double precision numbers; i.e., vectors in PETSc are not just
!  double precision x(*).  However, local vector data can be easily
!  accessed via VecGetArray().  See the Fortran section of the PETSc
!  users manual for details.

      TS             ts
      Vec            u
      Mat            A
      PetscBool      flg,monitor
      PetscErrorCode ierr
      PetscInt       time_steps_max,i1,steps
      PetscReal      dt,time_total_max,ftime,norm

!  Note: Any user-defined Fortran routines (such as RHSFunction)
!  MUST be declared as external.

      external MyMonitor,RHSFunction
      external InitialConditions,RHSJacobian

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                    Beginning of program
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
      comm = PETSC_COMM_WORLD
      call MPI_Comm_size(comm,size,ierr)
      call MPI_Comm_rank(comm,rank,ierr)

!  Initialize problem parameters

      time_steps_max = 1000
      time_total_max = 100.0
      m              = 60
      debug          = .false.
      call PetscOptionsGetInt(PETSC_NULL_CHARACTER,'-m',m,flg,ierr)
      call PetscOptionsHasName(PETSC_NULL_CHARACTER,'-debug',debug,ierr)
      call PetscOptionsHasName(PETSC_NULL_CHARACTER,'-monitor',monitor, &
     &                         ierr)
      one_d0  = 1.0d0
      two_d0  = 2.0d0
      four_d0 = 4.0d0
      h       = one_d0/(m-one_d0)
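!  (h is the uniform grid spacing for m equally spaced nodes on [0,1],
!  i.e. h = 1/(m-1); with the default m = 60 this gives h ~ 1/59 ~ 0.017.)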

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Create vector data structures
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create distributed array (DMDA) to manage parallel grid and vectors.
!  Set up the ghost point communication pattern.  There are m total
!  grid values spread equally among all the processors.

      i1 = 1
      call DMDACreate1d(PETSC_COMM_WORLD,DMDA_BOUNDARY_NONE,m,i1,i1,    &
     &                  PETSC_NULL_INTEGER,da,ierr)

!  Extract global and local vectors from DMDA; then duplicate for remaining
!  vectors that are the same types.

      call DMCreateGlobalVector(da,u,ierr)
      call DMCreateLocalVector(da,u_local,ierr)

!  Make local work vector for evaluating right-hand-side function

      call VecDuplicate(u_local,localwork,ierr)

!  Make global work vector for storing exact solution

      call VecDuplicate(u,solution,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Create timestepping solver context; set callback routine for
!  right-hand-side function evaluation.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call TSCreate(comm,ts,ierr)
      call TSSetProblemType(ts,TS_NONLINEAR,ierr)
      call TSSetRHSFunction(ts,PETSC_NULL_OBJECT,RHSFunction,           &
     &                      PETSC_NULL_OBJECT,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Set optional user-defined monitoring routine
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      if (monitor) then
        call TSMonitorSet(ts,MyMonitor,PETSC_NULL_OBJECT,               &
     &                    PETSC_NULL_FUNCTION,ierr)
      endif

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  For nonlinear problems, the user can provide a Jacobian evaluation
!  routine (or use a finite differencing approximation).
!
!  Create matrix data structure; set Jacobian evaluation routine.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call MatCreate(comm,A,ierr)
      call MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m,m,ierr)
      call MatSetFromOptions(A,ierr)
      call MatSetUp(A,ierr)

      call PetscOptionsHasName(PETSC_NULL_CHARACTER,'-fdjac',flg,ierr)
      if (flg) then
        call SetCRoutineFromFortran(ts,A,A,ierr)
      else
        call TSSetRHSJacobian(ts,A,A,RHSJacobian,PETSC_NULL_OBJECT,     &
     &                        ierr)
      endif

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Set solution vector and initial timestep
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      dt = h/two_d0
      call TSSetInitialTimeStep(ts,0.d0,dt,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Customize timestepping solver:
!    - Set the solution method to be the backward Euler method.
!    - Set timestepping duration info.
!  Then set runtime options, which can override these defaults.
!  For example,
!     -ts_max_steps <maxsteps> -ts_final_time <maxtime>
!  to override the defaults set by TSSetDuration().
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call TSSetType(ts,TSTHETA,ierr)
      call TSThetaSetTheta(ts,one_d0,ierr)
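!  (With theta = 1 the theta method reduces to backward Euler; the same
!  choice can typically be selected at runtime with -ts_type beuler.)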
      call TSSetDuration(ts,time_steps_max,time_total_max,ierr)
      call TSSetFromOptions(ts,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Solve the problem
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Evaluate initial conditions

      call InitialConditions(u)

!  Run the timestepping solver

      call TSSolve(ts,u,ftime,ierr)
      call TSGetTimeStepNumber(ts,steps,ierr)
      call VecNorm(u,NORM_2,norm,ierr)

      write(6,100) steps,ftime,norm
 100  format('Number of time-steps ',i5,' final time ',1pe9.2,          &
     &       ' solution norm ',1pe9.2)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Free work space.  All PETSc objects should be destroyed when they
!  are no longer needed.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call TSDestroy(ts,ierr)
      call VecDestroy(localwork,ierr)
      call VecDestroy(solution,ierr)
      call VecDestroy(u_local,ierr)
      call VecDestroy(u,ierr)
      call DMDestroy(da,ierr)
      call MatDestroy(A,ierr)

!  Always call PetscFinalize() before exiting a program.  This routine
!    - finalizes the PETSc libraries as well as MPI
!    - provides summary and diagnostic information if certain runtime
!      options are chosen (e.g., -log_summary).

      call PetscFinalize(ierr)
      end

! ------------------------------------------------------------------------
!
!  InitialConditions - Computes the solution at the initial time.
!
!  Input Parameter:
!     u - uninitialized solution vector (global)
!
!  Output Parameter:
!     u - vector with solution at initial time (global)
!
!  Note: Problem parameters are passed via the common blocks in 'ex2f.h'
!  rather than through an application context argument.
!
      subroutine InitialConditions(u)
      implicit none
#include "ex2f.h"

!  Input/output parameters:
      Vec  u

!  Local variables:
      PetscScalar    localptr(1),x
      PetscInt       i,mybase,myend
      PetscErrorCode ierr
      PetscOffset    idx

!  Determine the starting and ending points of each processor's range of
!  grid values.  Note that we shift by 1 to convert from the 0-based
!  C convention of starting indices to the 1-based Fortran convention.

      call VecGetOwnershipRange(u,mybase,myend,ierr)
      mybase = mybase + 1

!  Get a pointer to vector data.
!    - For default PETSc vectors, VecGetArray() returns a pointer to
!      the data array.  Otherwise, the routine is implementation dependent.
!    - You MUST call VecRestoreArray() when you no longer need access to
!      the array.
!    - Note that the Fortran interface to VecGetArray() differs from the
!      C version.  See the users manual for details.

      call VecGetArray(u,localptr,idx,ierr)

!  We initialize the solution array by simply writing the solution
!  directly into the array locations.  Alternatively, we could use
!  VecSetValues() or VecSetValuesLocal().
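!  (Reminder of the Fortran VecGetArray() convention: the routine returns
!  an integer offset 'idx', and local entry k (1-based) of the vector is
!  addressed as localptr(idx+k).  In the loop below, k = i-mybase+1 for
!  global grid point i.)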

      do 10, i=mybase,myend
!       x - current location in global grid
        x = h*(i-1)
        localptr(i-mybase+idx+1) = one_d0 + x*x
 10   continue

!  Restore vector

      call VecRestoreArray(u,localptr,idx,ierr)

!  Print debugging information if desired

      if (debug) then
        if (rank .eq. 0) write(6,*) 'initial guess vector'
        call VecView(u,PETSC_VIEWER_STDOUT_WORLD,ierr)
      endif

      return
      end

! ------------------------------------------------------------------------
!
!  ExactSolution - Computes the exact solution at a given time.
!
!  Input Parameter:
!     t - current time
!
!  Output Parameter:
!     ierr - error code
!
!  Note: The solution vector is stored in the common block, so it is not
!  passed as an argument.
!
      subroutine ExactSolution(t,ierr)
      implicit none
#include "ex2f.h"

!  Input/output parameters:
      PetscReal      t
      PetscErrorCode ierr

!  Local variables:
      PetscScalar localptr(1),x
      PetscInt    i,mybase,myend
      PetscOffset idx

!  Determine the starting and ending points of each processor's range of
!  grid values.  Note that we shift by 1 to convert from the 0-based
!  C convention of starting indices to the 1-based Fortran convention.

      call VecGetOwnershipRange(solution,mybase,myend,ierr)
      mybase = mybase + 1

!  Get a pointer to vector data

      call VecGetArray(solution,localptr,idx,ierr)

!  Simply write the solution directly into the array locations.
!  Alternatively, we could use VecSetValues() or VecSetValuesLocal().

      do 10, i=mybase,myend
!       x - current location in global grid
        x = h*(i-1)
        localptr(i-mybase+idx+1) = (t + one_d0)*(one_d0 + x*x)
 10   continue

!  Restore vector

      call VecRestoreArray(solution,localptr,idx,ierr)

      return
      end

! ------------------------------------------------------------------------
!
!  MyMonitor - A user-provided routine to monitor the solution computed at
!  each time-step.  This example plots the solution and computes the
!  error in two different norms.
!
!  Input Parameters:
!     ts    - the time-step context
!     step  - the count of the current step (with 0 meaning the
!             initial condition)
!     time  - the current time
!     u     - the solution at this timestep
!     dummy - optional user-provided context for this monitoring
!             routine (not used here)
!
      subroutine MyMonitor(ts,step,time,u,dummy,ierr)
      implicit none
#include "ex2f.h"

!  Input/output parameters:
      TS             ts
      PetscInt       step
      PetscObject    dummy
      PetscReal      time
      Vec            u
      PetscErrorCode ierr

!  Local variables:
      PetscScalar en2,en2s,enmax
      PetscScalar mone
      PetscDraw   draw

      mone = -1.0d0

!  We use the default X windows viewer
!       PETSC_VIEWER_DRAW_WORLD
!  that is associated with the PETSC_COMM_WORLD communicator.  This
!  saves the effort of calling PetscViewerDrawOpen() to create the window.
!  Note that if we wished to plot several items in separate windows we
!  would create each viewer with PetscViewerDrawOpen() and store them in
!  the application context, appctx.
!
!  Double buffering makes graphics look better.

      call PetscViewerDrawGetDraw(PETSC_VIEWER_DRAW_WORLD,0,draw,ierr)
      call PetscDrawSetDoubleBuffer(draw,ierr)
      call VecView(u,PETSC_VIEWER_DRAW_WORLD,ierr)

!  Compute the exact solution at this time-step.  Note that the
!  solution vector is passed via the user-defined common block.

      call ExactSolution(time,ierr)

!  Print debugging information if desired

      if (debug) then
        if (rank .eq. 0) write(6,*) 'Computed solution vector'
        call VecView(u,PETSC_VIEWER_STDOUT_WORLD,ierr)
        if (rank .eq. 0) write(6,*) 'Exact solution vector'
        call VecView(solution,PETSC_VIEWER_STDOUT_WORLD,ierr)
      endif

!  Compute the 2-norm and max-norm of the error

      call VecAXPY(solution,mone,u,ierr)
      call VecNorm(solution,NORM_2,en2,ierr)

!  Scale the 2-norm by the grid spacing
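!  (sqrt(h)*||e||_2 approximates the continuous L2 norm of the error
!  over the interval [0,1].)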
      en2s = sqrt(dble(h))*en2
      call VecNorm(solution,NORM_MAX,enmax,ierr)

!  Print only from processor 0

      if (rank .eq. 0) write(6,100) step,time,en2s,enmax
 100  format('Timestep = ',i5,', time = ',f8.3,                         &
     &       ' sec, error [2-norm] = ',e10.3,                           &
     &       ', error [max-norm] = ',e10.3)

!  Print debugging information if desired

      if (debug) then
        if (rank .eq. 0) write(6,*) 'Error vector'
        call VecView(solution,PETSC_VIEWER_STDOUT_WORLD,ierr)
      endif

      return
      end

! ------------------------------------------------------------------------
!
!  RHSFunction - User-provided routine that evaluates the RHS function
!  in the ODE.  This routine is set in the main program by calling
!  TSSetRHSFunction().  We compute:
!        global_out = F(global_in)
!
!  Input Parameters:
!     ts        - timestep context
!     t         - current time
!     global_in - input vector to function
!     dummy     - (optional) user-provided context for function evaluation
!                 (not used here because we use a common block instead)
!
!  Output Parameter:
!     global_out - value of function
!
      subroutine RHSFunction(ts,t,global_in,global_out,dummy)
      implicit none
#include "ex2f.h"

!  Input/output parameters:
      TS        ts
      PetscReal t
      Vec       global_in,global_out
      integer   dummy

!  Local variables:
      PetscScalar    localptr(1),copyptr(1),sc
      PetscErrorCode ierr
      PetscInt       i,localsize
      PetscOffset    idx_c,idx_l,il

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Get ready for local function computations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Scatter ghost points to local vector, using the 2-step process
!     DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
!  By placing code between these two statements, computations can be
!  done while messages are in transition.

      call DMGlobalToLocalBegin(da,global_in,INSERT_VALUES,u_local,ierr)
      call DMGlobalToLocalEnd(da,global_in,INSERT_VALUES,u_local,ierr)

!  Access directly the values in our local INPUT work vector

      call VecGetArray(u_local,localptr,idx_l,ierr)

!  Access directly the values in our local OUTPUT work vector

      call VecGetArray(localwork,copyptr,idx_c,ierr)
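
!  sc combines the PDE coefficient 1/(2*(1+t)^2) with the 1/h^2 factor of
!  the centered second difference, i.e. sc = 1/(2*h^2*(1+t)^2).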
      sc = one_d0/(h*h*two_d0*(one_d0+t)*(one_d0+t))

!  Evaluate our function on the nodes owned by this processor

      call VecGetLocalSize(u_local,localsize,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Compute entries for the locally owned part
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Handle boundary conditions: This is done by using the boundary condition
!        u(t,boundary) = g(t,boundary)
!  for some function g.  Now take the derivative with respect to t to obtain
!        u_{t}(t,boundary) = g_{t}(t,boundary)
!
!  In our case, u(t,0) = t + 1, so that u_{t}(t,0) = 1
!           and u(t,1) = 2*t + 2, so that u_{t}(t,1) = 2

      if (rank .eq. 0) copyptr(1+idx_c) = one_d0
      if (rank .eq. size-1) copyptr(localsize+idx_c) = two_d0

!  Handle the interior nodes where the PDE is replaced by finite
!  difference operators.
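!  For each locally owned interior node i the discrete right-hand side is
!      F_i = u_i * (u_{i+1} - 2*u_i + u_{i-1}) / (2*h^2*(1+t)^2)
!  i.e. sc * u_i * (u_{i+1} + u_{i-1} - 2*u_i), as coded below.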

      do 10, i=2,localsize-1
        il = i + idx_l
        copyptr(i+idx_c) = localptr(il) * sc *                          &
     &     (localptr(il+1) + localptr(il-1) - two_d0*localptr(il))
 10   continue

      call VecRestoreArray(u_local,localptr,idx_l,ierr)
      call VecRestoreArray(localwork,copyptr,idx_c,ierr)

!  Return the values from our local OUTPUT vector into our global
!  output vector.

      call DMLocalToGlobalBegin(da,localwork,INSERT_VALUES,global_out,  &
     &                          ierr)
      call DMLocalToGlobalEnd(da,localwork,INSERT_VALUES,global_out,    &
     &                        ierr)

!  Print debugging information if desired

      if (debug) then
        if (rank .eq. 0) write(6,*) 'RHS function vector'
        call VecView(global_out,PETSC_VIEWER_STDOUT_WORLD,ierr)
      endif

      return
      end

! ------------------------------------------------------------------------
!
!  RHSJacobian - User-provided routine to compute the Jacobian of the
!  right-hand-side function.
!
!  Input Parameters:
!     ts        - the TS context
!     t         - current time
!     global_in - global input vector
!     dummy     - optional user-defined context, as set by TSSetRHSJacobian()
!
!  Output Parameters:
!     A   - Jacobian matrix
!     B   - optionally different preconditioning matrix
!     str - flag indicating matrix structure
!
!  Notes:
!  RHSJacobian computes entries for the locally owned part of the Jacobian.
!   - Currently, all PETSc parallel matrix formats are partitioned by
!     contiguous chunks of rows across the processors.  The global row
!     number corresponding to each local grid point is computed below.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Always specify global rows and columns of matrix entries.
!   - Here, we set all entries for a particular row at once.
!   - Note that MatSetValues() uses 0-based row and column numbers
!     in Fortran as well as in C.

      subroutine RHSJacobian(ts,t,global_in,A,B,str,dummy)
      implicit none
#include "ex2f.h"

!  Input/output parameters:
      TS           ts
      PetscReal    t
      Vec          global_in
      Mat          A,B
      MatStructure str
      integer      dummy

!  Local variables:
      PetscScalar    localptr(1),sc,v(3)
      PetscInt       i,col(3),i1,i3
      PetscErrorCode ierr
      PetscInt       mstart(1),mend(1)
      PetscInt       mstarts,mends
      PetscOffset    idx,is

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Get ready for local Jacobian computations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Scatter ghost points to local vector, using the 2-step process
!     DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
!  By placing code between these two statements, computations can be
!  done while messages are in transition.

      call DMGlobalToLocalBegin(da,global_in,INSERT_VALUES,u_local,ierr)
      call DMGlobalToLocalEnd(da,global_in,INSERT_VALUES,u_local,ierr)

!  Get pointer to vector data

      call VecGetArray(u_local,localptr,idx,ierr)

!  Get starting and ending locally owned rows of the matrix

      call MatGetOwnershipRange(A,mstarts,mends,ierr)
      mstart(1) = mstarts
      mend(1)   = mends

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Compute entries for the locally owned part of the Jacobian.
!   - Currently, all PETSc parallel matrix formats are partitioned by
!     contiguous chunks of rows across the processors.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Here, we set all entries for a particular row at once.
!   - We can set matrix entries using either MatSetValuesLocal()
!     or MatSetValues().
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Set matrix rows corresponding to boundary data

      i1 = 1
      i3 = 3
      if (mstart(1) .eq. 0) then
        v(1) = zero_d0
        call MatSetValues(A,i1,mstart,i1,mstart,v,INSERT_VALUES,ierr)
        mstart(1) = mstart(1) + 1
      endif
      if (mend(1) .eq. m) then
        mend(1) = mend(1) - 1
        v(1) = zero_d0
        call MatSetValues(A,i1,mend,i1,mend,v,INSERT_VALUES,ierr)
      endif

!  Set matrix rows corresponding to interior data.
!  We construct the matrix one row at a time.
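!  Differentiating F_i = sc*u_i*(u_{i+1} + u_{i-1} - 2*u_i) gives the three
!  nonzero entries of row i:
!      dF_i/du_{i-1} = sc*u_i
!      dF_i/du_i     = sc*(u_{i+1} + u_{i-1} - 4*u_i)
!      dF_i/du_{i+1} = sc*u_i
!  These are the values v(1), v(2), v(3) inserted in the loop below.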

      sc = one_d0/(h*h*two_d0*(one_d0+t)*(one_d0+t))
      do 10, i=mstart(1),mend(1)-1
        col(1) = i-1
        col(2) = i
        col(3) = i+1
        is     = i - mstart(1) + 1 + idx + 1
        v(1)   = sc*localptr(is)
        v(2)   = sc*(localptr(is+1) + localptr(is-1) -                  &
     &               four_d0*localptr(is))
        v(3)   = sc*localptr(is)
        call MatSetValues(A,i1,i,i3,col,v,INSERT_VALUES,ierr)
 10   continue

!  Restore vector

      call VecRestoreArray(u_local,localptr,idx,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Complete the matrix assembly process and set some options
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Assemble matrix, using the 2-step process:
!       MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transition,
!  by placing code between these two statements.

      call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
      call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)

!  Set flag to indicate that the Jacobian matrix retains an identical
!  nonzero structure throughout all timestepping iterations (although the
!  values of the entries change).  Thus, we can save some work in setting
!  up the preconditioner (e.g., no need to redo symbolic factorization for
!  ILU/ICC preconditioners).
!   - If the nonzero structure of the matrix is different during
!     successive linear solves, then the flag DIFFERENT_NONZERO_PATTERN
!     must be used instead.  If you are unsure whether the matrix
!     structure has changed or not, use the flag DIFFERENT_NONZERO_PATTERN.
!   - Caution: If you specify SAME_NONZERO_PATTERN, PETSc
!     believes your assertion and does not check the structure
!     of the matrix.  If you erroneously claim that the structure
!     is the same when it actually is not, the new preconditioner
!     will not function correctly.  Thus, use this optimization
!     feature with caution!

      str = SAME_NONZERO_PATTERN

!  Set an option to indicate that we will never add a new nonzero location
!  to the matrix.  If we do, it will generate an error.

      call MatSetOption(A,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE,ierr)

      return
      end