Actual source code: ex2f.F
petsc-3.4.5 2014-06-29
2: !
3: !/*T
4: ! Concepts: TS^time-dependent nonlinear problems
5: ! Processors: n
6: !T*/
7: !
8: ! ------------------------------------------------------------------------
9: !
10: ! This program solves a simple time-dependent nonlinear PDE using implicit
11: ! timestepping:
12: !                  u * u_xx
13: !        u_t = ---------------
14: !                2*(t+1)^2
15: !
16: ! u(0,x) = 1 + x*x; u(t,0) = t + 1; u(t,1) = 2*t + 2
17: !
18: ! The exact solution is u(t,x) = (1 + x*x) * (1 + t).
19: !
20: ! Note that since the solution is linear in time and quadratic in x,
21: ! the finite difference scheme actually computes the "exact" solution.
22: !
23: ! We use the backward Euler method.
24: !
25: ! --------------------------------------------------------------------------
27: program main
28: implicit none
30: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
31: ! Include files
32: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
33: !
34: ! Each routine within this program uses the include file 'ex2f.h',
35: ! which itself includes the various PETSc include files as well as
36: ! problem-specific data in several common blocks.
37: !
38: ! This program uses CPP for preprocessing, as indicated by the use of
39: ! PETSc include files in the directory petsc/include/finclude. This
40: ! convention enables use of the #include statements that define
41: ! PETSc objects and variables.
42: !
43: ! Use of conventional Fortran include statements is also supported.
44: ! In this case, the PETSc include files are located in the directory
45: ! petsc/include/foldinclude.
46: !
47: ! Since one must be very careful to include each file no more than once
48: ! in a Fortran routine, application programmers must explicitly list
49: ! each file needed for the various PETSc components within their
50: ! program (unlike the C/C++ interface).
51: !
52: ! See the Fortran section of the PETSc users manual for details.
54: #include "ex2f.h"
56: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
57: ! Variable declarations
58: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
59: !
60: ! Variables:
61: ! ts - timestepping solver
62: ! A - Jacobian matrix context
63: ! u_local - local vector
64: ! u - approximate solution vector
65: ! ftime - final time
66: ! time_total_max - default total max time
67: ! time_steps_max - default max timesteps
68: !
69: ! Note that vectors are declared as PETSc "Vec" objects. These vectors
70: ! are mathematical objects that contain more than just an array of
71: ! double precision numbers. I.e., vectors in PETSc are not just
72: ! double precision x(*).
73: ! However, local vector data can be easily accessed via VecGetArray().
74: ! See the Fortran section of the PETSc users manual for details.
76: TS ts
77: Vec u
78: Mat A
79: PetscBool flg,fdjac,monitor
80: PetscErrorCode ierr
81: PetscInt time_steps_max,i1,steps
82: PetscReal dt,time_total_max,ftime,norm
84: ! Note: Any user-defined Fortran routines (such as RHSFunction)
85: ! MUST be declared as external.
87: external MyMonitor,RHSFunction
88: external InitialConditions,RHSJacobian
90: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
91: ! Beginning of program
92: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
94: call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
95: comm = PETSC_COMM_WORLD
96: call MPI_Comm_size(comm,size,ierr)
97: call MPI_Comm_rank(comm,rank,ierr)
99: ! Initialize problem parameters
101: time_steps_max = 1000
102: time_total_max = 100.0
103: m = 60
104: debug = PETSC_FALSE
105: monitor = PETSC_FALSE
106: call PetscOptionsGetInt(PETSC_NULL_CHARACTER,'-m',M,flg,ierr)
107: call PetscOptionsGetBool(PETSC_NULL_CHARACTER,'-debug',debug,flg, &
108: & ierr)
109: call PetscOptionsGetBool(PETSC_NULL_CHARACTER,'-monitor',monitor, &
110: & flg,ierr)
111: one_d0 = 1.0d0
112: two_d0 = 2.0d0
113: four_d0 = 4.0d0
114: h = one_d0/(m-one_d0)
116: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
117: ! Create vector data structures
118: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
120: ! Create distributed array (DMDA) to manage parallel grid and vectors
121: ! Set up the ghost point communication pattern. There are m total
122: ! grid values spread equally among all the processors.
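! In the DMDACreate1d() call below, m is the number of global grid
! points, the first i1 argument is the number of degrees of freedom per
! node, the second i1 is the stencil (ghost point) width, and
! PETSC_NULL_INTEGER lets PETSc decide how many points each process owns.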
123: i1 = 1
124: call DMDACreate1d(PETSC_COMM_WORLD,DMDA_BOUNDARY_NONE,m,i1,i1, &
125: & PETSC_NULL_INTEGER,da,ierr)
127: ! Extract global and local vectors from DMDA; then duplicate for remaining
128: ! vectors of the same type.
130: call DMCreateGlobalVector(da,u,ierr)
131: call DMCreateLocalVector(da,u_local,ierr)
133: ! Make local work vector for evaluating right-hand-side function
134: call VecDuplicate(u_local,localwork,ierr)
136: ! Make global work vector for storing exact solution
137: call VecDuplicate(u,solution,ierr)
139: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
140: ! Create timestepping solver context; set callback routine for
141: ! right-hand-side function evaluation.
142: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
144: call TSCreate(comm,ts,ierr)
145: call TSSetProblemType(ts,TS_NONLINEAR,ierr)
146: call TSSetRHSFunction(ts,PETSC_NULL_OBJECT,RHSFunction, &
147: & PETSC_NULL_OBJECT,ierr)
149: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
150: ! Set optional user-defined monitoring routine
151: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
154: if (monitor) then
155: call TSMonitorSet(ts,MyMonitor,PETSC_NULL_OBJECT, &
156: & PETSC_NULL_FUNCTION,ierr)
157: endif
159: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
160: ! For nonlinear problems, the user can provide a Jacobian evaluation
161: ! routine (or use a finite differencing approximation).
162: !
163: ! Create matrix data structure; set Jacobian evaluation routine.
164: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
166: call MatCreate(comm,A,ierr)
167: call MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m,m,ierr)
168: call MatSetFromOptions(A,ierr)
169: call MatSetUp(A,ierr)
170: call TSSetRHSJacobian(ts,A,A,RHSJacobian,PETSC_NULL_OBJECT,ierr)
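! If an analytic Jacobian were not available, the nonlinear solver
! underneath the implicit TS could instead approximate it at runtime;
! a sketch of the usual command-line options (names taken from standard
! PETSc usage, not from this file) is
!     -snes_fd        ! finite-difference approximation of the Jacobian
!     -snes_mf        ! matrix-free application of the Jacobian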
172: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
173: ! Set solution vector and initial timestep
174: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
176: dt = h/two_d0
177: call TSSetInitialTimeStep(ts,0.d0,dt,ierr)
179: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
180: ! Customize timestepping solver:
181: ! - Set the solution method to be the Backward Euler method.
182: ! - Set timestepping duration info
183: ! Then set runtime options, which can override these defaults.
184: ! For example,
185: ! -ts_max_steps <maxsteps> -ts_final_time <maxtime>
186: ! to override the defaults set by TSSetDuration().
187: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
189: call TSSetType(ts,TSTHETA,ierr)
190: call TSThetaSetTheta(ts,one_d0,ierr)
191: call TSSetDuration(ts,time_steps_max,time_total_max,ierr)
192: call TSSetFromOptions(ts,ierr)
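! An illustrative invocation (the executable name and option values here
! are only an example):
!     mpiexec -n 2 ./ex2f -m 120 -ts_max_steps 10 -monitor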
194: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
195: ! Solve the problem
196: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
198: ! Evaluate initial conditions
200: call InitialConditions(u)
202: ! Run the timestepping solver
204: call TSSolve(ts,u,ierr)
205: call TSGetSolveTime(ts,ftime,ierr)
206: call TSGetTimeStepNumber(ts,steps,ierr)
207: call VecNorm(u,NORM_2,norm,ierr)
209: write(6,100) steps,ftime,norm
210: 100 format('Number of time steps ',i5,' final time ',1pe9.2, &
211: & ' solution norm ',1pe9.2)
213: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
214: ! Free work space. All PETSc objects should be destroyed when they
215: ! are no longer needed.
216: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
218: call TSDestroy(ts,ierr)
219: call VecDestroy(localwork,ierr)
220: call VecDestroy(solution,ierr)
221: call VecDestroy(u_local,ierr)
222: call VecDestroy(u,ierr)
223: call DMDestroy(da,ierr)
224: call MatDestroy(A,ierr)
226: ! Always call PetscFinalize() before exiting a program. This routine
227: ! - finalizes the PETSc libraries as well as MPI
228: ! - provides summary and diagnostic information if certain runtime
229: ! options are chosen (e.g., -log_summary).
231: call PetscFinalize(ierr)
232: end
234: ! ------------------------------------------------------------------------
235: !
236: ! InitialConditions - Computes the solution at the initial time.
237: !
238: ! Input Parameter:
239: ! u - uninitialized solution vector (global)
240: ! (problem data is passed via the common blocks in ex2f.h)
241: !
242: ! Output Parameter:
243: ! u - vector with solution at initial time (global)
244: !
245: subroutine InitialConditions(u)
246: implicit none
247: #include "ex2f.h"
249: ! Input/output parameters:
250: Vec u
252: ! Local variables:
253: PetscScalar localptr(1),x
254: PetscInt i,mybase,myend
255: PetscErrorCode ierr
256: PetscOffset idx
258: ! Determine starting and ending point of each processor's range of
259: ! grid values. Note that we shift by 1 to convert from the 0-based
260: ! C convention of starting indices to the 1-based Fortran convention.
262: call VecGetOwnershipRange(u,mybase,myend,ierr)
263: mybase = mybase + 1
265: ! Get a pointer to vector data.
266: ! - For default PETSc vectors, VecGetArray() returns a pointer to
267: ! the data array. Otherwise, the routine is implementation dependent.
268: ! - You MUST call VecRestoreArray() when you no longer need access to
269: ! the array.
270: ! - Note that the Fortran interface to VecGetArray() differs from the
271: ! C version. See the users manual for details.
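! As a sketch of the difference: the C interface returns the array
! pointer directly (and one indexes it as ptr[i]), whereas here the
! array is addressed through the PetscOffset idx, e.g.
!     call VecGetArray(u,localptr,idx,ierr)
!     ...  localptr(idx+i)  ...
!     call VecRestoreArray(u,localptr,idx,ierr)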
273: call VecGetArray(u,localptr,idx,ierr)
275: ! We initialize the solution array by simply writing the solution
276: ! directly into the array locations. Alternatively, we could use
277: ! VecSetValues() or VecSetValuesLocal().
279: do 10, i=mybase,myend
280: ! x - current location in global grid
281: x = h*(i-1)
282: localptr(i-mybase+idx+1) = one_d0 + x*x
283: 10 continue
285: ! Restore vector
287: call VecRestoreArray(u,localptr,idx,ierr)
289: ! Print debugging information if desired
290: if (debug) then
291: if (rank .eq. 0) write(6,*) 'initial guess vector'
292: call VecView(u,PETSC_VIEWER_STDOUT_WORLD,ierr)
293: endif
295: return
296: end
298: ! ------------------------------------------------------------------------
299: !
300: ! ExactSolution - Computes the exact solution at a given time.
301: !
302: ! Input Parameter:
303: ! t - current time
305: !
306: ! Output Parameter:
307: ! ierr - error code
308: !
309: ! Note: The solution vector is stored in the common block!
310: !
311: subroutine ExactSolution(t,ierr)
312: implicit none
313: #include "ex2f.h"
315: ! Input/output parameters:
316: PetscReal t
317: PetscScalar x
318: PetscErrorCode ierr
320: ! Local variables:
321: PetscScalar localptr(1)
322: PetscInt i,mybase,myend
323: PetscOffset idx
325: ! Determine starting and ending point of each processor's range of
326: ! grid values. Note that we shift by 1 to convert from the 0-based
327: ! C convention of starting indices to the 1-based Fortran convention.
329: call VecGetOwnershipRange(solution,mybase,myend,ierr)
330: mybase = mybase + 1
332: ! Get a pointer to vector data
333: call VecGetArray(solution,localptr,idx,ierr)
335: ! Simply write the solution directly into the array locations
336: ! Alternatively, we could use VecSetValues() or VecSetValuesLocal().
338: do 10, i=mybase,myend
339: ! x - current location in global grid
340: x = h*(i-1)
341: localptr(i-mybase+idx+1) = (t + one_d0)*(one_d0 + x*x)
342: 10 continue
344: ! Restore vector
346: call VecRestoreArray(solution,localptr,idx,ierr)
348: return
349: end
351: ! ------------------------------------------------------------------------
352: !
353: ! MyMonitor - A user-provided routine to monitor the solution computed at
354: ! each time-step. This example plots the solution and computes the
355: ! error in two different norms.
356: !
357: ! Input Parameters:
358: ! ts - the time-step context
359: ! step - the count of the current step (with 0 meaning the
360: ! initial condition)
361: ! time - the current time
362: ! u - the solution at this timestep
363: ! dummy - optional user-provided context for this monitoring
364: ! routine (not used here)
365: !
366: subroutine MyMonitor(ts,step,time,u,dummy,ierr)
367: implicit none
368: #include "ex2f.h"
370: ! Input/output parameters:
371: TS ts
372: PetscInt step
373: PetscObject dummy
374: PetscReal time
375: Vec u
376: PetscErrorCode ierr
378: ! Local variables:
379: PetscScalar en2,en2s,enmax
380: PetscScalar mone
381: PetscDraw draw
383: mone = -1.0d0
385: ! We use the default X windows viewer
386: ! PETSC_VIEWER_DRAW_WORLD
387: ! that is associated with the PETSC_COMM_WORLD communicator. This
388: ! saves the effort of calling PetscViewerDrawOpen() to create the window.
389: ! Note that if we wished to plot several items in separate windows we
390: ! would create each viewer with PetscViewerDrawOpen() and store them in
391: ! the application context, appctx.
392: !
393: ! Double buffering makes graphics look better.
395: call PetscViewerDrawGetDraw(PETSC_VIEWER_DRAW_WORLD,0,draw,ierr)
396: call PetscDrawSetDoubleBuffer(draw,ierr)
397: call VecView(u,PETSC_VIEWER_DRAW_WORLD,ierr)
399: ! Compute the exact solution at this time-step. Note that the
400: ! solution vector is passed via the user-defined common block.
401: call ExactSolution(time,ierr)
403: ! Print debugging information if desired
404: if (debug) then
405: if (rank .eq. 0) write(6,*) 'Computed solution vector'
406: call VecView(u,PETSC_VIEWER_STDOUT_WORLD,ierr)
407: if (rank .eq. 0) write(6,*) 'Exact solution vector'
408: call VecView(solution,PETSC_VIEWER_STDOUT_WORLD,ierr)
409: endif
411: ! Compute the 2-norm and max-norm of the error
412: call VecAXPY(solution,mone,u,ierr)
413: call VecNorm(solution,NORM_2,en2,ierr)
415: ! Scale the 2-norm by the square root of the grid spacing
416: en2s = sqrt(dble(h))*en2
417: call VecNorm(solution,NORM_MAX,enmax,ierr)
419: ! Print only from processor 0
420: if (rank .eq. 0) write(6,100) step,time,en2s,enmax
421: 100 format('Timestep = ',i5,', time = ',f8.3, &
422: & ' sec, error [2-norm] = ',e10.3, &
423: & ', error [max-norm] = ',e10.3)
425: ! Print debugging information if desired
426: if (debug) then
427: if (rank .eq. 0) write(6,*) 'Error vector'
428: call VecView(solution,PETSC_VIEWER_STDOUT_WORLD,ierr)
429: endif
431: return
432: end
434: ! ------------------------------------------------------------------------
435: !
436: ! RHSFunction - User-provided routine that evaluates the RHS function
437: ! in the ODE. This routine is set in the main program by calling
438: ! TSSetRHSFunction(). We compute:
439: ! global_out = F(global_in)
440: !
441: ! Input Parameters:
442: ! ts - timestep context
443: ! t - current time
444: ! global_in - input vector to function
445: ! dummy - (optional) user-provided context for function evaluation
446: ! (not used here because we use a common block instead)
447: !
448: ! Output Parameter:
449: ! global_out - value of function
450: !
451: subroutine RHSFunction(ts,t,global_in,global_out,dummy)
452: implicit none
453: #include "ex2f.h"
455: ! Input/output parameters:
456: TS ts
457: PetscReal t
458: Vec global_in,global_out
459: integer dummy
461: ! Local variables:
462: PetscScalar localptr(1),copyptr(1),sc
463: PetscErrorCode ierr
464: PetscInt i,localsize
465: PetscOffset idx_c,idx_l,il
467: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
468: ! Get ready for local function computations
469: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
471: ! Scatter ghost points to local vector, using the 2-step process
472: ! DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
473: ! By placing code between these two statements, computations can be
474: ! done while messages are in transition.
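! A sketch of that overlap (nothing extra is computed in this example):
!     call DMGlobalToLocalBegin(da,global_in,INSERT_VALUES,u_local,ierr)
!     ...  work that does not need the ghost values  ...
!     call DMGlobalToLocalEnd(da,global_in,INSERT_VALUES,u_local,ierr)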
476: call DMGlobalToLocalBegin(da,global_in,INSERT_VALUES,u_local,ierr)
477: call DMGlobalToLocalEnd(da,global_in,INSERT_VALUES,u_local,ierr)
479: ! Access directly the values in our local INPUT work vector
480: call VecGetArray(u_local,localptr,idx_l,ierr)
482: ! Access directly the values in our local OUTPUT work vector
483: call VecGetArray(localwork,copyptr,idx_c,ierr)
485: sc = one_d0/(h*h*two_d0*(one_d0+t)*(one_d0+t))
487: ! Evaluate our function on the nodes owned by this processor
488: call VecGetLocalSize(u_local,localsize,ierr)
490: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
491: ! Compute entries for the locally owned part
492: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
494: ! Handle boundary conditions: This is done by using the boundary condition
495: ! u(t,boundary) = g(t,boundary)
496: ! for some function g. Now take the derivative with respect to t to obtain
497: ! u_{t}(t,boundary) = g_{t}(t,boundary)
498: !
499: ! In our case, u(t,0) = t + 1, so that u_{t}(t,0) = 1
500: ! and u(t,1) = 2*t + 2, so that u_{t}(t,1) = 2
502: if (rank .eq. 0) copyptr(1+idx_c) = one_d0
503: if (rank .eq. size-1) copyptr(localsize+idx_c) = two_d0
505: ! Handle the interior nodes where the PDE is replaced by finite
506: ! difference operators.
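! Concretely, for each locally owned interior node the loop below computes
!     F_i = u_i * (u_{i+1} - 2*u_i + u_{i-1}) / (2*h^2*(1+t)^2)
! i.e. the centered-difference discretization of u*u_xx / (2*(t+1)^2).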
508: do 10, i=2,localsize-1
509: il = i + idx_l
510: copyptr(i+idx_c) = localptr(il) * sc * &
511: & (localptr(il+1) + localptr(il-1) - two_d0*localptr(il))
512: 10 continue
514: call VecRestoreArray(u_local,localptr,idx_l,ierr)
515: call VecRestoreArray(localwork,copyptr,idx_c,ierr)
517: ! Return the values from our local OUTPUT vector into our global
518: ! output vector.
520: call DMLocalToGlobalBegin(da,localwork,INSERT_VALUES,global_out &
521: & ,ierr)
522: call DMLocalToGlobalEnd(da,localwork,INSERT_VALUES,global_out, &
523: & ierr)
525: ! Print debugging information if desired
526: if (debug) then
527: if (rank .eq. 0) write(6,*) 'RHS function vector'
528: call VecView(global_out,PETSC_VIEWER_STDOUT_WORLD,ierr)
529: endif
531: return
532: end
534: ! ------------------------------------------------------------------------
535: !
536: ! RHSJacobian - User-provided routine to compute the Jacobian of the
537: ! right-hand-side function.
538: !
539: ! Input Parameters:
540: ! ts - the TS context
541: ! t - current time
542: ! global_in - global input vector
543: ! dummy - optional user-defined context, as set by TSSetRHSJacobian()
544: !
545: ! Output Parameters:
546: ! A - Jacobian matrix
547: ! B - optionally different preconditioning matrix
548: ! str - flag indicating matrix structure
549: !
550: ! Notes:
551: ! RHSJacobian computes entries for the locally owned part of the Jacobian.
552: ! - Currently, all PETSc parallel matrix formats are partitioned by
553: ! contiguous chunks of rows across the processors. The locally
554: ! owned row range, obtained below with MatGetOwnershipRange(),
555: ! gives the global row numbers corresponding to the local grid points.
556: ! - Each processor needs to insert only elements that it owns
557: ! locally (but any non-local elements will be sent to the
558: ! appropriate processor during matrix assembly).
559: ! - Always specify global row and columns of matrix entries.
560: ! - Here, we set all entries for a particular row at once.
561: ! - Note that MatSetValues() uses 0-based row and column numbers
562: ! in Fortran as well as in C.
564: subroutine RHSJacobian(ts,t,global_in,A,B,str,dummy)
565: implicit none
566: #include "ex2f.h"
568: ! Input/output parameters:
569: TS ts
570: PetscReal t
571: Vec global_in
572: Mat A,B
573: MatStructure str
574: integer dummy
576: ! Local variables:
577: PetscScalar localptr(1),sc,v(3)
578: PetscInt i,col(3),i1,i3
579: PetscErrorCode ierr
580: PetscInt mstart(1),mend(1)
581: PetscInt mstarts,mends
582: PetscOffset idx,is
584: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
585: ! Get ready for local Jacobian computations
586: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
588: ! Scatter ghost points to local vector, using the 2-step process
589: ! DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
590: ! By placing code between these two statements, computations can be
591: ! done while messages are in transition.
593: call DMGlobalToLocalBegin(da,global_in,INSERT_VALUES,u_local,ierr)
594: call DMGlobalToLocalEnd(da,global_in,INSERT_VALUES,u_local,ierr)
596: ! Get pointer to vector data
597: call VecGetArray(u_local,localptr,idx,ierr)
599: ! Get starting and ending locally owned rows of the matrix
600: call MatGetOwnershipRange(A,mstarts,mends,ierr)
601: mstart(1) = mstarts
602: mend(1) = mends
604: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
605: ! Compute entries for the locally owned part of the Jacobian.
606: ! - Currently, all PETSc parallel matrix formats are partitioned by
607: ! contiguous chunks of rows across the processors.
608: ! - Each processor needs to insert only elements that it owns
609: ! locally (but any non-local elements will be sent to the
610: ! appropriate processor during matrix assembly).
611: ! - Here, we set all entries for a particular row at once.
612: ! - We can set matrix entries using either
613: ! MatSetValuesLocal() or MatSetValues().
614: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
616: ! Set matrix rows corresponding to boundary data
617: i1 = 1
618: i3 = 3
619: if (mstart(1) .eq. 0) then
620: v(1) = zero_d0
621: call MatSetValues(A,i1,mstart,i1,mstart,v,INSERT_VALUES,ierr)
622: mstart(1) = mstart(1) + 1
623: endif
624: if (mend(1) .eq. m) then
625: mend(1) = mend(1) - 1
626: v(1) = zero_d0
627: call MatSetValues(A,i1,mend,i1,mend,v,INSERT_VALUES,ierr)
628: endif
630: ! Set matrix rows corresponding to interior data.
631: ! We construct the matrix one row at a time.
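! Since F_i = sc*u_i*(u_{i+1} + u_{i-1} - 2*u_i) with
! sc = 1/(2*h^2*(1+t)^2), the nonzero entries of row i of the Jacobian are
!     dF_i/du_{i-1} = sc*u_i
!     dF_i/du_i     = sc*(u_{i+1} + u_{i-1} - 4*u_i)
!     dF_i/du_{i+1} = sc*u_i
! and these are the values placed in v(1), v(2), v(3) below.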
633: sc = one_d0/(h*h*two_d0*(one_d0+t)*(one_d0+t))
634: do 10, i=mstart(1),mend(1)-1
635: col(1) = i-1
636: col(2) = i
637: col(3) = i+1
638: is = i - mstart(1) + 1 +idx + 1
639: v(1) = sc*localptr(is)
640: v(2) = sc*(localptr(is+1) + localptr(is-1) - &
641: & four_d0*localptr(is))
642: v(3) = sc*localptr(is)
643: call MatSetValues(A,i1,i,i3,col,v,INSERT_VALUES,ierr)
644: 10 continue
646: ! Restore vector
647: call VecRestoreArray(u_local,localptr,idx,ierr)
649: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
650: ! Complete the matrix assembly process and set some options
651: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
653: ! Assemble matrix, using the 2-step process:
654: ! MatAssemblyBegin(), MatAssemblyEnd()
655: ! Computations can be done while messages are in transition,
656: ! by placing code between these two statements.
658: call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
659: call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)
661: ! Set flag to indicate that the Jacobian matrix retains an identical
662: ! nonzero structure throughout all timestepping iterations (although the
663: ! values of the entries change). Thus, we can save some work in setting
664: ! up the preconditioner (e.g., no need to redo symbolic factorization for
665: ! ILU/ICC preconditioners).
666: ! - If the nonzero structure of the matrix is different during
667: ! successive linear solves, then the flag DIFFERENT_NONZERO_PATTERN
668: ! must be used instead. If you are unsure whether the matrix
669: ! structure has changed or not, use the flag DIFFERENT_NONZERO_PATTERN.
670: ! - Caution: If you specify SAME_NONZERO_PATTERN, PETSc
671: ! believes your assertion and does not check the structure
672: ! of the matrix. If you erroneously claim that the structure
673: ! is the same when it actually is not, the new preconditioner
674: ! will not function correctly. Thus, use this optimization
675: ! feature with caution!
677: str = SAME_NONZERO_PATTERN
679: ! Set an option to indicate that we will never add a new nonzero location
680: ! to the matrix. If we do, it will generate an error.
682: call MatSetOption(A,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE,ierr)
684: return
685: end