Actual source code: ex2f.F
petsc-3.5.4 2015-05-23
!
!/*T
!  Concepts: TS^time-dependent nonlinear problems
!  Processors: n
!T*/
!
!  ------------------------------------------------------------------------
!
!  This program solves a simple time-dependent nonlinear PDE using implicit
!  timestepping:
!                     u * u_xx
!               u_t = ---------
!                     2*(t+1)^2
!
!             u(0,x) = 1 + x*x;  u(t,0) = t + 1;  u(t,1) = 2*t + 2
!
!  The exact solution is u(t,x) = (1 + x*x) * (1 + t).
!
!  Note that since the solution is linear in time and quadratic in x,
!  the finite difference scheme actually computes the "exact" solution.
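!
!  As a quick check: with u(t,x) = (1 + x*x)*(1 + t) we have
!  u_t = 1 + x*x and u_xx = 2*(1 + t), so
!      u * u_xx / (2*(t+1)^2) = (1 + x*x)*(1 + t)*2*(1 + t) / (2*(t+1)^2)
!                             = 1 + x*x = u_t,
!  which confirms the exact solution quoted above.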
!
!  We use the backward Euler method.
!
!  --------------------------------------------------------------------------
      program main
      implicit none

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                      Include files
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  Each routine within this program uses the include file 'ex2f.h',
!  which itself includes the various PETSc include files as well as
!  problem-specific data in several common blocks.
!
!  This program uses CPP for preprocessing, as indicated by the use of
!  PETSc include files in the directory petsc/include/finclude.  This
!  convention enables use of the CPP preprocessor, which allows the use
!  of the #include statements that define PETSc objects and variables.
!
!  Use of the conventional Fortran include statements is also supported.
!  In this case, the PETSc include files are located in the directory
!  petsc/include/foldinclude.
!
!  Since one must be very careful to include each file no more than once
!  in a Fortran routine, application programmers must explicitly list
!  each file needed for the various PETSc components within their
!  program (unlike the C/C++ interface).
!
!  See the Fortran section of the PETSc users manual for details.

#include "ex2f.h"

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                   Variable declarations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  Variables:
!     ts             - timestepping solver
!     A              - Jacobian matrix context
!     u_local        - local vector
!     u              - approximate solution vector
!     ftime          - final time
!     time_total_max - default total max time
!     time_steps_max - default max timesteps
!
!  Note that vectors are declared as PETSc "Vec" objects.  These vectors
!  are mathematical objects that contain more than just an array of
!  double precision numbers.  I.e., vectors in PETSc are not just
!        double precision x(*).
!  However, local vector data can be easily accessed via VecGetArray().
!  See the Fortran section of the PETSc users manual for details.

      TS               ts
      Vec              u
      Mat              A
      PetscBool        flg,fdjac,monitor
      PetscErrorCode   ierr
      PetscInt         time_steps_max,i1,steps
      PetscReal        dt,time_total_max,ftime,norm
      PetscReal        zero

!  Note: Any user-defined Fortran routines (such as RHSFunction)
!  MUST be declared as external.

      external MyMonitor,RHSFunction
      external InitialConditions,RHSJacobian

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                    Beginning of program
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
      comm = PETSC_COMM_WORLD
      call MPI_Comm_size(comm,size,ierr)
      call MPI_Comm_rank(comm,rank,ierr)

!  Initialize problem parameters

      time_steps_max = 1000
      time_total_max = 100.0
      m              = 60
      debug          = PETSC_FALSE
      monitor        = PETSC_FALSE
      call PetscOptionsGetInt(PETSC_NULL_CHARACTER,'-m',m,flg,ierr)
      call PetscOptionsGetBool(PETSC_NULL_CHARACTER,'-debug',debug,flg,   &
     &                         ierr)
      call PetscOptionsGetBool(PETSC_NULL_CHARACTER,'-monitor',monitor,   &
     &                         flg,ierr)
      one_d0  = 1.0d0
      two_d0  = 2.0d0
      four_d0 = 4.0d0
      h       = one_d0/(m-one_d0)
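
!  As a usage sketch (the exact executable name and MPI launcher depend
!  on the local build), the options read above can be set at runtime,
!  for example:
!
!     mpiexec -n 2 ./ex2f -m 60 -monitor -ts_max_steps 10
!
!  where -m, -debug, and -monitor are the options queried just above and
!  -ts_max_steps is one of the TS runtime options mentioned further below.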

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Create vector data structures
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create distributed array (DMDA) to manage parallel grid and vectors
!  Set up the ghost point communication pattern.  There are m total
!  grid values spread equally among all the processors.
      i1 = 1
      call DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,m,i1,i1,        &
     &                  PETSC_NULL_INTEGER,da,ierr)
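
!  In the call above, the two i1 arguments are the number of degrees of
!  freedom per grid point (1) and the stencil width (1), and
!  PETSC_NULL_INTEGER lets PETSc decide how many grid points each
!  process owns.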

!  Extract global and local vectors from DMDA; then duplicate for remaining
!  vectors that are the same types.
      call DMCreateGlobalVector(da,u,ierr)
      call DMCreateLocalVector(da,u_local,ierr)

!  Make local work vector for evaluating right-hand-side function
      call VecDuplicate(u_local,localwork,ierr)

!  Make global work vector for storing exact solution
      call VecDuplicate(u,solution,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Create timestepping solver context; set callback routine for
!  right-hand-side function evaluation.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call TSCreate(comm,ts,ierr)
      call TSSetProblemType(ts,TS_NONLINEAR,ierr)
      call TSSetRHSFunction(ts,PETSC_NULL_OBJECT,RHSFunction,             &
     &                      PETSC_NULL_OBJECT,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Set optional user-defined monitoring routine
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      if (monitor) then
        call TSMonitorSet(ts,MyMonitor,PETSC_NULL_OBJECT,                 &
     &                    PETSC_NULL_FUNCTION,ierr)
      endif

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  For nonlinear problems, the user can provide a Jacobian evaluation
!  routine (or use a finite differencing approximation).
!
!  Create matrix data structure; set Jacobian evaluation routine.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call MatCreate(comm,A,ierr)
      call MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m,m,ierr)
      call MatSetFromOptions(A,ierr)
      call MatSetUp(A,ierr)
      call TSSetRHSJacobian(ts,A,A,RHSJacobian,PETSC_NULL_OBJECT,ierr)
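
!  A hand-coded Jacobian is not mandatory: as a sketch (assuming the
!  standard SNES runtime options are available in this PETSc version),
!  the analytic RHSJacobian could be replaced by a finite-difference
!  approximation by running with -snes_fd, which is also a convenient
!  way to cross-check the entries computed in RHSJacobian below.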

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Set solution vector and initial timestep
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      dt   = h/two_d0
      zero = 0.0
      call TSSetInitialTimeStep(ts,zero,dt,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Customize timestepping solver:
!     - Set the solution method to be the Backward Euler method.
!     - Set timestepping duration info
!  Then set runtime options, which can override these defaults.
!  For example,
!     -ts_max_steps <maxsteps> -ts_final_time <maxtime>
!  to override the defaults set by TSSetDuration().
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call TSSetType(ts,TSTHETA,ierr)
      call TSThetaSetTheta(ts,one_d0,ierr)
      call TSSetDuration(ts,time_steps_max,time_total_max,ierr)
      call TSSetFromOptions(ts,ierr)
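
!  The theta method with theta = 1 is exactly backward Euler; selecting
!  TSBEULER directly (or -ts_type beuler on the command line) should be
!  an equivalent choice here, assuming the default TS options are kept.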

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Solve the problem
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Evaluate initial conditions
      call InitialConditions(u)

!  Run the timestepping solver
      call TSSolve(ts,u,ierr)
      call TSGetSolveTime(ts,ftime,ierr)
      call TSGetTimeStepNumber(ts,steps,ierr)
      call VecNorm(u,NORM_2,norm,ierr)

      write(6,100) steps,ftime,norm
 100  format('Number of pseudo time-steps ',i5,' final time ',1pe9.2,     &
     &       ' solution norm ',1pe9.2)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Free work space.  All PETSc objects should be destroyed when they
!  are no longer needed.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call TSDestroy(ts,ierr)
      call VecDestroy(localwork,ierr)
      call VecDestroy(solution,ierr)
      call VecDestroy(u_local,ierr)
      call VecDestroy(u,ierr)
      call DMDestroy(da,ierr)
      call MatDestroy(A,ierr)

!  Always call PetscFinalize() before exiting a program.  This routine
!    - finalizes the PETSc libraries as well as MPI
!    - provides summary and diagnostic information if certain runtime
!      options are chosen (e.g., -log_summary).

      call PetscFinalize(ierr)
      end

! ------------------------------------------------------------------------
!
!  InitialConditions - Computes the solution at the initial time.
!
!  Input Parameters:
!     u      - uninitialized solution vector (global)
!     appctx - user-defined application context
!
!  Output Parameter:
!     u      - vector with solution at initial time (global)
!
      subroutine InitialConditions(u)
      implicit none
#include "ex2f.h"

!  Input/output parameters:
      Vec              u

!  Local variables:
      PetscScalar      localptr(1),x
      PetscInt         i,mybase,myend
      PetscErrorCode   ierr
      PetscOffset      idx

!  Determine starting and ending point of each processor's range of
!  grid values.  Note that we shift by 1 to convert from the 0-based
!  C convention of starting indices to the 1-based Fortran convention.
      call VecGetOwnershipRange(u,mybase,myend,ierr)
      mybase = mybase + 1

!  Get a pointer to vector data.
!    - For default PETSc vectors, VecGetArray() returns a pointer to
!      the data array.  Otherwise, the routine is implementation dependent.
!    - You MUST call VecRestoreArray() when you no longer need access to
!      the array.
!    - Note that the Fortran interface to VecGetArray() differs from the
!      C version.  See the users manual for details.
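!
!  Concretely, with the Fortran calling sequence used below,
!     call VecGetArray(u,localptr,idx,ierr)
!  the k-th locally owned entry (k = 1,...,nlocal) is accessed as
!     localptr(idx+k)
!  which is why the loop below indexes localptr(i-mybase+idx+1).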

      call VecGetArray(u,localptr,idx,ierr)

!  We initialize the solution array by simply writing the solution
!  directly into the array locations.  Alternatively, we could use
!  VecSetValues() or VecSetValuesLocal().
      do 10, i=mybase,myend
!       x - current location in global grid
        x = h*(i-1)
        localptr(i-mybase+idx+1) = one_d0 + x*x
 10   continue

!  Restore vector
      call VecRestoreArray(u,localptr,idx,ierr)

!  Print debugging information if desired
      if (debug) then
        if (rank .eq. 0) write(6,*) 'initial guess vector'
        call VecView(u,PETSC_VIEWER_STDOUT_WORLD,ierr)
      endif

      return
      end

! ------------------------------------------------------------------------
!
!  ExactSolution - Computes the exact solution at a given time.
!
!  Input Parameters:
!     t      - current time
!     appctx - user-defined application context
!
!  Output Parameter:
!     ierr   - error code
!
!  Note: The solution vector is stored in the common block!
!
      subroutine ExactSolution(t,ierr)
      implicit none
#include "ex2f.h"

!  Input/output parameters:
      PetscReal        t
      PetscScalar      x
      PetscErrorCode   ierr

!  Local variables:
      PetscScalar      localptr(1)
      PetscInt         i,mybase,myend
      PetscOffset      idx

!  Determine starting and ending point of each processor's range of
!  grid values.  Note that we shift by 1 to convert from the 0-based
!  C convention of starting indices to the 1-based Fortran convention.
      call VecGetOwnershipRange(solution,mybase,myend,ierr)
      mybase = mybase + 1

!  Get a pointer to vector data
      call VecGetArray(solution,localptr,idx,ierr)

!  Simply write the solution directly into the array locations.
!  Alternatively, we could use VecSetValues() or VecSetValuesLocal().
      do 10, i=mybase,myend
!       x - current location in global grid
        x = h*(i-1)
        localptr(i-mybase+idx+1) = (t + one_d0)*(one_d0 + x*x)
 10   continue

!  Restore vector
      call VecRestoreArray(solution,localptr,idx,ierr)

      return
      end

! ------------------------------------------------------------------------
!
!  MyMonitor - A user-provided routine to monitor the solution computed at
!  each time-step.  This example plots the solution and computes the
!  error in two different norms.
!
!  Input Parameters:
!     ts    - the time-step context
!     step  - the count of the current step (with 0 meaning the
!             initial condition)
!     time  - the current time
!     u     - the solution at this timestep
!     dummy - optional user-provided context for this monitoring
!             routine (not used here)
!
      subroutine MyMonitor(ts,step,time,u,dummy,ierr)
      implicit none
#include "ex2f.h"

!  Input/output parameters:
      TS               ts
      PetscInt         step
      PetscObject      dummy
      PetscReal        time
      Vec              u
      PetscErrorCode   ierr

!  Local variables:
      PetscScalar      en2,en2s,enmax
      PetscScalar      mone
      PetscDraw        draw

      mone = -1.0d0

!  We use the default X windows viewer
!       PETSC_VIEWER_DRAW_WORLD
!  that is associated with the PETSC_COMM_WORLD communicator.  This
!  saves the effort of calling PetscViewerDrawOpen() to create the window.
!  Note that if we wished to plot several items in separate windows we
!  would create each viewer with PetscViewerDrawOpen() and store them in
!  the application context, appctx.
!
!  Double buffering makes graphics look better.

      call PetscViewerDrawGetDraw(PETSC_VIEWER_DRAW_WORLD,0,draw,ierr)
      call PetscDrawSetDoubleBuffer(draw,ierr)
      call VecView(u,PETSC_VIEWER_DRAW_WORLD,ierr)

!  Compute the exact solution at this time-step.  Note that the
!  solution vector is passed via the user-defined common block.
      call ExactSolution(time,ierr)

!  Print debugging information if desired
      if (debug) then
        if (rank .eq. 0) write(6,*) 'Computed solution vector'
        call VecView(u,PETSC_VIEWER_STDOUT_WORLD,ierr)
        if (rank .eq. 0) write(6,*) 'Exact solution vector'
        call VecView(solution,PETSC_VIEWER_STDOUT_WORLD,ierr)
      endif

!  Compute the 2-norm and max-norm of the error
      call VecAXPY(solution,mone,u,ierr)
      call VecNorm(solution,NORM_2,en2,ierr)

!  Scale the 2-norm by the grid spacing
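!  (The factor sqrt(h) turns the vector 2-norm into an approximation of
!  the continuous L2 norm of the error:
!       sqrt(h*sum(e_i**2)) = sqrt(h)*||e||_2
!  so en2s is roughly independent of the grid resolution.)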
      en2s = sqrt(dble(h))*en2
      call VecNorm(solution,NORM_MAX,enmax,ierr)

!  Print only from processor 0
      if (rank .eq. 0) write(6,100) step,time,en2s,enmax
 100  format('Timestep = ',i5,',time = ',f8.3,                            &
     &       ' sec, error [2-norm] = ',e10.3,                             &
     &       ', error [max-norm] = ',e10.3)

!  Print debugging information if desired
      if (debug) then
        if (rank .eq. 0) write(6,*) 'Error vector'
        call VecView(solution,PETSC_VIEWER_STDOUT_WORLD,ierr)
      endif

      return
      end

! ------------------------------------------------------------------------
!
!  RHSFunction - User-provided routine that evaluates the RHS function
!  in the ODE.  This routine is set in the main program by calling
!  TSSetRHSFunction().  We compute:
!          global_out = F(global_in)
!
!  Input Parameters:
!     ts         - timestep context
!     t          - current time
!     global_in  - input vector to function
!     dummy      - (optional) user-provided context for function evaluation
!                  (not used here because we use a common block instead)
!
!  Output Parameter:
!     global_out - value of function
!
      subroutine RHSFunction(ts,t,global_in,global_out,dummy)
      implicit none
#include "ex2f.h"

!  Input/output parameters:
      TS               ts
      PetscReal        t
      Vec              global_in,global_out
      integer          dummy

!  Local variables:
      PetscScalar      localptr(1),copyptr(1),sc
      PetscErrorCode   ierr
      PetscInt         i,localsize
      PetscOffset      idx_c,idx_l,il

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Get ready for local function computations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Scatter ghost points to local vector, using the 2-step process
!     DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
!  By placing code between these two statements, computations can be
!  done while messages are in transition.

      call DMGlobalToLocalBegin(da,global_in,INSERT_VALUES,u_local,ierr)
      call DMGlobalToLocalEnd(da,global_in,INSERT_VALUES,u_local,ierr)

!  Access directly the values in our local INPUT work vector
      call VecGetArray(u_local,localptr,idx_l,ierr)

!  Access directly the values in our local OUTPUT work vector
      call VecGetArray(localwork,copyptr,idx_c,ierr)

      sc = one_d0/(h*h*two_d0*(one_d0+t)*(one_d0+t))

!  Evaluate our function on the nodes owned by this processor
      call VecGetLocalSize(u_local,localsize,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Compute entries for the locally owned part
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Handle boundary conditions: This is done by using the boundary condition
!         u(t,boundary) = g(t,boundary)
!  for some function g.  Now take the derivative with respect to t to obtain
!         u_{t}(t,boundary) = g_{t}(t,boundary)
!
!  In our case, u(t,0) = t + 1, so that u_{t}(t,0) = 1
!           and u(t,1) = 2*t + 2, so that u_{t}(t,1) = 2

      if (rank .eq. 0) copyptr(1+idx_c) = one_d0
      if (rank .eq. size-1) copyptr(localsize+idx_c) = two_d0

!  Handle the interior nodes where the PDE is replaced by finite
!  difference operators.
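!
!  For an interior node i, the discretized right-hand side evaluated in
!  the loop below is
!     F_i = u_i * (u_{i+1} - 2*u_i + u_{i-1}) / (2*(1+t)**2 * h**2)
!  i.e. the second-difference approximation of u*u_xx / (2*(t+1)^2),
!  with the factor 1/(2*(1+t)**2*h**2) precomputed in sc above.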

      do 10, i=2,localsize-1
        il = i + idx_l
        copyptr(i+idx_c) = localptr(il) * sc *                            &
     &       (localptr(il+1) + localptr(il-1) - two_d0*localptr(il))
 10   continue

      call VecRestoreArray(u_local,localptr,idx_l,ierr)
      call VecRestoreArray(localwork,copyptr,idx_c,ierr)

!  Return the values from our local OUTPUT vector into our global
!  output vector.

      call DMLocalToGlobalBegin(da,localwork,INSERT_VALUES,global_out,    &
     &                          ierr)
      call DMLocalToGlobalEnd(da,localwork,INSERT_VALUES,global_out,      &
     &                        ierr)

!  Print debugging information if desired
      if (debug) then
        if (rank .eq. 0) write(6,*) 'RHS function vector'
        call VecView(global_out,PETSC_VIEWER_STDOUT_WORLD,ierr)
      endif

      return
      end

! ------------------------------------------------------------------------
!
!  RHSJacobian - User-provided routine to compute the Jacobian of the
!  right-hand-side function.
!
!  Input Parameters:
!     ts        - the TS context
!     t         - current time
!     global_in - global input vector
!     dummy     - optional user-defined context, as set by TSSetRHSJacobian()
!
!  Output Parameters:
!     A   - Jacobian matrix
!     B   - optionally different preconditioning matrix
!     str - flag indicating matrix structure
!
!  Notes:
!  RHSJacobian computes entries for the locally owned part of the Jacobian.
!   - Currently, all PETSc parallel matrix formats are partitioned by
!     contiguous chunks of rows across the processors.  The row indices
!     computed below are therefore global row numbers corresponding to
!     each local grid point.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Always specify global rows and columns of matrix entries.
!   - Here, we set all entries for a particular row at once.
!   - Note that MatSetValues() uses 0-based row and column numbers
!     in Fortran as well as in C.

      subroutine RHSJacobian(ts,t,global_in,A,B,dummy)
      implicit none
#include "ex2f.h"

!  Input/output parameters:
      TS               ts
      PetscReal        t
      Vec              global_in
      Mat              A,B
      integer          dummy

!  Local variables:
      PetscScalar      localptr(1),sc,v(3)
      PetscInt         i,col(3),i1,i3
      PetscErrorCode   ierr
      PetscInt         mstart(1),mend(1)
      PetscInt         mstarts,mends
      PetscOffset      idx,is

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Get ready for local Jacobian computations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Scatter ghost points to local vector, using the 2-step process
!     DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
!  By placing code between these two statements, computations can be
!  done while messages are in transition.

      call DMGlobalToLocalBegin(da,global_in,INSERT_VALUES,u_local,ierr)
      call DMGlobalToLocalEnd(da,global_in,INSERT_VALUES,u_local,ierr)

!  Get pointer to vector data
      call VecGetArray(u_local,localptr,idx,ierr)

!  Get starting and ending locally owned rows of the matrix
      call MatGetOwnershipRange(A,mstarts,mends,ierr)
      mstart(1) = mstarts
      mend(1)   = mends

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Compute entries for the locally owned part of the Jacobian.
!   - Currently, all PETSc parallel matrix formats are partitioned by
!     contiguous chunks of rows across the processors.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Here, we set all entries for a particular row at once.
!   - We can set matrix entries using either MatSetValuesLocal()
!     or MatSetValues().
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Set matrix rows corresponding to boundary data
      i1 = 1
      i3 = 3
      if (mstart(1) .eq. 0) then
        v(1) = zero_d0
        call MatSetValues(A,i1,mstart,i1,mstart,v,INSERT_VALUES,ierr)
        mstart(1) = mstart(1) + 1
      endif
      if (mend(1) .eq. m) then
        mend(1) = mend(1) - 1
        v(1) = zero_d0
        call MatSetValues(A,i1,mend,i1,mend,v,INSERT_VALUES,ierr)
      endif

!  Set matrix rows corresponding to interior data.
!  We construct the matrix one row at a time.
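!
!  Differentiating the interior right-hand side assembled in RHSFunction,
!     F_i = sc*u_i*(u_{i+1} + u_{i-1} - 2*u_i),
!  with respect to the unknowns gives the three nonzeros per row that are
!  set in v(1),v(2),v(3) below:
!     dF_i/du_{i-1} = sc*u_i
!     dF_i/du_i     = sc*(u_{i+1} + u_{i-1} - 4*u_i)
!     dF_i/du_{i+1} = sc*u_i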

      sc = one_d0/(h*h*two_d0*(one_d0+t)*(one_d0+t))
      do 10, i=mstart(1),mend(1)-1
        col(1) = i-1
        col(2) = i
        col(3) = i+1
        is     = i - mstart(1) + 1 + idx + 1
        v(1)   = sc*localptr(is)
        v(2)   = sc*(localptr(is+1) + localptr(is-1) -                    &
     &               four_d0*localptr(is))
        v(3)   = sc*localptr(is)
        call MatSetValues(A,i1,i,i3,col,v,INSERT_VALUES,ierr)
 10   continue

!  Restore vector
      call VecRestoreArray(u_local,localptr,idx,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Complete the matrix assembly process and set some options
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Assemble matrix, using the 2-step process:
!       MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transition,
!  by placing code between these two statements.

      call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
      call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)

!  Set an option to indicate that we will never add a new nonzero location
!  to the matrix.  If we do, it will generate an error.

      call MatSetOption(A,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE,ierr)

      return
      end