/* Actual source code: dainterp.c (petsc-3.6.1, 2015-08-06) */
/*
  Code for interpolating between grids represented by DMDAs
*/

/*
   For linear elements there are two branches of code to compute the interpolation. They should compute the same results but may not. The "new version" does
   not work for periodic domains; the old one does. Change NEWVERSION to 1 to compile in the new version. Eventually, when we are sure the two produce identical results,
   we will remove/merge the new version. Based on current tests, both produce the same results. We are leaving NEWVERSION in the code for now since some
   consider it cleaner, but the old version is turned on since it handles the periodic case.
*/
#define NEWVERSION 0

#include <petsc/private/dmdaimpl.h>    /*I "petscdmda.h" I*/
/*@
   DMCreateInterpolationScale - Forms the scaling vector L = 1./(R*1), so that diag(L)*R has rows
   summing to one and (diag(L)*R)*v acts like a coarse grid average of the nearby fine grid points.

   Input Parameters:
+  dac - DM that defines a coarse mesh
.  daf - DM that defines a fine mesh
-  mat - the restriction (or interpolation) operator from fine to coarse

   Output Parameter:
.  scale - the scaling vector

   Level: developer

.seealso: DMCreateInterpolation()

@*/
PetscErrorCode DMCreateInterpolationScale(DM dac,DM daf,Mat mat,Vec *scale)
{
  Vec         fine;
  PetscScalar one = 1.0;

  DMCreateGlobalVector(daf,&fine);
  DMCreateGlobalVector(dac,scale);
  VecSet(fine,one);
  MatRestrict(mat,fine,*scale);
  VecDestroy(&fine);
  VecReciprocal(*scale);
  return(0);
}
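
/*
   A minimal usage sketch (illustrative, not part of the original file; assumes
   dac and daf are already created, compatible DMDAs):

     Mat A;
     Vec scale;
     DMCreateInterpolation(dac,daf,&A,NULL);
     DMCreateInterpolationScale(dac,daf,A,&scale);
     ...
     VecDestroy(&scale);
     MatDestroy(&A);
*/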

PetscErrorCode DMCreateInterpolation_DA_1D_Q1(DM dac,DM daf,Mat *A)
{
  PetscErrorCode         ierr;
  PetscInt               i,i_start,m_f,Mx;
  const PetscInt         *idx_f,*idx_c;
  PetscInt               m_ghost,m_ghost_c;
  PetscInt               row,col,i_start_ghost,mx,m_c,nc,ratio;
  PetscInt               i_c,i_start_c,i_start_ghost_c,cols[2],dof;
  PetscScalar            v[2],x;
  Mat                    mat;
  DMBoundaryType         bx;
  ISLocalToGlobalMapping ltog_f,ltog_c;

  DMDAGetInfo(dac,0,&Mx,0,0,0,0,0,0,0,&bx,0,0,0);
  DMDAGetInfo(daf,0,&mx,0,0,0,0,0,&dof,0,0,0,0,0);
  if (bx == DM_BOUNDARY_PERIODIC) {
    ratio = mx/Mx;
    if (ratio*Mx != mx) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Ratio between levels: mx/Mx must be integer: mx %D Mx %D",mx,Mx);
  } else {
    ratio = (mx-1)/(Mx-1);
    if (ratio*(Mx-1) != mx-1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Ratio between levels: (mx - 1)/(Mx - 1) must be integer: mx %D Mx %D",mx,Mx);
  }

  DMDAGetCorners(daf,&i_start,0,0,&m_f,0,0);
  DMDAGetGhostCorners(daf,&i_start_ghost,0,0,&m_ghost,0,0);
  DMGetLocalToGlobalMapping(daf,&ltog_f);
  ISLocalToGlobalMappingGetBlockIndices(ltog_f,&idx_f);

  DMDAGetCorners(dac,&i_start_c,0,0,&m_c,0,0);
  DMDAGetGhostCorners(dac,&i_start_ghost_c,0,0,&m_ghost_c,0,0);
  DMGetLocalToGlobalMapping(dac,&ltog_c);
  ISLocalToGlobalMappingGetBlockIndices(ltog_c,&idx_c);

  /* create interpolation matrix */
  MatCreate(PetscObjectComm((PetscObject)dac),&mat);
  MatSetSizes(mat,m_f,m_c,mx,Mx);
  MatSetType(mat,MATAIJ);
  MatSeqAIJSetPreallocation(mat,2,NULL);
  MatMPIAIJSetPreallocation(mat,2,NULL,1,NULL);

  /* loop over local fine grid nodes setting interpolation for those */
  if (!NEWVERSION) {

    for (i=i_start; i<i_start+m_f; i++) {
      /* convert to local "natural" numbering and then to PETSc global numbering */
      row = idx_f[i-i_start_ghost];

      i_c = (i/ratio);    /* coarse grid node to left of fine grid node */
      if (i_c < i_start_ghost_c) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Processor's coarse DMDA must lie over fine DMDA\n\
    i_start %D i_c %D i_start_ghost_c %D",i_start,i_c,i_start_ghost_c);

      /*
         Only include those interpolation points that are truly
         nonzero. Note this is very important for final grid lines
         in the x direction, since they have no right neighbor.
      */
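      /* Worked example (illustrative): with ratio = 2, a fine node i = 2*i_c+1
         gets x = 0.5 and weights (0.5, 0.5) from its two coarse neighbors, while
         a fine node coinciding with a coarse node (i = 2*i_c) gets x = 0 and the
         single weight 1.0. */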
      x  = ((PetscReal)(i - i_c*ratio))/((PetscReal)ratio);
      nc = 0;
      /* one left and below; or we are right on it */
      col      = (i_c-i_start_ghost_c);
      cols[nc] = idx_c[col];
      v[nc++]  = -x + 1.0;
      /* one right? */
      if (i_c*ratio != i) {
        cols[nc] = idx_c[col+1];
        v[nc++]  = x;
      }
      MatSetValues(mat,1,&row,nc,cols,v,INSERT_VALUES);
    }

  } else {
    PetscScalar *xi;
    PetscInt    li,nxi,n;
    PetscScalar Ni[2];

    /* compute local coordinate arrays */
    nxi = ratio + 1;
    PetscMalloc1(nxi,&xi);
    for (li=0; li<nxi; li++) {
      xi[li] = -1.0 + (PetscScalar)li*(2.0/(PetscScalar)(nxi-1));
    }
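    /* e.g. ratio = 2 gives nxi = 3 and xi = {-1, 0, 1}: the reference-element
       coordinates of the fine nodes spanning one coarse cell */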

    for (i=i_start; i<i_start+m_f; i++) {
      /* convert to local "natural" numbering and then to PETSc global numbering */
      row = idx_f[(i-i_start_ghost)];

      i_c = (i/ratio);    /* coarse grid node to left of fine grid node */
      if (i_c < i_start_ghost_c) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Processor's coarse DMDA must lie over fine DMDA\n\
    i_start %D i_c %D i_start_ghost_c %D",i_start,i_c,i_start_ghost_c);

      /* remainders */
      li = i - ratio * (i/ratio);
      if (i==mx-1) li = nxi-1;

      /* corners */
      col     = (i_c-i_start_ghost_c);
      cols[0] = idx_c[col];
      Ni[0]   = 1.0;
      if ((li==0) || (li==nxi-1)) {
        MatSetValue(mat,row,cols[0],Ni[0],INSERT_VALUES);
        continue;
      }

      /* edges + interior */
      /* remainders */
      if (i==mx-1) i_c--;

      col     = (i_c-i_start_ghost_c);
      cols[0] = idx_c[col]; /* one left and below; or we are right on it */
      cols[1] = idx_c[col+1];

      Ni[0] = 0.5*(1.0-xi[li]);
      Ni[1] = 0.5*(1.0+xi[li]);
      for (n=0; n<2; n++) {
        if (PetscAbsScalar(Ni[n])<1.0e-32) cols[n]=-1;
      }
      MatSetValues(mat,1,&row,2,cols,Ni,INSERT_VALUES);
    }
    PetscFree(xi);
  }
  ISLocalToGlobalMappingRestoreBlockIndices(ltog_f,&idx_f);
  ISLocalToGlobalMappingRestoreBlockIndices(ltog_c,&idx_c);
  MatAssemblyBegin(mat,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(mat,MAT_FINAL_ASSEMBLY);
  MatCreateMAIJ(mat,dof,A);
  MatDestroy(&mat);
  return(0);
}
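
/*
   Minimal sketch (illustrative; assumes PETSc 3.6 calling conventions and the
   default Q1 interpolation type): build a 5-point coarse and 9-point fine 1D
   DMDA (ratio 2) and obtain the interpolation, which dispatches to the routine above.

     DM  dac,daf;
     Mat A;
     DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,5,1,1,NULL,&dac);
     DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,9,1,1,NULL,&daf);
     DMCreateInterpolation(dac,daf,&A,NULL);
*/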

PetscErrorCode DMCreateInterpolation_DA_1D_Q0(DM dac,DM daf,Mat *A)
{
  PetscErrorCode         ierr;
  PetscInt               i,i_start,m_f,Mx;
  const PetscInt         *idx_f,*idx_c;
  ISLocalToGlobalMapping ltog_f,ltog_c;
  PetscInt               m_ghost,m_ghost_c;
  PetscInt               row,col,i_start_ghost,mx,m_c,nc,ratio;
  PetscInt               i_c,i_start_c,i_start_ghost_c,cols[2],dof;
  PetscScalar            v[2],x;
  Mat                    mat;
  DMBoundaryType         bx;

  DMDAGetInfo(dac,0,&Mx,0,0,0,0,0,0,0,&bx,0,0,0);
  DMDAGetInfo(daf,0,&mx,0,0,0,0,0,&dof,0,0,0,0,0);
  if (bx == DM_BOUNDARY_PERIODIC) {
    ratio = mx/Mx;
    if (ratio*Mx != mx) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Ratio between levels: mx/Mx must be integer: mx %D Mx %D",mx,Mx);
  } else {
    ratio = (mx-1)/(Mx-1);
    if (ratio*(Mx-1) != mx-1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Ratio between levels: (mx - 1)/(Mx - 1) must be integer: mx %D Mx %D",mx,Mx);
  }

  DMDAGetCorners(daf,&i_start,0,0,&m_f,0,0);
  DMDAGetGhostCorners(daf,&i_start_ghost,0,0,&m_ghost,0,0);
  DMGetLocalToGlobalMapping(daf,&ltog_f);
  ISLocalToGlobalMappingGetBlockIndices(ltog_f,&idx_f);

  DMDAGetCorners(dac,&i_start_c,0,0,&m_c,0,0);
  DMDAGetGhostCorners(dac,&i_start_ghost_c,0,0,&m_ghost_c,0,0);
  DMGetLocalToGlobalMapping(dac,&ltog_c);
  ISLocalToGlobalMappingGetBlockIndices(ltog_c,&idx_c);

  /* create interpolation matrix */
  MatCreate(PetscObjectComm((PetscObject)dac),&mat);
  MatSetSizes(mat,m_f,m_c,mx,Mx);
  MatSetType(mat,MATAIJ);
  MatSeqAIJSetPreallocation(mat,2,NULL);
  MatMPIAIJSetPreallocation(mat,2,NULL,0,NULL);

  /* loop over local fine grid nodes setting interpolation for those */
  for (i=i_start; i<i_start+m_f; i++) {
    /* convert to local "natural" numbering and then to PETSc global numbering */
    row = idx_f[(i-i_start_ghost)];

    i_c = (i/ratio);    /* coarse grid node to left of fine grid node */

    /*
       Only include those interpolation points that are truly
       nonzero. Note this is very important for final grid lines
       in the x direction, since they have no right neighbor.
    */
    x  = ((PetscReal)(i - i_c*ratio))/((PetscReal)ratio);
    nc = 0;
    /* one left and below; or we are right on it */
    col      = (i_c-i_start_ghost_c);
    cols[nc] = idx_c[col];
    v[nc++]  = -x + 1.0;
    /* one right? */
    if (i_c*ratio != i) {
      cols[nc] = idx_c[col+1];
      v[nc++]  = x;
    }
    MatSetValues(mat,1,&row,nc,cols,v,INSERT_VALUES);
  }
  ISLocalToGlobalMappingRestoreBlockIndices(ltog_f,&idx_f);
  ISLocalToGlobalMappingRestoreBlockIndices(ltog_c,&idx_c);
  MatAssemblyBegin(mat,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(mat,MAT_FINAL_ASSEMBLY);
  MatCreateMAIJ(mat,dof,A);
  MatDestroy(&mat);
  PetscLogFlops(5.0*m_f);
  return(0);
}

PetscErrorCode DMCreateInterpolation_DA_2D_Q1(DM dac,DM daf,Mat *A)
{
  PetscErrorCode         ierr;
  PetscInt               i,j,i_start,j_start,m_f,n_f,Mx,My,dof;
  const PetscInt         *idx_c,*idx_f;
  ISLocalToGlobalMapping ltog_f,ltog_c;
  PetscInt               m_ghost,n_ghost,m_ghost_c,n_ghost_c,*dnz,*onz;
  PetscInt               row,col,i_start_ghost,j_start_ghost,cols[4],mx,m_c,my,nc,ratioi,ratioj;
  PetscInt               i_c,j_c,i_start_c,j_start_c,n_c,i_start_ghost_c,j_start_ghost_c,col_shift,col_scale;
  PetscMPIInt            size_c,size_f,rank_f;
  PetscScalar            v[4],x,y;
  Mat                    mat;
  DMBoundaryType         bx,by;

  DMDAGetInfo(dac,0,&Mx,&My,0,0,0,0,0,0,&bx,&by,0,0);
  DMDAGetInfo(daf,0,&mx,&my,0,0,0,0,&dof,0,0,0,0,0);
  if (bx == DM_BOUNDARY_PERIODIC) {
    ratioi = mx/Mx;
    if (ratioi*Mx != mx) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Ratio between levels: mx/Mx must be integer: mx %D Mx %D",mx,Mx);
  } else {
    ratioi = (mx-1)/(Mx-1);
    if (ratioi*(Mx-1) != mx-1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Ratio between levels: (mx - 1)/(Mx - 1) must be integer: mx %D Mx %D",mx,Mx);
  }
  if (by == DM_BOUNDARY_PERIODIC) {
    ratioj = my/My;
    if (ratioj*My != my) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Ratio between levels: my/My must be integer: my %D My %D",my,My);
  } else {
    ratioj = (my-1)/(My-1);
    if (ratioj*(My-1) != my-1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Ratio between levels: (my - 1)/(My - 1) must be integer: my %D My %D",my,My);
  }

  DMDAGetCorners(daf,&i_start,&j_start,0,&m_f,&n_f,0);
  DMDAGetGhostCorners(daf,&i_start_ghost,&j_start_ghost,0,&m_ghost,&n_ghost,0);
  DMGetLocalToGlobalMapping(daf,&ltog_f);
  ISLocalToGlobalMappingGetBlockIndices(ltog_f,&idx_f);

  DMDAGetCorners(dac,&i_start_c,&j_start_c,0,&m_c,&n_c,0);
  DMDAGetGhostCorners(dac,&i_start_ghost_c,&j_start_ghost_c,0,&m_ghost_c,&n_ghost_c,0);
  DMGetLocalToGlobalMapping(dac,&ltog_c);
  ISLocalToGlobalMappingGetBlockIndices(ltog_c,&idx_c);

  /*
     Used for handling a coarse DMDA that lives on 1/4 the processors of the fine DMDA.
     The coarse vector is then duplicated 4 times (each time it lives on 1/4 of the
     processors). Its effective length is hence 4 times its normal length, which is
     why col_scale multiplies the interpolation matrix column sizes.
     col_shift allows each set of 1/4 of the processors to do its own interpolation using ITS
     copy of the coarse vector. A bit of a hack, but it works.

     In the standard case, when size_f == size_c, col_scale == 1 and col_shift == 0.
  */
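  /* Illustrative numbers (assumption): with size_f = 8 and size_c = 2, col_scale = 4
     and ranks 0-1, 2-3, 4-5, 6-7 use col_shift = 0, Mx*My, 2*Mx*My, 3*Mx*My respectively. */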
  MPI_Comm_size(PetscObjectComm((PetscObject)dac),&size_c);
  MPI_Comm_size(PetscObjectComm((PetscObject)daf),&size_f);
  MPI_Comm_rank(PetscObjectComm((PetscObject)daf),&rank_f);
  col_scale = size_f/size_c;
  col_shift = Mx*My*(rank_f/size_c);

  MatPreallocateInitialize(PetscObjectComm((PetscObject)daf),m_f*n_f,col_scale*m_c*n_c,dnz,onz);
  for (j=j_start; j<j_start+n_f; j++) {
    for (i=i_start; i<i_start+m_f; i++) {
      /* convert to local "natural" numbering and then to PETSc global numbering */
      row = idx_f[(m_ghost*(j-j_start_ghost) + (i-i_start_ghost))];

      i_c = (i/ratioi);    /* coarse grid node to left of fine grid node */
      j_c = (j/ratioj);    /* coarse grid node below fine grid node */

      if (j_c < j_start_ghost_c) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Processor's coarse DMDA must lie over fine DMDA\n\
    j_start %D j_c %D j_start_ghost_c %D",j_start,j_c,j_start_ghost_c);
      if (i_c < i_start_ghost_c) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Processor's coarse DMDA must lie over fine DMDA\n\
    i_start %D i_c %D i_start_ghost_c %D",i_start,i_c,i_start_ghost_c);

      /*
         Only include those interpolation points that are truly
         nonzero. Note this is very important for final grid lines
         in the x and y directions, since they have no right/top neighbors.
      */
      nc = 0;
      /* one left and below; or we are right on it */
      col        = (m_ghost_c*(j_c-j_start_ghost_c) + (i_c-i_start_ghost_c));
      cols[nc++] = col_shift + idx_c[col];
      /* one right and below */
      if (i_c*ratioi != i) cols[nc++] = col_shift + idx_c[col+1];
      /* one left and above */
      if (j_c*ratioj != j) cols[nc++] = col_shift + idx_c[col+m_ghost_c];
      /* one right and above */
      if (i_c*ratioi != i && j_c*ratioj != j) cols[nc++] = col_shift + idx_c[col+(m_ghost_c+1)];
      MatPreallocateSet(row,nc,cols,dnz,onz);
    }
  }
  MatCreate(PetscObjectComm((PetscObject)daf),&mat);
  MatSetSizes(mat,m_f*n_f,col_scale*m_c*n_c,mx*my,col_scale*Mx*My);
  MatSetType(mat,MATAIJ);
  MatSeqAIJSetPreallocation(mat,0,dnz);
  MatMPIAIJSetPreallocation(mat,0,dnz,0,onz);
  MatPreallocateFinalize(dnz,onz);

  /* loop over local fine grid nodes setting interpolation for those */
  if (!NEWVERSION) {

    for (j=j_start; j<j_start+n_f; j++) {
      for (i=i_start; i<i_start+m_f; i++) {
        /* convert to local "natural" numbering and then to PETSc global numbering */
        row = idx_f[(m_ghost*(j-j_start_ghost) + (i-i_start_ghost))];

        i_c = (i/ratioi);    /* coarse grid node to left of fine grid node */
        j_c = (j/ratioj);    /* coarse grid node below fine grid node */

        /*
           Only include those interpolation points that are truly
           nonzero. Note this is very important for final grid lines
           in the x and y directions, since they have no right/top neighbors.
        */
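        /* Worked example (illustrative): with ratioi = ratioj = 2 the fine node at
           a coarse cell center has x = y = 0.5, so all four corner weights below
           equal 0.25; a fine node on a coarse node has x = y = 0 and weight 1.0. */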
        x = ((PetscReal)(i - i_c*ratioi))/((PetscReal)ratioi);
        y = ((PetscReal)(j - j_c*ratioj))/((PetscReal)ratioj);

        nc = 0;
        /* one left and below; or we are right on it */
        col      = (m_ghost_c*(j_c-j_start_ghost_c) + (i_c-i_start_ghost_c));
        cols[nc] = col_shift + idx_c[col];
        v[nc++]  = x*y - x - y + 1.0;
        /* one right and below */
        if (i_c*ratioi != i) {
          cols[nc] = col_shift + idx_c[col+1];
          v[nc++]  = -x*y + x;
        }
        /* one left and above */
        if (j_c*ratioj != j) {
          cols[nc] = col_shift + idx_c[col+m_ghost_c];
          v[nc++]  = -x*y + y;
        }
        /* one right and above */
        if (j_c*ratioj != j && i_c*ratioi != i) {
          cols[nc] = col_shift + idx_c[col+(m_ghost_c+1)];
          v[nc++]  = x*y;
        }
        MatSetValues(mat,1,&row,nc,cols,v,INSERT_VALUES);
      }
    }

  } else {
    PetscScalar Ni[4];
    PetscScalar *xi,*eta;
    PetscInt    li,nxi,lj,neta;

    /* compute local coordinate arrays */
    nxi  = ratioi + 1;
    neta = ratioj + 1;
    PetscMalloc1(nxi,&xi);
    PetscMalloc1(neta,&eta);
    for (li=0; li<nxi; li++) {
      xi[li] = -1.0 + (PetscScalar)li*(2.0/(PetscScalar)(nxi-1));
    }
    for (lj=0; lj<neta; lj++) {
      eta[lj] = -1.0 + (PetscScalar)lj*(2.0/(PetscScalar)(neta-1));
    }

    /* loop over local fine grid nodes setting interpolation for those */
    for (j=j_start; j<j_start+n_f; j++) {
      for (i=i_start; i<i_start+m_f; i++) {
        /* convert to local "natural" numbering and then to PETSc global numbering */
        row = idx_f[(m_ghost*(j-j_start_ghost) + (i-i_start_ghost))];

        i_c = (i/ratioi);    /* coarse grid node to left of fine grid node */
        j_c = (j/ratioj);    /* coarse grid node below fine grid node */

        /* remainders */
        li = i - ratioi * (i/ratioi);
        if (i==mx-1) li = nxi-1;
        lj = j - ratioj * (j/ratioj);
        if (j==my-1) lj = neta-1;

        /* corners */
        col     = (m_ghost_c*(j_c-j_start_ghost_c) + (i_c-i_start_ghost_c));
        cols[0] = col_shift + idx_c[col]; /* left, below */
        Ni[0]   = 1.0;
        if ((li==0) || (li==nxi-1)) {
          if ((lj==0) || (lj==neta-1)) {
            MatSetValue(mat,row,cols[0],Ni[0],INSERT_VALUES);
            continue;
          }
        }

        /* edges + interior */
        /* remainders */
        if (i==mx-1) i_c--;
        if (j==my-1) j_c--;

        col     = (m_ghost_c*(j_c-j_start_ghost_c) + (i_c-i_start_ghost_c));
        cols[0] = col_shift + idx_c[col];               /* left, below */
        cols[1] = col_shift + idx_c[col+1];             /* right, below */
        cols[2] = col_shift + idx_c[col+m_ghost_c];     /* left, above */
        cols[3] = col_shift + idx_c[col+(m_ghost_c+1)]; /* right, above */

        Ni[0] = 0.25*(1.0-xi[li])*(1.0-eta[lj]);
        Ni[1] = 0.25*(1.0+xi[li])*(1.0-eta[lj]);
        Ni[2] = 0.25*(1.0-xi[li])*(1.0+eta[lj]);
        Ni[3] = 0.25*(1.0+xi[li])*(1.0+eta[lj]);
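        /* These are the standard bilinear (Q1) shape functions on the reference
           square [-1,1]^2; for any (xi,eta) the four values sum to 1, so constants
           are reproduced exactly. */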
        nc = 0;
        if (PetscAbsScalar(Ni[0])<1.0e-32) cols[0]=-1;
        if (PetscAbsScalar(Ni[1])<1.0e-32) cols[1]=-1;
        if (PetscAbsScalar(Ni[2])<1.0e-32) cols[2]=-1;
        if (PetscAbsScalar(Ni[3])<1.0e-32) cols[3]=-1;

        MatSetValues(mat,1,&row,4,cols,Ni,INSERT_VALUES);
      }
    }
    PetscFree(xi);
    PetscFree(eta);
  }
  ISLocalToGlobalMappingRestoreBlockIndices(ltog_f,&idx_f);
  ISLocalToGlobalMappingRestoreBlockIndices(ltog_c,&idx_c);
  MatAssemblyBegin(mat,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(mat,MAT_FINAL_ASSEMBLY);
  MatCreateMAIJ(mat,dof,A);
  MatDestroy(&mat);
  return(0);
}

/*
   Contributed by Andrei Draganescu <aidraga@sandia.gov>
*/
PetscErrorCode DMCreateInterpolation_DA_2D_Q0(DM dac,DM daf,Mat *A)
{
  PetscErrorCode         ierr;
  PetscInt               i,j,i_start,j_start,m_f,n_f,Mx,My,dof;
  const PetscInt         *idx_c,*idx_f;
  ISLocalToGlobalMapping ltog_f,ltog_c;
  PetscInt               m_ghost,n_ghost,m_ghost_c,n_ghost_c,*dnz,*onz;
  PetscInt               row,col,i_start_ghost,j_start_ghost,cols[4],mx,m_c,my,nc,ratioi,ratioj;
  PetscInt               i_c,j_c,i_start_c,j_start_c,n_c,i_start_ghost_c,j_start_ghost_c,col_shift,col_scale;
  PetscMPIInt            size_c,size_f,rank_f;
  PetscScalar            v[4];
  Mat                    mat;
  DMBoundaryType         bx,by;

  DMDAGetInfo(dac,0,&Mx,&My,0,0,0,0,0,0,&bx,&by,0,0);
  DMDAGetInfo(daf,0,&mx,&my,0,0,0,0,&dof,0,0,0,0,0);
  if (bx == DM_BOUNDARY_PERIODIC) SETERRQ(PetscObjectComm((PetscObject)daf),PETSC_ERR_ARG_WRONG,"Cannot handle periodic grid in x");
  if (by == DM_BOUNDARY_PERIODIC) SETERRQ(PetscObjectComm((PetscObject)daf),PETSC_ERR_ARG_WRONG,"Cannot handle periodic grid in y");
  ratioi = mx/Mx;
  ratioj = my/My;
  if (ratioi*Mx != mx) SETERRQ(PetscObjectComm((PetscObject)daf),PETSC_ERR_ARG_WRONG,"Fine grid points must be multiple of coarse grid points in x");
  if (ratioj*My != my) SETERRQ(PetscObjectComm((PetscObject)daf),PETSC_ERR_ARG_WRONG,"Fine grid points must be multiple of coarse grid points in y");
  if (ratioi != 2) SETERRQ(PetscObjectComm((PetscObject)daf),PETSC_ERR_ARG_WRONG,"Coarsening factor in x must be 2");
  if (ratioj != 2) SETERRQ(PetscObjectComm((PetscObject)daf),PETSC_ERR_ARG_WRONG,"Coarsening factor in y must be 2");

  DMDAGetCorners(daf,&i_start,&j_start,0,&m_f,&n_f,0);
  DMDAGetGhostCorners(daf,&i_start_ghost,&j_start_ghost,0,&m_ghost,&n_ghost,0);
  DMGetLocalToGlobalMapping(daf,&ltog_f);
  ISLocalToGlobalMappingGetBlockIndices(ltog_f,&idx_f);

  DMDAGetCorners(dac,&i_start_c,&j_start_c,0,&m_c,&n_c,0);
  DMDAGetGhostCorners(dac,&i_start_ghost_c,&j_start_ghost_c,0,&m_ghost_c,&n_ghost_c,0);
  DMGetLocalToGlobalMapping(dac,&ltog_c);
  ISLocalToGlobalMappingGetBlockIndices(ltog_c,&idx_c);

  /*
     Used for handling a coarse DMDA that lives on 1/4 the processors of the fine DMDA.
     The coarse vector is then duplicated 4 times (each time it lives on 1/4 of the
     processors). Its effective length is hence 4 times its normal length, which is
     why col_scale multiplies the interpolation matrix column sizes.
     col_shift allows each set of 1/4 of the processors to do its own interpolation using ITS
     copy of the coarse vector. A bit of a hack, but it works.

     In the standard case, when size_f == size_c, col_scale == 1 and col_shift == 0.
  */
  MPI_Comm_size(PetscObjectComm((PetscObject)dac),&size_c);
  MPI_Comm_size(PetscObjectComm((PetscObject)daf),&size_f);
  MPI_Comm_rank(PetscObjectComm((PetscObject)daf),&rank_f);
  col_scale = size_f/size_c;
  col_shift = Mx*My*(rank_f/size_c);

  MatPreallocateInitialize(PetscObjectComm((PetscObject)daf),m_f*n_f,col_scale*m_c*n_c,dnz,onz);
  for (j=j_start; j<j_start+n_f; j++) {
    for (i=i_start; i<i_start+m_f; i++) {
      /* convert to local "natural" numbering and then to PETSc global numbering */
      row = idx_f[(m_ghost*(j-j_start_ghost) + (i-i_start_ghost))];

      i_c = (i/ratioi);    /* coarse grid node to left of fine grid node */
      j_c = (j/ratioj);    /* coarse grid node below fine grid node */

      if (j_c < j_start_ghost_c) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Processor's coarse DMDA must lie over fine DMDA\n\
    j_start %D j_c %D j_start_ghost_c %D",j_start,j_c,j_start_ghost_c);
      if (i_c < i_start_ghost_c) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Processor's coarse DMDA must lie over fine DMDA\n\
    i_start %D i_c %D i_start_ghost_c %D",i_start,i_c,i_start_ghost_c);

      /*
         Only include those interpolation points that are truly
         nonzero. Note this is very important for final grid lines
         in the x and y directions, since they have no right/top neighbors.
      */
      nc = 0;
      /* one left and below; or we are right on it */
      col        = (m_ghost_c*(j_c-j_start_ghost_c) + (i_c-i_start_ghost_c));
      cols[nc++] = col_shift + idx_c[col];
      MatPreallocateSet(row,nc,cols,dnz,onz);
    }
  }
  MatCreate(PetscObjectComm((PetscObject)daf),&mat);
  MatSetSizes(mat,m_f*n_f,col_scale*m_c*n_c,mx*my,col_scale*Mx*My);
  MatSetType(mat,MATAIJ);
  MatSeqAIJSetPreallocation(mat,0,dnz);
  MatMPIAIJSetPreallocation(mat,0,dnz,0,onz);
  MatPreallocateFinalize(dnz,onz);

  /* loop over local fine grid nodes setting interpolation for those */
  for (j=j_start; j<j_start+n_f; j++) {
    for (i=i_start; i<i_start+m_f; i++) {
      /* convert to local "natural" numbering and then to PETSc global numbering */
      row = idx_f[(m_ghost*(j-j_start_ghost) + (i-i_start_ghost))];

      i_c = (i/ratioi);    /* coarse grid node to left of fine grid node */
      j_c = (j/ratioj);    /* coarse grid node below fine grid node */
      nc  = 0;
      /* one left and below; or we are right on it */
      col      = (m_ghost_c*(j_c-j_start_ghost_c) + (i_c-i_start_ghost_c));
      cols[nc] = col_shift + idx_c[col];
      v[nc++]  = 1.0;

      MatSetValues(mat,1,&row,nc,cols,v,INSERT_VALUES);
    }
  }
  ISLocalToGlobalMappingRestoreBlockIndices(ltog_f,&idx_f);
  ISLocalToGlobalMappingRestoreBlockIndices(ltog_c,&idx_c);
  MatAssemblyBegin(mat,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(mat,MAT_FINAL_ASSEMBLY);
  MatCreateMAIJ(mat,dof,A);
  MatDestroy(&mat);
  PetscLogFlops(13.0*m_f*n_f);
  return(0);
}
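
/*
   Minimal sketch (illustrative): the Q0 path above is selected by setting the
   interpolation type on the coarse DMDA before requesting the operator.

     DMDASetInterpolationType(dac,DMDA_Q0);
     DMCreateInterpolation(dac,daf,&A,NULL);
*/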

/*
   Contributed by Jianming Yang <jianming-yang@uiowa.edu>
*/
PetscErrorCode DMCreateInterpolation_DA_3D_Q0(DM dac,DM daf,Mat *A)
{
  PetscErrorCode         ierr;
  PetscInt               i,j,l,i_start,j_start,l_start,m_f,n_f,p_f,Mx,My,Mz,dof;
  const PetscInt         *idx_c,*idx_f;
  ISLocalToGlobalMapping ltog_f,ltog_c;
  PetscInt               m_ghost,n_ghost,p_ghost,m_ghost_c,n_ghost_c,p_ghost_c,nc,*dnz,*onz;
  PetscInt               row,col,i_start_ghost,j_start_ghost,l_start_ghost,cols[8],mx,m_c,my,n_c,mz,p_c,ratioi,ratioj,ratiol;
  PetscInt               i_c,j_c,l_c,i_start_c,j_start_c,l_start_c,i_start_ghost_c,j_start_ghost_c,l_start_ghost_c,col_shift,col_scale;
  PetscMPIInt            size_c,size_f,rank_f;
  PetscScalar            v[8];
  Mat                    mat;
  DMBoundaryType         bx,by,bz;

  DMDAGetInfo(dac,0,&Mx,&My,&Mz,0,0,0,0,0,&bx,&by,&bz,0);
  DMDAGetInfo(daf,0,&mx,&my,&mz,0,0,0,&dof,0,0,0,0,0);
  if (bx == DM_BOUNDARY_PERIODIC) SETERRQ(PetscObjectComm((PetscObject)daf),PETSC_ERR_ARG_WRONG,"Cannot handle periodic grid in x");
  if (by == DM_BOUNDARY_PERIODIC) SETERRQ(PetscObjectComm((PetscObject)daf),PETSC_ERR_ARG_WRONG,"Cannot handle periodic grid in y");
  if (bz == DM_BOUNDARY_PERIODIC) SETERRQ(PetscObjectComm((PetscObject)daf),PETSC_ERR_ARG_WRONG,"Cannot handle periodic grid in z");
  ratioi = mx/Mx;
  ratioj = my/My;
  ratiol = mz/Mz;
  if (ratioi*Mx != mx) SETERRQ(PetscObjectComm((PetscObject)daf),PETSC_ERR_ARG_WRONG,"Fine grid points must be multiple of coarse grid points in x");
  if (ratioj*My != my) SETERRQ(PetscObjectComm((PetscObject)daf),PETSC_ERR_ARG_WRONG,"Fine grid points must be multiple of coarse grid points in y");
  if (ratiol*Mz != mz) SETERRQ(PetscObjectComm((PetscObject)daf),PETSC_ERR_ARG_WRONG,"Fine grid points must be multiple of coarse grid points in z");
  if (ratioi != 2 && ratioi != 1) SETERRQ(PetscObjectComm((PetscObject)daf),PETSC_ERR_ARG_WRONG,"Coarsening factor in x must be 1 or 2");
  if (ratioj != 2 && ratioj != 1) SETERRQ(PetscObjectComm((PetscObject)daf),PETSC_ERR_ARG_WRONG,"Coarsening factor in y must be 1 or 2");
  if (ratiol != 2 && ratiol != 1) SETERRQ(PetscObjectComm((PetscObject)daf),PETSC_ERR_ARG_WRONG,"Coarsening factor in z must be 1 or 2");

  DMDAGetCorners(daf,&i_start,&j_start,&l_start,&m_f,&n_f,&p_f);
  DMDAGetGhostCorners(daf,&i_start_ghost,&j_start_ghost,&l_start_ghost,&m_ghost,&n_ghost,&p_ghost);
  DMGetLocalToGlobalMapping(daf,&ltog_f);
  ISLocalToGlobalMappingGetBlockIndices(ltog_f,&idx_f);

  DMDAGetCorners(dac,&i_start_c,&j_start_c,&l_start_c,&m_c,&n_c,&p_c);
  DMDAGetGhostCorners(dac,&i_start_ghost_c,&j_start_ghost_c,&l_start_ghost_c,&m_ghost_c,&n_ghost_c,&p_ghost_c);
  DMGetLocalToGlobalMapping(dac,&ltog_c);
  ISLocalToGlobalMappingGetBlockIndices(ltog_c,&idx_c);

  /*
     Used for handling a coarse DMDA that lives on 1/4 the processors of the fine DMDA.
     The coarse vector is then duplicated 4 times (each time it lives on 1/4 of the
     processors). Its effective length is hence 4 times its normal length, which is
     why col_scale multiplies the interpolation matrix column sizes.
     col_shift allows each set of 1/4 of the processors to do its own interpolation using ITS
     copy of the coarse vector. A bit of a hack, but it works.

     In the standard case, when size_f == size_c, col_scale == 1 and col_shift == 0.
  */
  MPI_Comm_size(PetscObjectComm((PetscObject)dac),&size_c);
  MPI_Comm_size(PetscObjectComm((PetscObject)daf),&size_f);
  MPI_Comm_rank(PetscObjectComm((PetscObject)daf),&rank_f);
  col_scale = size_f/size_c;
  col_shift = Mx*My*Mz*(rank_f/size_c);

  MatPreallocateInitialize(PetscObjectComm((PetscObject)daf),m_f*n_f*p_f,col_scale*m_c*n_c*p_c,dnz,onz);
  for (l=l_start; l<l_start+p_f; l++) {
    for (j=j_start; j<j_start+n_f; j++) {
      for (i=i_start; i<i_start+m_f; i++) {
        /* convert to local "natural" numbering and then to PETSc global numbering */
        row = idx_f[(m_ghost*n_ghost*(l-l_start_ghost) + m_ghost*(j-j_start_ghost) + (i-i_start_ghost))];

        i_c = (i/ratioi);    /* coarse grid node to left of fine grid node */
        j_c = (j/ratioj);    /* coarse grid node below fine grid node */
        l_c = (l/ratiol);

        if (l_c < l_start_ghost_c) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Processor's coarse DMDA must lie over fine DMDA\n\
    l_start %D l_c %D l_start_ghost_c %D",l_start,l_c,l_start_ghost_c);
        if (j_c < j_start_ghost_c) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Processor's coarse DMDA must lie over fine DMDA\n\
    j_start %D j_c %D j_start_ghost_c %D",j_start,j_c,j_start_ghost_c);
        if (i_c < i_start_ghost_c) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Processor's coarse DMDA must lie over fine DMDA\n\
    i_start %D i_c %D i_start_ghost_c %D",i_start,i_c,i_start_ghost_c);

        /*
           Only include those interpolation points that are truly
           nonzero. Note this is very important for final grid lines
           in the x and y directions, since they have no right/top neighbors.
        */
        nc = 0;
        /* one left and below; or we are right on it */
        col        = (m_ghost_c*n_ghost_c*(l_c-l_start_ghost_c) + m_ghost_c*(j_c-j_start_ghost_c) + (i_c-i_start_ghost_c));
        cols[nc++] = col_shift + idx_c[col];
        MatPreallocateSet(row,nc,cols,dnz,onz);
      }
    }
  }
  MatCreate(PetscObjectComm((PetscObject)daf),&mat);
  MatSetSizes(mat,m_f*n_f*p_f,col_scale*m_c*n_c*p_c,mx*my*mz,col_scale*Mx*My*Mz);
  MatSetType(mat,MATAIJ);
  MatSeqAIJSetPreallocation(mat,0,dnz);
  MatMPIAIJSetPreallocation(mat,0,dnz,0,onz);
  MatPreallocateFinalize(dnz,onz);

  /* loop over local fine grid nodes setting interpolation for those */
  for (l=l_start; l<l_start+p_f; l++) {
    for (j=j_start; j<j_start+n_f; j++) {
      for (i=i_start; i<i_start+m_f; i++) {
        /* convert to local "natural" numbering and then to PETSc global numbering */
        row = idx_f[(m_ghost*n_ghost*(l-l_start_ghost) + m_ghost*(j-j_start_ghost) + (i-i_start_ghost))];

        i_c = (i/ratioi);    /* coarse grid node to left of fine grid node */
        j_c = (j/ratioj);    /* coarse grid node below fine grid node */
        l_c = (l/ratiol);
        nc  = 0;
        /* one left and below; or we are right on it */
        col      = (m_ghost_c*n_ghost_c*(l_c-l_start_ghost_c) + m_ghost_c*(j_c-j_start_ghost_c) + (i_c-i_start_ghost_c));
        cols[nc] = col_shift + idx_c[col];
        v[nc++]  = 1.0;

        MatSetValues(mat,1,&row,nc,cols,v,INSERT_VALUES);
      }
    }
  }
  ISLocalToGlobalMappingRestoreBlockIndices(ltog_f,&idx_f);
  ISLocalToGlobalMappingRestoreBlockIndices(ltog_c,&idx_c);
  MatAssemblyBegin(mat,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(mat,MAT_FINAL_ASSEMBLY);
  MatCreateMAIJ(mat,dof,A);
  MatDestroy(&mat);
  PetscLogFlops(13.0*m_f*n_f*p_f);
  return(0);
}

PetscErrorCode DMCreateInterpolation_DA_3D_Q1(DM dac,DM daf,Mat *A)
{
  PetscErrorCode         ierr;
  PetscInt               i,j,i_start,j_start,m_f,n_f,Mx,My,dof,l;
  const PetscInt         *idx_c,*idx_f;
  ISLocalToGlobalMapping ltog_f,ltog_c;
  PetscInt               m_ghost,n_ghost,m_ghost_c,n_ghost_c,Mz,mz;
  PetscInt               row,col,i_start_ghost,j_start_ghost,cols[8],mx,m_c,my,nc,ratioi,ratioj,ratiok;
  PetscInt               i_c,j_c,i_start_c,j_start_c,n_c,i_start_ghost_c,j_start_ghost_c;
  PetscInt               l_start,p_f,l_start_ghost,p_ghost,l_start_c,p_c;
  PetscInt               l_start_ghost_c,p_ghost_c,l_c,*dnz,*onz;
  PetscScalar            v[8],x,y,z;
  Mat                    mat;
  DMBoundaryType         bx,by,bz;

  DMDAGetInfo(dac,0,&Mx,&My,&Mz,0,0,0,0,0,&bx,&by,&bz,0);
  DMDAGetInfo(daf,0,&mx,&my,&mz,0,0,0,&dof,0,0,0,0,0);
  if (mx == Mx) {
    ratioi = 1;
  } else if (bx == DM_BOUNDARY_PERIODIC) {
    ratioi = mx/Mx;
    if (ratioi*Mx != mx) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Ratio between levels: mx/Mx must be integer: mx %D Mx %D",mx,Mx);
  } else {
    ratioi = (mx-1)/(Mx-1);
    if (ratioi*(Mx-1) != mx-1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Ratio between levels: (mx - 1)/(Mx - 1) must be integer: mx %D Mx %D",mx,Mx);
  }
  if (my == My) {
    ratioj = 1;
  } else if (by == DM_BOUNDARY_PERIODIC) {
    ratioj = my/My;
    if (ratioj*My != my) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Ratio between levels: my/My must be integer: my %D My %D",my,My);
  } else {
    ratioj = (my-1)/(My-1);
    if (ratioj*(My-1) != my-1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Ratio between levels: (my - 1)/(My - 1) must be integer: my %D My %D",my,My);
  }
  if (mz == Mz) {
    ratiok = 1;
  } else if (bz == DM_BOUNDARY_PERIODIC) {
    ratiok = mz/Mz;
    if (ratiok*Mz != mz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Ratio between levels: mz/Mz must be integer: mz %D Mz %D",mz,Mz);
  } else {
    ratiok = (mz-1)/(Mz-1);
    if (ratiok*(Mz-1) != mz-1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Ratio between levels: (mz - 1)/(Mz - 1) must be integer: mz %D Mz %D",mz,Mz);
  }

  DMDAGetCorners(daf,&i_start,&j_start,&l_start,&m_f,&n_f,&p_f);
  DMDAGetGhostCorners(daf,&i_start_ghost,&j_start_ghost,&l_start_ghost,&m_ghost,&n_ghost,&p_ghost);
  DMGetLocalToGlobalMapping(daf,&ltog_f);
  ISLocalToGlobalMappingGetBlockIndices(ltog_f,&idx_f);

  DMDAGetCorners(dac,&i_start_c,&j_start_c,&l_start_c,&m_c,&n_c,&p_c);
  DMDAGetGhostCorners(dac,&i_start_ghost_c,&j_start_ghost_c,&l_start_ghost_c,&m_ghost_c,&n_ghost_c,&p_ghost_c);
  DMGetLocalToGlobalMapping(dac,&ltog_c);
  ISLocalToGlobalMappingGetBlockIndices(ltog_c,&idx_c);

  /* create interpolation matrix, determining exact preallocation */
  MatPreallocateInitialize(PetscObjectComm((PetscObject)dac),m_f*n_f*p_f,m_c*n_c*p_c,dnz,onz);
  /* loop over local fine grid nodes counting interpolating points */
  for (l=l_start; l<l_start+p_f; l++) {
    for (j=j_start; j<j_start+n_f; j++) {
      for (i=i_start; i<i_start+m_f; i++) {
        /* convert to local "natural" numbering and then to PETSc global numbering */
        row = idx_f[(m_ghost*n_ghost*(l-l_start_ghost) + m_ghost*(j-j_start_ghost) + (i-i_start_ghost))];
        i_c = (i/ratioi);
        j_c = (j/ratioj);
        l_c = (l/ratiok);
        if (l_c < l_start_ghost_c) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Processor's coarse DMDA must lie over fine DMDA\n\
    l_start %D l_c %D l_start_ghost_c %D",l_start,l_c,l_start_ghost_c);
        if (j_c < j_start_ghost_c) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Processor's coarse DMDA must lie over fine DMDA\n\
    j_start %D j_c %D j_start_ghost_c %D",j_start,j_c,j_start_ghost_c);
        if (i_c < i_start_ghost_c) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Processor's coarse DMDA must lie over fine DMDA\n\
    i_start %D i_c %D i_start_ghost_c %D",i_start,i_c,i_start_ghost_c);

        /*
           Only include those interpolation points that are truly
           nonzero. Note this is very important for final grid lines
           in the x and y directions, since they have no right/top neighbors.
        */
        nc         = 0;
        col        = (m_ghost_c*n_ghost_c*(l_c-l_start_ghost_c) + m_ghost_c*(j_c-j_start_ghost_c) + (i_c-i_start_ghost_c));
        cols[nc++] = idx_c[col];
        if (i_c*ratioi != i) {
          cols[nc++] = idx_c[col+1];
        }
        if (j_c*ratioj != j) {
          cols[nc++] = idx_c[col+m_ghost_c];
        }
        if (l_c*ratiok != l) {
          cols[nc++] = idx_c[col+m_ghost_c*n_ghost_c];
        }
        if (j_c*ratioj != j && i_c*ratioi != i) {
          cols[nc++] = idx_c[col+(m_ghost_c+1)];
        }
        if (j_c*ratioj != j && l_c*ratiok != l) {
          cols[nc++] = idx_c[col+(m_ghost_c*n_ghost_c+m_ghost_c)];
        }
        if (i_c*ratioi != i && l_c*ratiok != l) {
          cols[nc++] = idx_c[col+(m_ghost_c*n_ghost_c+1)];
        }
        if (i_c*ratioi != i && l_c*ratiok != l && j_c*ratioj != j) {
          cols[nc++] = idx_c[col+(m_ghost_c*n_ghost_c+m_ghost_c+1)];
        }
        MatPreallocateSet(row,nc,cols,dnz,onz);
      }
    }
  }
  MatCreate(PetscObjectComm((PetscObject)dac),&mat);
  MatSetSizes(mat,m_f*n_f*p_f,m_c*n_c*p_c,mx*my*mz,Mx*My*Mz);
  MatSetType(mat,MATAIJ);
  MatSeqAIJSetPreallocation(mat,0,dnz);
  MatMPIAIJSetPreallocation(mat,0,dnz,0,onz);
  MatPreallocateFinalize(dnz,onz);

  /* loop over local fine grid nodes setting interpolation for those */
  if (!NEWVERSION) {

    for (l=l_start; l<l_start+p_f; l++) {
      for (j=j_start; j<j_start+n_f; j++) {
        for (i=i_start; i<i_start+m_f; i++) {
          /* convert to local "natural" numbering and then to PETSc global numbering */
          row = idx_f[(m_ghost*n_ghost*(l-l_start_ghost) + m_ghost*(j-j_start_ghost) + (i-i_start_ghost))];

          i_c = (i/ratioi);
          j_c = (j/ratioj);
          l_c = (l/ratiok);

          /*
             Only include those interpolation points that are truly
             nonzero. Note this is very important for final grid lines
             in the x and y directions, since they have no right/top neighbors.
          */
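          /* Note (illustrative): each weight below, e.g. .125*(1-(2x-1))*(1-(2y-1))*(1-(2z-1)),
             algebraically simplifies to the standard trilinear form, here (1-x)*(1-y)*(1-z). */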
          x = ((PetscReal)(i - i_c*ratioi))/((PetscReal)ratioi);
          y = ((PetscReal)(j - j_c*ratioj))/((PetscReal)ratioj);
          z = ((PetscReal)(l - l_c*ratiok))/((PetscReal)ratiok);

          nc = 0;
          /* one left and below; or we are right on it */
          col = (m_ghost_c*n_ghost_c*(l_c-l_start_ghost_c)+m_ghost_c*(j_c-j_start_ghost_c)+(i_c-i_start_ghost_c));

          cols[nc] = idx_c[col];
          v[nc++]  = .125*(1. - (2.0*x-1.))*(1. - (2.0*y-1.))*(1. - (2.0*z-1.));

          if (i_c*ratioi != i) {
            cols[nc] = idx_c[col+1];
            v[nc++]  = .125*(1. + (2.0*x-1.))*(1. - (2.0*y-1.))*(1. - (2.0*z-1.));
          }

          if (j_c*ratioj != j) {
            cols[nc] = idx_c[col+m_ghost_c];
            v[nc++]  = .125*(1. - (2.0*x-1.))*(1. + (2.0*y-1.))*(1. - (2.0*z-1.));
          }

          if (l_c*ratiok != l) {
            cols[nc] = idx_c[col+m_ghost_c*n_ghost_c];
            v[nc++]  = .125*(1. - (2.0*x-1.))*(1. - (2.0*y-1.))*(1. + (2.0*z-1.));
          }

          if (j_c*ratioj != j && i_c*ratioi != i) {
            cols[nc] = idx_c[col+(m_ghost_c+1)];
            v[nc++]  = .125*(1. + (2.0*x-1.))*(1. + (2.0*y-1.))*(1. - (2.0*z-1.));
          }

          if (j_c*ratioj != j && l_c*ratiok != l) {
            cols[nc] = idx_c[col+(m_ghost_c*n_ghost_c+m_ghost_c)];
            v[nc++]  = .125*(1. - (2.0*x-1.))*(1. + (2.0*y-1.))*(1. + (2.0*z-1.));
          }

          if (i_c*ratioi != i && l_c*ratiok != l) {
            cols[nc] = idx_c[col+(m_ghost_c*n_ghost_c+1)];
            v[nc++]  = .125*(1. + (2.0*x-1.))*(1. - (2.0*y-1.))*(1. + (2.0*z-1.));
          }

          if (i_c*ratioi != i && l_c*ratiok != l && j_c*ratioj != j) {
            cols[nc] = idx_c[col+(m_ghost_c*n_ghost_c+m_ghost_c+1)];
            v[nc++]  = .125*(1. + (2.0*x-1.))*(1. + (2.0*y-1.))*(1. + (2.0*z-1.));
          }
          MatSetValues(mat,1,&row,nc,cols,v,INSERT_VALUES);
        }
      }
    }

  } else {
    PetscScalar *xi,*eta,*zeta;
    PetscInt    li,nxi,lj,neta,lk,nzeta,n;
    PetscScalar Ni[8];

    /* compute local coordinate arrays */
    nxi   = ratioi + 1;
    neta  = ratioj + 1;
    nzeta = ratiok + 1;
    PetscMalloc1(nxi,&xi);
    PetscMalloc1(neta,&eta);
    PetscMalloc1(nzeta,&zeta);
    for (li=0; li<nxi; li++) xi[li] = -1.0 + (PetscScalar)li*(2.0/(PetscScalar)(nxi-1));
    for (lj=0; lj<neta; lj++) eta[lj] = -1.0 + (PetscScalar)lj*(2.0/(PetscScalar)(neta-1));
    for (lk=0; lk<nzeta; lk++) zeta[lk] = -1.0 + (PetscScalar)lk*(2.0/(PetscScalar)(nzeta-1));

    for (l=l_start; l<l_start+p_f; l++) {
      for (j=j_start; j<j_start+n_f; j++) {
        for (i=i_start; i<i_start+m_f; i++) {
          /* convert to local "natural" numbering and then to PETSc global numbering */
          row = idx_f[(m_ghost*n_ghost*(l-l_start_ghost) + m_ghost*(j-j_start_ghost) + (i-i_start_ghost))];

          i_c = (i/ratioi);
          j_c = (j/ratioj);
          l_c = (l/ratiok);

          /* remainders */
          li = i - ratioi * (i/ratioi);
          if (i==mx-1) li = nxi-1;
          lj = j - ratioj * (j/ratioj);
          if (j==my-1) lj = neta-1;
          lk = l - ratiok * (l/ratiok);
          if (l==mz-1) lk = nzeta-1;

          /* corners */
          col     = (m_ghost_c*n_ghost_c*(l_c-l_start_ghost_c)+m_ghost_c*(j_c-j_start_ghost_c)+(i_c-i_start_ghost_c));
          cols[0] = idx_c[col];
          Ni[0]   = 1.0;
          if ((li==0) || (li==nxi-1)) {
            if ((lj==0) || (lj==neta-1)) {
              if ((lk==0) || (lk==nzeta-1)) {
                MatSetValue(mat,row,cols[0],Ni[0],INSERT_VALUES);
                continue;
              }
            }
          }

          /* edges + interior */
          /* remainders */
          if (i==mx-1) i_c--;
          if (j==my-1) j_c--;
          if (l==mz-1) l_c--;

          col     = (m_ghost_c*n_ghost_c*(l_c-l_start_ghost_c) + m_ghost_c*(j_c-j_start_ghost_c) + (i_c-i_start_ghost_c));
          cols[0] = idx_c[col];                                   /* one left and below; or we are right on it */
          cols[1] = idx_c[col+1];                                 /* one right and below */
          cols[2] = idx_c[col+m_ghost_c];                         /* one left and above */
          cols[3] = idx_c[col+(m_ghost_c+1)];                     /* one right and above */

          cols[4] = idx_c[col+m_ghost_c*n_ghost_c];               /* one left and below and front; or we are right on it */
          cols[5] = idx_c[col+(m_ghost_c*n_ghost_c+1)];           /* one right and below, and front */
          cols[6] = idx_c[col+(m_ghost_c*n_ghost_c+m_ghost_c)];   /* one left and above and front */
          cols[7] = idx_c[col+(m_ghost_c*n_ghost_c+m_ghost_c+1)]; /* one right and above and front */

          Ni[0] = 0.125*(1.0-xi[li])*(1.0-eta[lj])*(1.0-zeta[lk]);
          Ni[1] = 0.125*(1.0+xi[li])*(1.0-eta[lj])*(1.0-zeta[lk]);
          Ni[2] = 0.125*(1.0-xi[li])*(1.0+eta[lj])*(1.0-zeta[lk]);
          Ni[3] = 0.125*(1.0+xi[li])*(1.0+eta[lj])*(1.0-zeta[lk]);

          Ni[4] = 0.125*(1.0-xi[li])*(1.0-eta[lj])*(1.0+zeta[lk]);
          Ni[5] = 0.125*(1.0+xi[li])*(1.0-eta[lj])*(1.0+zeta[lk]);
          Ni[6] = 0.125*(1.0-xi[li])*(1.0+eta[lj])*(1.0+zeta[lk]);
          Ni[7] = 0.125*(1.0+xi[li])*(1.0+eta[lj])*(1.0+zeta[lk]);

          for (n=0; n<8; n++) {
            if (PetscAbsScalar(Ni[n])<1.0e-32) cols[n]=-1;
          }
          MatSetValues(mat,1,&row,8,cols,Ni,INSERT_VALUES);
        }
      }
    }
    PetscFree(xi);
    PetscFree(eta);
    PetscFree(zeta);
  }
  ISLocalToGlobalMappingRestoreBlockIndices(ltog_f,&idx_f);
  ISLocalToGlobalMappingRestoreBlockIndices(ltog_c,&idx_c);
  MatAssemblyBegin(mat,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(mat,MAT_FINAL_ASSEMBLY);

  MatCreateMAIJ(mat,dof,A);
  MatDestroy(&mat);
  return(0);
}

PetscErrorCode DMCreateInterpolation_DA(DM dac,DM daf,Mat *A,Vec *scale)
{
  PetscErrorCode  ierr;
  PetscInt        dimc,Mc,Nc,Pc,mc,nc,pc,dofc,sc,dimf,Mf,Nf,Pf,mf,nf,pf,doff,sf;
  DMBoundaryType  bxc,byc,bzc,bxf,byf,bzf;
  DMDAStencilType stc,stf;
  DM_DA           *ddc = (DM_DA*)dac->data;

  DMDAGetInfo(dac,&dimc,&Mc,&Nc,&Pc,&mc,&nc,&pc,&dofc,&sc,&bxc,&byc,&bzc,&stc);
  DMDAGetInfo(daf,&dimf,&Mf,&Nf,&Pf,&mf,&nf,&pf,&doff,&sf,&bxf,&byf,&bzf,&stf);
  if (dimc != dimf) SETERRQ2(PetscObjectComm((PetscObject)daf),PETSC_ERR_ARG_INCOMP,"Dimensions of DMDA do not match %D %D",dimc,dimf);
  if (dofc != doff) SETERRQ2(PetscObjectComm((PetscObject)daf),PETSC_ERR_ARG_INCOMP,"DOF of DMDA do not match %D %D",dofc,doff);
  if (sc != sf) SETERRQ2(PetscObjectComm((PetscObject)daf),PETSC_ERR_ARG_INCOMP,"Stencil width of DMDA do not match %D %D",sc,sf);
  if (bxc != bxf || byc != byf || bzc != bzf) SETERRQ(PetscObjectComm((PetscObject)daf),PETSC_ERR_ARG_INCOMP,"Boundary type different in two DMDAs");
  if (stc != stf) SETERRQ(PetscObjectComm((PetscObject)daf),PETSC_ERR_ARG_INCOMP,"Stencil type different in two DMDAs");
  if (Mc < 2 && Mf > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Coarse grid requires at least 2 points in x direction");
  if (dimc > 1 && Nc < 2 && Nf > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Coarse grid requires at least 2 points in y direction");
  if (dimc > 2 && Pc < 2 && Pf > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Coarse grid requires at least 2 points in z direction");

  if (ddc->interptype == DMDA_Q1) {
    if (dimc == 1) {
      DMCreateInterpolation_DA_1D_Q1(dac,daf,A);
    } else if (dimc == 2) {
      DMCreateInterpolation_DA_2D_Q1(dac,daf,A);
    } else if (dimc == 3) {
      DMCreateInterpolation_DA_3D_Q1(dac,daf,A);
    } else SETERRQ2(PetscObjectComm((PetscObject)daf),PETSC_ERR_SUP,"No support for this DMDA dimension %D for interpolation type %d",dimc,(int)ddc->interptype);
  } else if (ddc->interptype == DMDA_Q0) {
    if (dimc == 1) {
      DMCreateInterpolation_DA_1D_Q0(dac,daf,A);
    } else if (dimc == 2) {
      DMCreateInterpolation_DA_2D_Q0(dac,daf,A);
    } else if (dimc == 3) {
      DMCreateInterpolation_DA_3D_Q0(dac,daf,A);
    } else SETERRQ2(PetscObjectComm((PetscObject)daf),PETSC_ERR_SUP,"No support for this DMDA dimension %D for interpolation type %d",dimc,(int)ddc->interptype);
  }
  if (scale) {
    DMCreateInterpolationScale((DM)dac,(DM)daf,*A,scale);
  }
  return(0);
}
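
/*
   Minimal end-to-end sketch (illustrative; PETSc 3.6 calling conventions assumed):
   create a coarse 2D DMDA, refine it, and build the interpolation together with
   the scaling vector via the generic dispatch above.

     DM  dac,daf;
     Mat A;
     Vec scale;
     DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_BOX,
                  5,5,PETSC_DECIDE,PETSC_DECIDE,1,1,NULL,NULL,&dac);
     DMRefine(dac,PETSC_COMM_WORLD,&daf);
     DMCreateInterpolation(dac,daf,&A,&scale);
*/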

PetscErrorCode DMCreateInjection_DA_1D(DM dac,DM daf,VecScatter *inject)
{
  PetscErrorCode         ierr;
  PetscInt               i,i_start,m_f,Mx,dof;
  const PetscInt         *idx_f;
  ISLocalToGlobalMapping ltog_f;
  PetscInt               m_ghost,m_ghost_c;
  PetscInt               row,i_start_ghost,mx,m_c,nc,ratioi;
  PetscInt               i_start_c,i_start_ghost_c;
  PetscInt               *cols;
  DMBoundaryType         bx;
  Vec                    vecf,vecc;
  IS                     isf;

  DMDAGetInfo(dac,0,&Mx,0,0,0,0,0,0,0,&bx,0,0,0);
  DMDAGetInfo(daf,0,&mx,0,0,0,0,0,&dof,0,0,0,0,0);
  if (bx == DM_BOUNDARY_PERIODIC) {
    ratioi = mx/Mx;
    if (ratioi*Mx != mx) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Ratio between levels: mx/Mx must be integer: mx %D Mx %D",mx,Mx);
  } else {
    ratioi = (mx-1)/(Mx-1);
    if (ratioi*(Mx-1) != mx-1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Ratio between levels: (mx - 1)/(Mx - 1) must be integer: mx %D Mx %D",mx,Mx);
  }

  DMDAGetCorners(daf,&i_start,0,0,&m_f,0,0);
  DMDAGetGhostCorners(daf,&i_start_ghost,0,0,&m_ghost,0,0);
  DMGetLocalToGlobalMapping(daf,&ltog_f);
  ISLocalToGlobalMappingGetBlockIndices(ltog_f,&idx_f);

  DMDAGetCorners(dac,&i_start_c,0,0,&m_c,0,0);
  DMDAGetGhostCorners(dac,&i_start_ghost_c,0,0,&m_ghost_c,0,0);

  /* loop over local coarse grid nodes, selecting the coincident fine grid node for each */
  nc = 0;
  PetscMalloc1(m_f,&cols);

  for (i=i_start_c; i<i_start_c+m_c; i++) {
    PetscInt i_f = i*ratioi;

    if (i_f < i_start_ghost || i_f >= i_start_ghost+m_ghost) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Processor's coarse DMDA must lie over fine DMDA\ni_c %D i_f %D fine ghost range [%D,%D]",i,i_f,i_start_ghost,i_start_ghost+m_ghost);

    row        = idx_f[(i_f-i_start_ghost)];
    cols[nc++] = row;
  }

  ISLocalToGlobalMappingRestoreBlockIndices(ltog_f,&idx_f);
  ISCreateBlock(PetscObjectComm((PetscObject)daf),dof,nc,cols,PETSC_OWN_POINTER,&isf);
  DMGetGlobalVector(dac,&vecc);
  DMGetGlobalVector(daf,&vecf);
  VecScatterCreate(vecf,isf,vecc,NULL,inject);
  DMRestoreGlobalVector(dac,&vecc);
  DMRestoreGlobalVector(daf,&vecf);
  ISDestroy(&isf);
  return(0);
}

PetscErrorCode DMCreateInjection_DA_2D(DM dac,DM daf,VecScatter *inject)
{
  PetscErrorCode         ierr;
  PetscInt               i,j,i_start,j_start,m_f,n_f,Mx,My,dof;
  const PetscInt         *idx_c,*idx_f;
  ISLocalToGlobalMapping ltog_f,ltog_c;
  PetscInt               m_ghost,n_ghost,m_ghost_c,n_ghost_c;
  PetscInt               row,i_start_ghost,j_start_ghost,mx,m_c,my,nc,ratioi,ratioj;
  PetscInt               i_start_c,j_start_c,n_c,i_start_ghost_c,j_start_ghost_c;
  PetscInt               *cols;
  DMBoundaryType         bx,by;
  Vec                    vecf,vecc;
  IS                     isf;

  DMDAGetInfo(dac,0,&Mx,&My,0,0,0,0,0,0,&bx,&by,0,0);
  DMDAGetInfo(daf,0,&mx,&my,0,0,0,0,&dof,0,0,0,0,0);
  if (bx == DM_BOUNDARY_PERIODIC) {
    ratioi = mx/Mx;
    if (ratioi*Mx != mx) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Ratio between levels: mx/Mx must be integer: mx %D Mx %D",mx,Mx);
  } else {
    ratioi = (mx-1)/(Mx-1);
    if (ratioi*(Mx-1) != mx-1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Ratio between levels: (mx - 1)/(Mx - 1) must be integer: mx %D Mx %D",mx,Mx);
  }
  if (by == DM_BOUNDARY_PERIODIC) {
    ratioj = my/My;
    if (ratioj*My != my) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Ratio between levels: my/My must be integer: my %D My %D",my,My);
  } else {
    ratioj = (my-1)/(My-1);
    if (ratioj*(My-1) != my-1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Ratio between levels: (my - 1)/(My - 1) must be integer: my %D My %D",my,My);
  }

  DMDAGetCorners(daf,&i_start,&j_start,0,&m_f,&n_f,0);
  DMDAGetGhostCorners(daf,&i_start_ghost,&j_start_ghost,0,&m_ghost,&n_ghost,0);
  DMGetLocalToGlobalMapping(daf,&ltog_f);
  ISLocalToGlobalMappingGetBlockIndices(ltog_f,&idx_f);

  DMDAGetCorners(dac,&i_start_c,&j_start_c,0,&m_c,&n_c,0);
  DMDAGetGhostCorners(dac,&i_start_ghost_c,&j_start_ghost_c,0,&m_ghost_c,&n_ghost_c,0);
  DMGetLocalToGlobalMapping(dac,&ltog_c);
  ISLocalToGlobalMappingGetBlockIndices(ltog_c,&idx_c);

  /* loop over local coarse grid nodes, selecting the coincident fine grid node for each */
  nc = 0;
  PetscMalloc1(n_f*m_f,&cols);
  for (j=j_start_c; j<j_start_c+n_c; j++) {
    for (i=i_start_c; i<i_start_c+m_c; i++) {
      PetscInt i_f = i*ratioi,j_f = j*ratioj;
      if (j_f < j_start_ghost || j_f >= j_start_ghost+n_ghost) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Processor's coarse DMDA must lie over fine DMDA\n\
    j_c %D j_f %D fine ghost range [%D,%D]",j,j_f,j_start_ghost,j_start_ghost+n_ghost);
      if (i_f < i_start_ghost || i_f >= i_start_ghost+m_ghost) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Processor's coarse DMDA must lie over fine DMDA\n\
    i_c %D i_f %D fine ghost range [%D,%D]",i,i_f,i_start_ghost,i_start_ghost+m_ghost);
      row        = idx_f[(m_ghost*(j_f-j_start_ghost) + (i_f-i_start_ghost))];
      cols[nc++] = row;
    }
  }
  ISLocalToGlobalMappingRestoreBlockIndices(ltog_f,&idx_f);
  ISLocalToGlobalMappingRestoreBlockIndices(ltog_c,&idx_c);

  ISCreateBlock(PetscObjectComm((PetscObject)daf),dof,nc,cols,PETSC_OWN_POINTER,&isf);
  DMGetGlobalVector(dac,&vecc);
  DMGetGlobalVector(daf,&vecf);
  VecScatterCreate(vecf,isf,vecc,NULL,inject);
  DMRestoreGlobalVector(dac,&vecc);
  DMRestoreGlobalVector(daf,&vecf);
  ISDestroy(&isf);
  return(0);
}

PetscErrorCode DMCreateInjection_DA_3D(DM dac,DM daf,VecScatter *inject)
{
  PetscErrorCode         ierr;
  PetscInt               i,j,k,i_start,j_start,k_start,m_f,n_f,p_f,Mx,My,Mz;
  PetscInt               m_ghost,n_ghost,p_ghost,m_ghost_c,n_ghost_c,p_ghost_c;
  PetscInt               i_start_ghost,j_start_ghost,k_start_ghost;
  PetscInt               mx,my,mz,ratioi,ratioj,ratiok;
  PetscInt               i_start_c,j_start_c,k_start_c;
  PetscInt               m_c,n_c,p_c;
  PetscInt               i_start_ghost_c,j_start_ghost_c,k_start_ghost_c;
  PetscInt               row,nc,dof;
  const PetscInt         *idx_c,*idx_f;
  ISLocalToGlobalMapping ltog_f,ltog_c;
  PetscInt               *cols;
  DMBoundaryType         bx,by,bz;
  Vec                    vecf,vecc;
  IS                     isf;

  DMDAGetInfo(dac,0,&Mx,&My,&Mz,0,0,0,0,0,&bx,&by,&bz,0);
  DMDAGetInfo(daf,0,&mx,&my,&mz,0,0,0,&dof,0,0,0,0,0);

  if (bx == DM_BOUNDARY_PERIODIC) {
    ratioi = mx/Mx;
    if (ratioi*Mx != mx) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Ratio between levels: mx/Mx must be integer: mx %D Mx %D",mx,Mx);
  } else {
    ratioi = (mx-1)/(Mx-1);
    if (ratioi*(Mx-1) != mx-1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Ratio between levels: (mx - 1)/(Mx - 1) must be integer: mx %D Mx %D",mx,Mx);
  }
  if (by == DM_BOUNDARY_PERIODIC) {
    ratioj = my/My;
    if (ratioj*My != my) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Ratio between levels: my/My must be integer: my %D My %D",my,My);
  } else {
    ratioj = (my-1)/(My-1);
    if (ratioj*(My-1) != my-1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Ratio between levels: (my - 1)/(My - 1) must be integer: my %D My %D",my,My);
  }
  if (bz == DM_BOUNDARY_PERIODIC) {
    ratiok = mz/Mz;
    if (ratiok*Mz != mz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Ratio between levels: mz/Mz must be integer: mz %D Mz %D",mz,Mz);
  } else {
    ratiok = (mz-1)/(Mz-1);
    if (ratiok*(Mz-1) != mz-1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Ratio between levels: (mz - 1)/(Mz - 1) must be integer: mz %D Mz %D",mz,Mz);
  }

  DMDAGetCorners(daf,&i_start,&j_start,&k_start,&m_f,&n_f,&p_f);
  DMDAGetGhostCorners(daf,&i_start_ghost,&j_start_ghost,&k_start_ghost,&m_ghost,&n_ghost,&p_ghost);
  DMGetLocalToGlobalMapping(daf,&ltog_f);
  ISLocalToGlobalMappingGetBlockIndices(ltog_f,&idx_f);

  DMDAGetCorners(dac,&i_start_c,&j_start_c,&k_start_c,&m_c,&n_c,&p_c);
  DMDAGetGhostCorners(dac,&i_start_ghost_c,&j_start_ghost_c,&k_start_ghost_c,&m_ghost_c,&n_ghost_c,&p_ghost_c);
  DMGetLocalToGlobalMapping(dac,&ltog_c);
  ISLocalToGlobalMappingGetBlockIndices(ltog_c,&idx_c);

  /* loop over local coarse grid nodes, selecting the coincident fine grid node for each */
  nc = 0;
  PetscMalloc1(n_f*m_f*p_f,&cols);
  for (k=k_start_c; k<k_start_c+p_c; k++) {
    for (j=j_start_c; j<j_start_c+n_c; j++) {
      for (i=i_start_c; i<i_start_c+m_c; i++) {
        PetscInt i_f = i*ratioi,j_f = j*ratioj,k_f = k*ratiok;
        if (k_f < k_start_ghost || k_f >= k_start_ghost+p_ghost) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Processor's coarse DMDA must lie over fine DMDA "
                                                                          "k_c %D k_f %D fine ghost range [%D,%D]",k,k_f,k_start_ghost,k_start_ghost+p_ghost);
        if (j_f < j_start_ghost || j_f >= j_start_ghost+n_ghost) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Processor's coarse DMDA must lie over fine DMDA "
                                                                          "j_c %D j_f %D fine ghost range [%D,%D]",j,j_f,j_start_ghost,j_start_ghost+n_ghost);
        if (i_f < i_start_ghost || i_f >= i_start_ghost+m_ghost) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Processor's coarse DMDA must lie over fine DMDA "
                                                                          "i_c %D i_f %D fine ghost range [%D,%D]",i,i_f,i_start_ghost,i_start_ghost+m_ghost);
        row        = idx_f[(m_ghost*n_ghost*(k_f-k_start_ghost) + m_ghost*(j_f-j_start_ghost) + (i_f-i_start_ghost))];
        cols[nc++] = row;
      }
    }
  }
  ISLocalToGlobalMappingRestoreBlockIndices(ltog_f,&idx_f);
  ISLocalToGlobalMappingRestoreBlockIndices(ltog_c,&idx_c);

  ISCreateBlock(PetscObjectComm((PetscObject)daf),dof,nc,cols,PETSC_OWN_POINTER,&isf);
  DMGetGlobalVector(dac,&vecc);
  DMGetGlobalVector(daf,&vecf);
  VecScatterCreate(vecf,isf,vecc,NULL,inject);
  DMRestoreGlobalVector(dac,&vecc);
  DMRestoreGlobalVector(daf,&vecf);
  ISDestroy(&isf);
  return(0);
}

PetscErrorCode DMCreateInjection_DA(DM dac,DM daf,Mat *mat)
{
  PetscErrorCode  ierr;
  PetscInt        dimc,Mc,Nc,Pc,mc,nc,pc,dofc,sc,dimf,Mf,Nf,Pf,mf,nf,pf,doff,sf;
  DMBoundaryType  bxc,byc,bzc,bxf,byf,bzf;
  DMDAStencilType stc,stf;
  VecScatter      inject = NULL;

  DMDAGetInfo(dac,&dimc,&Mc,&Nc,&Pc,&mc,&nc,&pc,&dofc,&sc,&bxc,&byc,&bzc,&stc);
  DMDAGetInfo(daf,&dimf,&Mf,&Nf,&Pf,&mf,&nf,&pf,&doff,&sf,&bxf,&byf,&bzf,&stf);
  if (dimc != dimf) SETERRQ2(PetscObjectComm((PetscObject)daf),PETSC_ERR_ARG_INCOMP,"Dimensions of DMDA do not match %D %D",dimc,dimf);
  if (dofc != doff) SETERRQ2(PetscObjectComm((PetscObject)daf),PETSC_ERR_ARG_INCOMP,"DOF of DMDA do not match %D %D",dofc,doff);
  if (sc != sf) SETERRQ2(PetscObjectComm((PetscObject)daf),PETSC_ERR_ARG_INCOMP,"Stencil width of DMDA do not match %D %D",sc,sf);
  if (bxc != bxf || byc != byf || bzc != bzf) SETERRQ(PetscObjectComm((PetscObject)daf),PETSC_ERR_ARG_INCOMP,"Boundary type different in two DMDAs");
  if (stc != stf) SETERRQ(PetscObjectComm((PetscObject)daf),PETSC_ERR_ARG_INCOMP,"Stencil type different in two DMDAs");
  if (Mc < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Coarse grid requires at least 2 points in x direction");
  if (dimc > 1 && Nc < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Coarse grid requires at least 2 points in y direction");
  if (dimc > 2 && Pc < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Coarse grid requires at least 2 points in z direction");

  if (dimc == 1) {
    DMCreateInjection_DA_1D(dac,daf,&inject);
  } else if (dimc == 2) {
    DMCreateInjection_DA_2D(dac,daf,&inject);
  } else if (dimc == 3) {
    DMCreateInjection_DA_3D(dac,daf,&inject);
  }
  MatCreateScatter(PetscObjectComm((PetscObject)inject),inject,mat);
  VecScatterDestroy(&inject);
  return(0);
}
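
/*
   Minimal sketch (illustrative): the injection is obtained through the generic
   interface, which returns the scatter wrapped in a Mat; applying it restricts
   a fine grid vector to the coarse grid by pointwise sampling.

     Mat inject;
     DMCreateInjection(dac,daf,&inject);
     MatRestrict(inject,finevec,coarsevec);
*/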

PetscErrorCode DMCreateAggregates_DA(DM dac,DM daf,Mat *rest)
{
  PetscErrorCode         ierr;
  PetscInt               dimc,Mc,Nc,Pc,mc,nc,pc,dofc,sc;
  PetscInt               dimf,Mf,Nf,Pf,mf,nf,pf,doff,sf;
  DMBoundaryType         bxc,byc,bzc,bxf,byf,bzf;
  DMDAStencilType        stc,stf;
  PetscInt               i,j,l;
  PetscInt               i_start,j_start,l_start,m_f,n_f,p_f;
  PetscInt               i_start_ghost,j_start_ghost,l_start_ghost,m_ghost,n_ghost,p_ghost;
  const PetscInt         *idx_f;
  PetscInt               i_c,j_c,l_c;
  PetscInt               i_start_c,j_start_c,l_start_c,m_c,n_c,p_c;
  PetscInt               i_start_ghost_c,j_start_ghost_c,l_start_ghost_c,m_ghost_c,n_ghost_c,p_ghost_c;
  const PetscInt         *idx_c;
  PetscInt               d;
  PetscInt               a;
  PetscInt               max_agg_size;
  PetscInt               *fine_nodes;
  PetscScalar            *one_vec;
  PetscInt               fn_idx;
  ISLocalToGlobalMapping ltogmf,ltogmc;

  DMDAGetInfo(dac,&dimc,&Mc,&Nc,&Pc,&mc,&nc,&pc,&dofc,&sc,&bxc,&byc,&bzc,&stc);
  DMDAGetInfo(daf,&dimf,&Mf,&Nf,&Pf,&mf,&nf,&pf,&doff,&sf,&bxf,&byf,&bzf,&stf);
  if (dimc != dimf) SETERRQ2(PetscObjectComm((PetscObject)daf),PETSC_ERR_ARG_INCOMP,"Dimensions of DMDA do not match %D %D",dimc,dimf);
  if (dofc != doff) SETERRQ2(PetscObjectComm((PetscObject)daf),PETSC_ERR_ARG_INCOMP,"DOF of DMDA do not match %D %D",dofc,doff);
  if (sc != sf) SETERRQ2(PetscObjectComm((PetscObject)daf),PETSC_ERR_ARG_INCOMP,"Stencil width of DMDA do not match %D %D",sc,sf);
  if (bxc != bxf || byc != byf || bzc != bzf) SETERRQ(PetscObjectComm((PetscObject)daf),PETSC_ERR_ARG_INCOMP,"Boundary type different in two DMDAs");
  if (stc != stf) SETERRQ(PetscObjectComm((PetscObject)daf),PETSC_ERR_ARG_INCOMP,"Stencil type different in two DMDAs");

  if (Mf < Mc) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Coarse grid has more points than fine grid, Mc %D, Mf %D",Mc,Mf);
  if (Nf < Nc) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Coarse grid has more points than fine grid, Nc %D, Nf %D",Nc,Nf);
  if (Pf < Pc) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Coarse grid has more points than fine grid, Pc %D, Pf %D",Pc,Pf);

  if (Pc < 0) Pc = 1;
  if (Pf < 0) Pf = 1;
  if (Nc < 0) Nc = 1;
  if (Nf < 0) Nf = 1;

  DMDAGetCorners(daf,&i_start,&j_start,&l_start,&m_f,&n_f,&p_f);
  DMDAGetGhostCorners(daf,&i_start_ghost,&j_start_ghost,&l_start_ghost,&m_ghost,&n_ghost,&p_ghost);

  DMGetLocalToGlobalMapping(daf,&ltogmf);
  ISLocalToGlobalMappingGetIndices(ltogmf,&idx_f);

  DMDAGetCorners(dac,&i_start_c,&j_start_c,&l_start_c,&m_c,&n_c,&p_c);
  DMDAGetGhostCorners(dac,&i_start_ghost_c,&j_start_ghost_c,&l_start_ghost_c,&m_ghost_c,&n_ghost_c,&p_ghost_c);

  DMGetLocalToGlobalMapping(dac,&ltogmc);
  ISLocalToGlobalMappingGetIndices(ltogmc,&idx_c);

  /*
     The basic idea is as follows. Here's a 2D example: suppose r_x, r_y are the ratios
     for dimensions 1 and 2 respectively.
     Let (i,j) be a coarse grid node. All the fine grid nodes between r_x*i and r_x*(i+1)
     and between r_y*j and r_y*(j+1) will be grouped into the same coarse grid aggregate.
     Each specific dof on the fine grid is mapped to one dof on the coarse grid.
  */
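  /* e.g. (illustrative) Mf = 8, Mc = 4 gives r_x = 2, so coarse node i aggregates
     fine nodes 2*i and 2*i+1 in the x direction */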

  max_agg_size = (Mf/Mc+1)*(Nf/Nc+1)*(Pf/Pc+1);

  /* create the matrix that will contain the restriction operator */
  MatCreateAIJ(PetscObjectComm((PetscObject)daf),m_c*n_c*p_c*dofc,m_f*n_f*p_f*doff,Mc*Nc*Pc*dofc,Mf*Nf*Pf*doff,
               max_agg_size,NULL,max_agg_size,NULL,rest);

  /* store nodes in the fine grid here */
  PetscMalloc2(max_agg_size,&one_vec,max_agg_size,&fine_nodes);
  for (i=0; i<max_agg_size; i++) one_vec[i] = 1.0;

  /* loop over all coarse nodes */
  for (l_c=l_start_c; l_c<l_start_c+p_c; l_c++) {
    for (j_c=j_start_c; j_c<j_start_c+n_c; j_c++) {
      for (i_c=i_start_c; i_c<i_start_c+m_c; i_c++) {
        for (d=0; d<dofc; d++) {
          /* convert to local "natural" numbering and then to PETSc global numbering */
          a = idx_c[dofc*(m_ghost_c*n_ghost_c*(l_c-l_start_ghost_c) + m_ghost_c*(j_c-j_start_ghost_c) + (i_c-i_start_ghost_c))] + d;

          fn_idx = 0;
          /* Corresponding fine points are all points (i_f, j_f, l_f) such that
             i_c*Mf/Mc <= i_f < (i_c+1)*Mf/Mc
             (same for other dimensions)
          */
          for (l=l_c*Pf/Pc; l<PetscMin((l_c+1)*Pf/Pc,Pf); l++) {
            for (j=j_c*Nf/Nc; j<PetscMin((j_c+1)*Nf/Nc,Nf); j++) {
              for (i=i_c*Mf/Mc; i<PetscMin((i_c+1)*Mf/Mc,Mf); i++) {
                fine_nodes[fn_idx] = idx_f[doff*(m_ghost*n_ghost*(l-l_start_ghost) + m_ghost*(j-j_start_ghost) + (i-i_start_ghost))] + d;
                fn_idx++;
              }
            }
          }
          /* add all these points to one aggregate */
          MatSetValues(*rest,1,&a,fn_idx,fine_nodes,one_vec,INSERT_VALUES);
        }
      }
    }
  }
  ISLocalToGlobalMappingRestoreIndices(ltogmf,&idx_f);
  ISLocalToGlobalMappingRestoreIndices(ltogmc,&idx_c);
  PetscFree2(one_vec,fine_nodes);
  MatAssemblyBegin(*rest,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(*rest,MAT_FINAL_ASSEMBLY);
  return(0);
}
1429: }