Actual source code: dmproject.c
#include <petsc/private/dmimpl.h>
#include <petscdm.h>
#include <petscdmda.h>
#include <petscdmplex.h>
#include <petscdmswarm.h>
#include <petscksp.h>
#include <petscblaslapack.h>

#include <petsc/private/dmswarmimpl.h>
#include "../src/dm/impls/swarm/data_bucket.h" // For DataBucket internals

typedef struct _projectConstraintsCtx {
  DM  dm;
  Vec mask;
} projectConstraintsCtx;
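
/* Apply the normal-equations operator for DMGlobalToLocalSolve(): scatter the global vector to
   local form, apply the optional pointwise mask, and sum the result back into a global vector
   with ADD_VALUES (the adjoint of the global-to-local map). */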
static PetscErrorCode MatMult_GlobalToLocalNormal(Mat CtC, Vec x, Vec y)
{
  DM                     dm;
  Vec                    local, mask;
  projectConstraintsCtx *ctx;

  PetscFunctionBegin;
  PetscCall(MatShellGetContext(CtC, &ctx));
  dm   = ctx->dm;
  mask = ctx->mask;
  PetscCall(DMGetLocalVector(dm, &local));
  PetscCall(DMGlobalToLocalBegin(dm, x, INSERT_VALUES, local));
  PetscCall(DMGlobalToLocalEnd(dm, x, INSERT_VALUES, local));
  if (mask) PetscCall(VecPointwiseMult(local, mask, local));
  PetscCall(VecSet(y, 0.));
  PetscCall(DMLocalToGlobalBegin(dm, local, ADD_VALUES, y));
  PetscCall(DMLocalToGlobalEnd(dm, local, ADD_VALUES, y));
  PetscCall(DMRestoreLocalVector(dm, &local));
  PetscFunctionReturn(PETSC_SUCCESS);
}
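
/* Pointwise function used to build the mask below: sets every field component to 1, so that
   projecting it marks each local dof reachable by the projection. */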
static PetscErrorCode DMGlobalToLocalSolve_project1(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nf, PetscScalar u[], void *ctx)
{
  PetscInt f;

  PetscFunctionBegin;
  for (f = 0; f < Nf; f++) u[f] = 1.;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  DMGlobalToLocalSolve - Solve for the global vector that is mapped to a given local vector by `DMGlobalToLocalBegin()`/`DMGlobalToLocalEnd()` with mode
  `INSERT_VALUES`.

  Collective

  Input Parameters:
+ dm - The `DM` object
. x  - The local vector
- y  - The global vector: the input value of `y` is used as an initial guess

  Output Parameter:
. y - The least-squares solution

  Level: advanced

  Note:
  It is assumed that the sum of all the local vector sizes is greater than or equal to the global vector size, so the solution is
  a least-squares solution. It is also assumed that `DMLocalToGlobalBegin()`/`DMLocalToGlobalEnd()` with mode `ADD_VALUES` is the adjoint of the
  global-to-local map, so that the least-squares solution may be found by the normal equations.

  If the `DM` is of type `DMPLEX`, then `y` is the solution of $L^T D L y = L^T D x$, where $D$ is a diagonal mask that is 1 for every point in
  the union of the closures of the local cells and 0 otherwise. This difference is only relevant if there are anchor points that are not in the
  closure of any local cell (see `DMPlexGetAnchors()`/`DMPlexSetAnchors()`).
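
  Example Usage:
  A minimal sketch (the vector names are illustrative; it assumes `dm` already has a discretization attached)
.vb
  Vec locVec, gVec;

  PetscCall(DMGetLocalVector(dm, &locVec));
  /* ... fill locVec with the local values to be matched ... */
  PetscCall(DMGetGlobalVector(dm, &gVec));
  PetscCall(VecSet(gVec, 0.0)); /* initial guess for the CG solve of the normal equations */
  PetscCall(DMGlobalToLocalSolve(dm, locVec, gVec));
  PetscCall(DMRestoreLocalVector(dm, &locVec));
  PetscCall(DMRestoreGlobalVector(dm, &gVec));
.ve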

  Developer Notes:
  What is L?

  If this solves for a global vector from a local vector, why is it not called `DMLocalToGlobalSolve()`?

.seealso: [](ch_dmbase), `DM`, `DMGlobalToLocalBegin()`, `DMGlobalToLocalEnd()`, `DMLocalToGlobalBegin()`, `DMLocalToGlobalEnd()`, `DMPlexGetAnchors()`, `DMPlexSetAnchors()`
@*/
PetscErrorCode DMGlobalToLocalSolve(DM dm, Vec x, Vec y)
{
  Mat                   CtC;
  PetscInt              n, N, cStart, cEnd, c;
  PetscBool             isPlex;
  KSP                   ksp;
  PC                    pc;
  Vec                   global, mask = NULL;
  projectConstraintsCtx ctx;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)dm, DMPLEX, &isPlex));
  if (isPlex) {
    /* mark points in the closure */
    PetscCall(DMCreateLocalVector(dm, &mask));
    PetscCall(VecSet(mask, 0.0));
    PetscCall(DMPlexGetSimplexOrBoxCells(dm, 0, &cStart, &cEnd));
    if (cEnd > cStart) {
      PetscScalar *ones;
      PetscInt     numValues, i;

      PetscCall(DMPlexVecGetClosure(dm, NULL, mask, cStart, &numValues, NULL));
      PetscCall(PetscMalloc1(numValues, &ones));
      for (i = 0; i < numValues; i++) ones[i] = 1.;
      for (c = cStart; c < cEnd; c++) PetscCall(DMPlexVecSetClosure(dm, NULL, mask, c, ones, INSERT_VALUES));
      PetscCall(PetscFree(ones));
    }
  } else {
    PetscBool hasMask;

    PetscCall(DMHasNamedLocalVector(dm, "_DMGlobalToLocalSolve_mask", &hasMask));
    if (!hasMask) {
      PetscErrorCode (**func)(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nf, PetscScalar *u, void *ctx);
      void   **ctx;
      PetscInt Nf, f;

      PetscCall(DMGetNumFields(dm, &Nf));
      PetscCall(PetscMalloc2(Nf, &func, Nf, &ctx));
      for (f = 0; f < Nf; ++f) {
        func[f] = DMGlobalToLocalSolve_project1;
        ctx[f]  = NULL;
      }
      PetscCall(DMGetNamedLocalVector(dm, "_DMGlobalToLocalSolve_mask", &mask));
      PetscCall(DMProjectFunctionLocal(dm, 0.0, func, ctx, INSERT_ALL_VALUES, mask));
      PetscCall(DMRestoreNamedLocalVector(dm, "_DMGlobalToLocalSolve_mask", &mask));
      PetscCall(PetscFree2(func, ctx));
    }
    PetscCall(DMGetNamedLocalVector(dm, "_DMGlobalToLocalSolve_mask", &mask));
  }
  ctx.dm   = dm;
  ctx.mask = mask;
  PetscCall(VecGetSize(y, &N));
  PetscCall(VecGetLocalSize(y, &n));
  PetscCall(MatCreate(PetscObjectComm((PetscObject)dm), &CtC));
  PetscCall(MatSetSizes(CtC, n, n, N, N));
  PetscCall(MatSetType(CtC, MATSHELL));
  PetscCall(MatSetUp(CtC));
  PetscCall(MatShellSetContext(CtC, &ctx));
  PetscCall(MatShellSetOperation(CtC, MATOP_MULT, (void (*)(void))MatMult_GlobalToLocalNormal));
  PetscCall(KSPCreate(PetscObjectComm((PetscObject)dm), &ksp));
  PetscCall(KSPSetOperators(ksp, CtC, CtC));
  PetscCall(KSPSetType(ksp, KSPCG));
  PetscCall(KSPGetPC(ksp, &pc));
  PetscCall(PCSetType(pc, PCNONE));
  PetscCall(KSPSetInitialGuessNonzero(ksp, PETSC_TRUE));
  PetscCall(KSPSetUp(ksp));
  PetscCall(DMGetGlobalVector(dm, &global));
  PetscCall(VecSet(global, 0.));
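  /* Note: the mask is applied to the input local vector x in place */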
  if (mask) PetscCall(VecPointwiseMult(x, mask, x));
  PetscCall(DMLocalToGlobalBegin(dm, x, ADD_VALUES, global));
  PetscCall(DMLocalToGlobalEnd(dm, x, ADD_VALUES, global));
  PetscCall(KSPSolve(ksp, global, y));
  PetscCall(DMRestoreGlobalVector(dm, &global));
  /* clean up */
  PetscCall(KSPDestroy(&ksp));
  PetscCall(MatDestroy(&CtC));
  if (isPlex) {
    PetscCall(VecDestroy(&mask));
  } else {
    PetscCall(DMRestoreNamedLocalVector(dm, "_DMGlobalToLocalSolve_mask", &mask));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  DMProjectField - Project a given function of the input fields into the function space provided by a `DM`, putting the coefficients in a global vector.

  Collective

  Input Parameters:
+ dm    - The `DM`
. time  - The time
. U     - The input field vector
. funcs - The functions to evaluate, one per field
- mode  - The insertion mode for values

  Output Parameter:
. X - The output vector

  Calling sequence of `funcs`:
+ dim          - The spatial dimension
. Nf           - The number of input fields
. NfAux        - The number of input auxiliary fields
. uOff         - The offset of each field in `u`
. uOff_x       - The offset of each field in `u_x`
. u            - The field values at this point in space
. u_t          - The field time derivative at this point in space (or `NULL`)
. u_x          - The field derivatives at this point in space
. aOff         - The offset of each auxiliary field in `a`
. aOff_x       - The offset of each auxiliary field in `a_x`
. a            - The auxiliary field values at this point in space
. a_t          - The auxiliary field time derivative at this point in space (or `NULL`)
. a_x          - The auxiliary field derivatives at this point in space
. t            - The current time
. x            - The coordinates of this point
. numConstants - The number of constants
. constants    - The value of each constant
- f            - The value of the function at this point in space

  Level: advanced

  Note:
  There are three different `DM`s that potentially interact in this function. The output `dm` specifies the layout of the values calculated by the function.
  The input `DM`, attached to `U`, may be different. For example, you can input the solution over the full domain, but output over a piece of the boundary, or
  a subdomain. You can also output a different number of fields than the input, with different discretizations. Lastly, the auxiliary `DM`, attached to the
  auxiliary field vector, which is attached to `dm`, can also be different. It can have a different topology, number of fields, and discretizations.
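
  Example Usage:
  A minimal sketch that projects the first component of the first input field; the pointwise function name `copyField0` is illustrative, not part of the API
.vb
  static void copyField0(PetscInt dim, PetscInt Nf, PetscInt NfAux,
                         const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[],
                         const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[],
                         PetscReal t, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar f[])
  {
    f[0] = u[uOff[0]]; /* value of the first field at this point */
  }

  /* one function per output field */
  void (*funcs[1])(PetscInt, PetscInt, PetscInt, const PetscInt[], const PetscInt[], const PetscScalar[], const PetscScalar[], const PetscScalar[],
                   const PetscInt[], const PetscInt[], const PetscScalar[], const PetscScalar[], const PetscScalar[],
                   PetscReal, const PetscReal[], PetscInt, const PetscScalar[], PetscScalar[]) = {copyField0};

  PetscCall(DMProjectField(dm, 0.0, U, funcs, INSERT_VALUES, X));
.ve
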
.seealso: [](ch_dmbase), `DM`, `DMProjectFieldLocal()`, `DMProjectFieldLabelLocal()`, `DMProjectFunction()`, `DMComputeL2Diff()`
@*/
PetscErrorCode DMProjectField(DM dm, PetscReal time, Vec U, void (**funcs)(PetscInt dim, PetscInt Nf, PetscInt NfAux, const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[], const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[], PetscReal t, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar f[]), InsertMode mode, Vec X)
{
  Vec localX, localU;
  DM  dmIn;

  PetscFunctionBegin;
  PetscCall(DMGetLocalVector(dm, &localX));
  /* We currently check whether localU == localX to see if we need to apply BC */
  if (U != X) {
    PetscCall(VecGetDM(U, &dmIn));
    PetscCall(DMGetLocalVector(dmIn, &localU));
  } else {
    dmIn   = dm;
    localU = localX;
  }
  PetscCall(DMGlobalToLocalBegin(dmIn, U, INSERT_VALUES, localU));
  PetscCall(DMGlobalToLocalEnd(dmIn, U, INSERT_VALUES, localU));
  PetscCall(DMProjectFieldLocal(dm, time, localU, funcs, mode, localX));
  PetscCall(DMLocalToGlobalBegin(dm, localX, mode, X));
  PetscCall(DMLocalToGlobalEnd(dm, localX, mode, X));
  if (mode == INSERT_VALUES || mode == INSERT_ALL_VALUES || mode == INSERT_BC_VALUES) {
    Mat cMat;

    PetscCall(DMGetDefaultConstraints(dm, NULL, &cMat, NULL));
    if (cMat) PetscCall(DMGlobalToLocalSolve(dm, localX, X));
  }
  PetscCall(DMRestoreLocalVector(dm, &localX));
  if (U != X) PetscCall(DMRestoreLocalVector(dmIn, &localU));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/********************* Adaptive Interpolation **************************/

/* See the discussion of Adaptive Interpolation in manual/high_level_mg.rst */
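/* DMAdaptInterpolator() refits each row of the interpolator In by a weighted least-squares solve
   (LAPACK GELSS): the fine sample vectors in MF are matched from the coarse samples in MC, with
   per-vector weights taken from inverse Rayleigh quotients of the smoother's operator. */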
PetscErrorCode DMAdaptInterpolator(DM dmc, DM dmf, Mat In, KSP smoother, Mat MF, Mat MC, Mat *InAdapt, void *user)
{
  Mat                globalA, AF;
  Vec                tmp;
  const PetscScalar *af, *ac;
  PetscScalar       *A, *b, *x, *workscalar;
  PetscReal         *w, *sing, *workreal, rcond = PETSC_SMALL;
  PetscBLASInt       M, N, one = 1, irank, lwrk, info;
  PetscInt           debug = 0, rStart, rEnd, r, maxcols = 0, k, Nc, ldac, ldaf;
  PetscBool          allocVc = PETSC_FALSE;

  PetscFunctionBegin;
  PetscCall(PetscLogEventBegin(DM_AdaptInterpolator, dmc, dmf, 0, 0));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-dm_interpolator_adapt_debug", &debug, NULL));
  PetscCall(MatGetSize(MF, NULL, &Nc));
  PetscCall(MatDuplicate(In, MAT_SHARE_NONZERO_PATTERN, InAdapt));
  PetscCall(MatGetOwnershipRange(In, &rStart, &rEnd));
#if 0
  PetscCall(MatGetMaxRowLen(In, &maxcols));
#else
  for (r = rStart; r < rEnd; ++r) {
    PetscInt ncols;

    PetscCall(MatGetRow(In, r, &ncols, NULL, NULL));
    maxcols = PetscMax(maxcols, ncols);
    PetscCall(MatRestoreRow(In, r, &ncols, NULL, NULL));
  }
#endif
  if (Nc < maxcols) PetscCall(PetscPrintf(PETSC_COMM_SELF, "The number of input vectors %" PetscInt_FMT " < %" PetscInt_FMT " the maximum number of column entries\n", Nc, maxcols));
  for (k = 0; k < Nc && debug; ++k) {
    char        name[PETSC_MAX_PATH_LEN];
    const char *prefix;
    Vec         vc, vf;

    PetscCall(PetscObjectGetOptionsPrefix((PetscObject)smoother, &prefix));

    if (MC) {
      PetscCall(PetscSNPrintf(name, PETSC_MAX_PATH_LEN, "%sCoarse Vector %" PetscInt_FMT, prefix ? prefix : "", k));
      PetscCall(MatDenseGetColumnVecRead(MC, k, &vc));
      PetscCall(PetscObjectSetName((PetscObject)vc, name));
      PetscCall(VecViewFromOptions(vc, NULL, "-dm_adapt_interp_view_coarse"));
      PetscCall(MatDenseRestoreColumnVecRead(MC, k, &vc));
    }
    PetscCall(PetscSNPrintf(name, PETSC_MAX_PATH_LEN, "%sFine Vector %" PetscInt_FMT, prefix ? prefix : "", k));
    PetscCall(MatDenseGetColumnVecRead(MF, k, &vf));
    PetscCall(PetscObjectSetName((PetscObject)vf, name));
    PetscCall(VecViewFromOptions(vf, NULL, "-dm_adapt_interp_view_fine"));
    PetscCall(MatDenseRestoreColumnVecRead(MF, k, &vf));
  }
  PetscCall(PetscBLASIntCast(3 * PetscMin(Nc, maxcols) + PetscMax(2 * PetscMin(Nc, maxcols), PetscMax(Nc, maxcols)), &lwrk));
  PetscCall(PetscMalloc7(Nc * maxcols, &A, PetscMax(Nc, maxcols), &b, Nc, &w, maxcols, &x, maxcols, &sing, lwrk, &workscalar, 5 * PetscMin(Nc, maxcols), &workreal));
  /* w_k = \frac{\HC{v_k} B_l v_k}{\HC{v_k} A_l v_k} or the inverse Rayleigh quotient, which we calculate using \frac{\HC{v_k} v_k}{\HC{v_k} B^{-1}_l A_l v_k} */
  PetscCall(KSPGetOperators(smoother, &globalA, NULL));

  PetscCall(MatMatMult(globalA, MF, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &AF));
  for (k = 0; k < Nc; ++k) {
    PetscScalar vnorm, vAnorm;
    Vec         vf;

    w[k] = 1.0;
    PetscCall(MatDenseGetColumnVecRead(MF, k, &vf));
    PetscCall(MatDenseGetColumnVecRead(AF, k, &tmp));
    PetscCall(VecDot(vf, vf, &vnorm));
#if 0
    PetscCall(DMGetGlobalVector(dmf, &tmp2));
    PetscCall(KSPSolve(smoother, tmp, tmp2));
    PetscCall(VecDot(vf, tmp2, &vAnorm));
    PetscCall(DMRestoreGlobalVector(dmf, &tmp2));
#else
    PetscCall(VecDot(vf, tmp, &vAnorm));
#endif
    w[k] = PetscRealPart(vnorm) / PetscRealPart(vAnorm);
    PetscCall(MatDenseRestoreColumnVecRead(MF, k, &vf));
    PetscCall(MatDenseRestoreColumnVecRead(AF, k, &tmp));
  }
  PetscCall(MatDestroy(&AF));
  if (!MC) {
    allocVc = PETSC_TRUE;
    PetscCall(MatTransposeMatMult(In, MF, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &MC));
  }
  /* Solve a LS system for each fine row
     MATT: Can we generalize to the case where Nc for the fine space
     is different from Nc for the coarse? */
  PetscCall(MatDenseGetArrayRead(MF, &af));
  PetscCall(MatDenseGetLDA(MF, &ldaf));
  PetscCall(MatDenseGetArrayRead(MC, &ac));
  PetscCall(MatDenseGetLDA(MC, &ldac));
  for (r = rStart; r < rEnd; ++r) {
    PetscInt           ncols, c;
    const PetscInt    *cols;
    const PetscScalar *vals;

    PetscCall(MatGetRow(In, r, &ncols, &cols, &vals));
    for (k = 0; k < Nc; ++k) {
      /* Need to fit lowest mode exactly */
      const PetscReal wk = ((ncols == 1) && (k > 0)) ? 0.0 : PetscSqrtReal(w[k]);

      /* b_k = \sqrt{w_k} f^{F,k}_r */
      b[k] = wk * af[r - rStart + k * ldaf];
      /* A_{kc} = \sqrt{w_k} f^{C,k}_c */
      /* TODO Must pull out VecScatter from In, scatter in vc[k] values up front, and access them indirectly just as in MatMult() */
      for (c = 0; c < ncols; ++c) {
        /* This is element (k, c) of A */
        A[c * Nc + k] = wk * ac[cols[c] - rStart + k * ldac];
      }
    }
    PetscCall(PetscBLASIntCast(Nc, &M));
    PetscCall(PetscBLASIntCast(ncols, &N));
    if (debug) {
#if defined(PETSC_USE_COMPLEX)
      PetscScalar *tmp;
      PetscInt     j;

      PetscCall(DMGetWorkArray(dmc, Nc, MPIU_SCALAR, (void *)&tmp));
      for (j = 0; j < Nc; ++j) tmp[j] = w[j];
      PetscCall(DMPrintCellMatrix(r, "Interpolator Row LS weights", Nc, 1, tmp));
      PetscCall(DMPrintCellMatrix(r, "Interpolator Row LS matrix", Nc, ncols, A));
      for (j = 0; j < Nc; ++j) tmp[j] = b[j];
      PetscCall(DMPrintCellMatrix(r, "Interpolator Row LS rhs", Nc, 1, tmp));
      PetscCall(DMRestoreWorkArray(dmc, Nc, MPIU_SCALAR, (void *)&tmp));
#else
      PetscCall(DMPrintCellMatrix(r, "Interpolator Row LS weights", Nc, 1, w));
      PetscCall(DMPrintCellMatrix(r, "Interpolator Row LS matrix", Nc, ncols, A));
      PetscCall(DMPrintCellMatrix(r, "Interpolator Row LS rhs", Nc, 1, b));
#endif
    }
#if defined(PETSC_USE_COMPLEX)
    /* ZGELSS( M, N, NRHS, A, LDA, B, LDB, S, RCOND, RANK, WORK, LWORK, RWORK, INFO) */
    PetscCallBLAS("LAPACKgelss", LAPACKgelss_(&M, &N, &one, A, &M, b, M > N ? &M : &N, sing, &rcond, &irank, workscalar, &lwrk, workreal, &info));
#else
    /* DGELSS( M, N, NRHS, A, LDA, B, LDB, S, RCOND, RANK, WORK, LWORK, INFO) */
    PetscCallBLAS("LAPACKgelss", LAPACKgelss_(&M, &N, &one, A, &M, b, M > N ? &M : &N, sing, &rcond, &irank, workscalar, &lwrk, &info));
#endif
    PetscCheck(info >= 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "Bad argument to GELSS");
    PetscCheck(info <= 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "SVD failed to converge");
    if (debug) {
      PetscCall(PetscPrintf(PETSC_COMM_SELF, "rank %" PetscBLASInt_FMT " rcond %g\n", irank, (double)rcond));
#if defined(PETSC_USE_COMPLEX)
      {
        PetscScalar *tmp;
        PetscInt     j;

        PetscCall(DMGetWorkArray(dmc, Nc, MPIU_SCALAR, (void *)&tmp));
        for (j = 0; j < PetscMin(Nc, ncols); ++j) tmp[j] = sing[j];
        PetscCall(DMPrintCellMatrix(r, "Interpolator Row LS singular values", PetscMin(Nc, ncols), 1, tmp));
        PetscCall(DMRestoreWorkArray(dmc, Nc, MPIU_SCALAR, (void *)&tmp));
      }
#else
      PetscCall(DMPrintCellMatrix(r, "Interpolator Row LS singular values", PetscMin(Nc, ncols), 1, sing));
#endif
      PetscCall(DMPrintCellMatrix(r, "Interpolator Row LS old P", ncols, 1, vals));
      PetscCall(DMPrintCellMatrix(r, "Interpolator Row LS sol", ncols, 1, b));
    }
    PetscCall(MatSetValues(*InAdapt, 1, &r, ncols, cols, b, INSERT_VALUES));
    PetscCall(MatRestoreRow(In, r, &ncols, &cols, &vals));
  }
  PetscCall(MatDenseRestoreArrayRead(MF, &af));
  PetscCall(MatDenseRestoreArrayRead(MC, &ac));
  PetscCall(PetscFree7(A, b, w, x, sing, workscalar, workreal));
  if (allocVc) PetscCall(MatDestroy(&MC));
  PetscCall(MatAssemblyBegin(*InAdapt, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(*InAdapt, MAT_FINAL_ASSEMBLY));
  PetscCall(PetscLogEventEnd(DM_AdaptInterpolator, dmc, dmf, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
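
/* Verify that the interpolator In reproduces the sample vectors: for each column k, check that
   the residual MF[:,k] - In * MC[:,k] stays below the given tolerance in the 2-norm. */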
PetscErrorCode DMCheckInterpolator(DM dmf, Mat In, Mat MC, Mat MF, PetscReal tol)
{
  Vec       tmp;
  PetscReal norminf, norm2, maxnorminf = 0.0, maxnorm2 = 0.0;
  PetscInt  k, Nc;

  PetscFunctionBegin;
  PetscCall(DMGetGlobalVector(dmf, &tmp));
  PetscCall(MatViewFromOptions(In, NULL, "-dm_interpolator_adapt_error"));
  PetscCall(MatGetSize(MF, NULL, &Nc));
  for (k = 0; k < Nc; ++k) {
    Vec vc, vf;

    PetscCall(MatDenseGetColumnVecRead(MC, k, &vc));
    PetscCall(MatDenseGetColumnVecRead(MF, k, &vf));
    PetscCall(MatMult(In, vc, tmp));
    PetscCall(VecAXPY(tmp, -1.0, vf));
    PetscCall(VecViewFromOptions(vc, NULL, "-dm_interpolator_adapt_error"));
    PetscCall(VecViewFromOptions(vf, NULL, "-dm_interpolator_adapt_error"));
    PetscCall(VecViewFromOptions(tmp, NULL, "-dm_interpolator_adapt_error"));
    PetscCall(VecNorm(tmp, NORM_INFINITY, &norminf));
    PetscCall(VecNorm(tmp, NORM_2, &norm2));
    maxnorminf = PetscMax(maxnorminf, norminf);
    maxnorm2   = PetscMax(maxnorm2, norm2);
    PetscCall(PetscPrintf(PetscObjectComm((PetscObject)dmf), "Coarse vec %" PetscInt_FMT " ||vf - P vc||_\\infty %g, ||vf - P vc||_2 %g\n", k, (double)norminf, (double)norm2));
    PetscCall(MatDenseRestoreColumnVecRead(MC, k, &vc));
    PetscCall(MatDenseRestoreColumnVecRead(MF, k, &vf));
  }
  PetscCall(DMRestoreGlobalVector(dmf, &tmp));
  PetscCheck(maxnorm2 <= tol, PetscObjectComm((PetscObject)dmf), PETSC_ERR_ARG_WRONG, "max_k ||vf_k - P vc_k||_2 %g > tol %g", (double)maxnorm2, (double)tol);
  PetscFunctionReturn(PETSC_SUCCESS);
}

// Project particles to field
//   M_f u_f = M_p u_p
//   u_f = M^{-1}_f M_p u_p
static PetscErrorCode DMSwarmProjectField_Conservative_PLEX(DM sw, DM dm, Vec u_p, Vec u_f)
{
  KSP         ksp;
  Mat         M_f, M_p; // TODO Should cache these
  Vec         rhs;
  const char *prefix;

  PetscFunctionBegin;
  PetscCall(DMCreateMassMatrix(dm, dm, &M_f));
  PetscCall(DMCreateMassMatrix(sw, dm, &M_p));
  PetscCall(DMGetGlobalVector(dm, &rhs));
  PetscCall(MatMultTranspose(M_p, u_p, rhs));

  PetscCall(KSPCreate(PetscObjectComm((PetscObject)sw), &ksp));
  PetscCall(PetscObjectGetOptionsPrefix((PetscObject)sw, &prefix));
  PetscCall(KSPSetOptionsPrefix(ksp, prefix));
  PetscCall(KSPAppendOptionsPrefix(ksp, "ptof_"));
  PetscCall(KSPSetFromOptions(ksp));

  PetscCall(KSPSetOperators(ksp, M_f, M_f));
  PetscCall(KSPSolve(ksp, rhs, u_f));

  PetscCall(DMRestoreGlobalVector(dm, &rhs));
  PetscCall(KSPDestroy(&ksp));
  PetscCall(MatDestroy(&M_f));
  PetscCall(MatDestroy(&M_p));
  PetscFunctionReturn(PETSC_SUCCESS);
}

// Project field to particles
//   M_p u_p = M_f u_f
//   u_p = M^+_p M_f u_f
static PetscErrorCode DMSwarmProjectParticles_Conservative_PLEX(DM sw, DM dm, Vec u_p, Vec u_f)
{
  KSP         ksp;
  PC          pc;
  Mat         M_f, M_p, PM_p;
  Vec         rhs;
  PetscBool   isBjacobi;
  const char *prefix;

  PetscFunctionBegin;
  PetscCall(DMCreateMassMatrix(dm, dm, &M_f));
  PetscCall(DMCreateMassMatrix(sw, dm, &M_p));
  PetscCall(DMGetGlobalVector(dm, &rhs));
  PetscCall(MatMultTranspose(M_f, u_f, rhs));

  PetscCall(KSPCreate(PetscObjectComm((PetscObject)sw), &ksp));
  PetscCall(PetscObjectGetOptionsPrefix((PetscObject)sw, &prefix));
  PetscCall(KSPSetOptionsPrefix(ksp, prefix));
  PetscCall(KSPAppendOptionsPrefix(ksp, "ftop_"));
  PetscCall(KSPSetFromOptions(ksp));

  PetscCall(KSPGetPC(ksp, &pc));
  PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCBJACOBI, &isBjacobi));
  if (isBjacobi) {
    PetscCall(DMSwarmCreateMassMatrixSquare(sw, dm, &PM_p));
  } else {
    PM_p = M_p;
    PetscCall(PetscObjectReference((PetscObject)PM_p));
  }
  PetscCall(KSPSetOperators(ksp, M_p, PM_p));
  PetscCall(KSPSolveTranspose(ksp, rhs, u_p));

  PetscCall(DMRestoreGlobalVector(dm, &rhs));
  PetscCall(KSPDestroy(&ksp));
  PetscCall(MatDestroy(&M_f));
  PetscCall(MatDestroy(&M_p));
  PetscCall(MatDestroy(&PM_p));
  PetscFunctionReturn(PETSC_SUCCESS);
}
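
/* Project a single named swarm field to or from a DMPLEX cell DM: wrap the swarm field in a
   global vector and apply the conservative projection in the direction given by the scatter mode. */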
static PetscErrorCode DMSwarmProjectFields_Plex_Internal(DM sw, DM dm, PetscInt Nf, const char *fieldnames[], Vec vec, ScatterMode mode)
{
  PetscDS  ds;
  Vec      u;
  PetscInt f = 0, bs, *Nc;

  PetscFunctionBegin;
  PetscCall(DMGetDS(dm, &ds));
  PetscCall(PetscDSGetComponents(ds, &Nc));
  PetscCall(PetscCitationsRegister(SwarmProjCitation, &SwarmProjcite));
  PetscCheck(Nf == 1, PetscObjectComm((PetscObject)sw), PETSC_ERR_SUP, "Currently supported only for a single field");
  PetscCall(DMSwarmVectorDefineField(sw, fieldnames[f]));
  PetscCall(DMSwarmCreateGlobalVectorFromField(sw, fieldnames[f], &u));
  PetscCall(VecGetBlockSize(u, &bs));
  PetscCheck(Nc[f] == bs, PetscObjectComm((PetscObject)sw), PETSC_ERR_SUP, "Field %" PetscInt_FMT " components %" PetscInt_FMT " != %" PetscInt_FMT " blocksize for swarm field %s", f, Nc[f], bs, fieldnames[f]);
  if (mode == SCATTER_FORWARD) {
    PetscCall(DMSwarmProjectField_Conservative_PLEX(sw, dm, u, vec));
  } else {
    PetscCall(DMSwarmProjectParticles_Conservative_PLEX(sw, dm, u, vec));
  }
  PetscCall(DMSwarmDestroyGlobalVectorFromField(sw, fieldnames[0], &u));
  PetscFunctionReturn(PETSC_SUCCESS);
}
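
/* Approximate projection for a 2D Q1 DMDA: accumulate basis-weighted particle values at the
   vertices of each particle's cell, then normalize by the accumulated basis weights, giving a
   local weighted average of the particle field at each vertex. */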
static PetscErrorCode DMSwarmProjectField_ApproxQ1_DA_2D(DM swarm, PetscReal *swarm_field, DM dm, Vec v_field)
{
  Vec                v_field_l, denom_l, coor_l, denom;
  PetscScalar       *_field_l, *_denom_l;
  PetscInt           k, p, e, npoints, nel, npe;
  PetscInt          *mpfield_cell;
  PetscReal         *mpfield_coor;
  const PetscInt    *element_list;
  const PetscInt    *element;
  PetscScalar        xi_p[2], Ni[4];
  const PetscScalar *_coor;

  PetscFunctionBegin;
  PetscCall(VecZeroEntries(v_field));

  PetscCall(DMGetLocalVector(dm, &v_field_l));
  PetscCall(DMGetGlobalVector(dm, &denom));
  PetscCall(DMGetLocalVector(dm, &denom_l));
  PetscCall(VecZeroEntries(v_field_l));
  PetscCall(VecZeroEntries(denom));
  PetscCall(VecZeroEntries(denom_l));

  PetscCall(VecGetArray(v_field_l, &_field_l));
  PetscCall(VecGetArray(denom_l, &_denom_l));

  PetscCall(DMGetCoordinatesLocal(dm, &coor_l));
  PetscCall(VecGetArrayRead(coor_l, &_coor));

  PetscCall(DMDAGetElements(dm, &nel, &npe, &element_list));
  PetscCall(DMSwarmGetLocalSize(swarm, &npoints));
  PetscCall(DMSwarmGetField(swarm, DMSwarmPICField_coor, NULL, NULL, (void **)&mpfield_coor));
  PetscCall(DMSwarmGetField(swarm, DMSwarmPICField_cellid, NULL, NULL, (void **)&mpfield_cell));

  for (p = 0; p < npoints; p++) {
    PetscReal         *coor_p;
    const PetscScalar *x0;
    const PetscScalar *x2;
    PetscScalar        dx[2];

    e       = mpfield_cell[p];
    coor_p  = &mpfield_coor[2 * p];
    element = &element_list[npe * e];

    /* compute local coordinates: (xp-x0)/dx = (xip+1)/2 */
    x0 = &_coor[2 * element[0]];
    x2 = &_coor[2 * element[2]];

    dx[0] = x2[0] - x0[0];
    dx[1] = x2[1] - x0[1];

    xi_p[0] = 2.0 * (coor_p[0] - x0[0]) / dx[0] - 1.0;
    xi_p[1] = 2.0 * (coor_p[1] - x0[1]) / dx[1] - 1.0;

    /* evaluate basis functions */
    Ni[0] = 0.25 * (1.0 - xi_p[0]) * (1.0 - xi_p[1]);
    Ni[1] = 0.25 * (1.0 + xi_p[0]) * (1.0 - xi_p[1]);
    Ni[2] = 0.25 * (1.0 + xi_p[0]) * (1.0 + xi_p[1]);
    Ni[3] = 0.25 * (1.0 - xi_p[0]) * (1.0 + xi_p[1]);

    for (k = 0; k < npe; k++) {
      _field_l[element[k]] += Ni[k] * swarm_field[p];
      _denom_l[element[k]] += Ni[k];
    }
  }

  PetscCall(DMSwarmRestoreField(swarm, DMSwarmPICField_cellid, NULL, NULL, (void **)&mpfield_cell));
  PetscCall(DMSwarmRestoreField(swarm, DMSwarmPICField_coor, NULL, NULL, (void **)&mpfield_coor));
  PetscCall(DMDARestoreElements(dm, &nel, &npe, &element_list));
  PetscCall(VecRestoreArrayRead(coor_l, &_coor));
  PetscCall(VecRestoreArray(v_field_l, &_field_l));
  PetscCall(VecRestoreArray(denom_l, &_denom_l));

  PetscCall(DMLocalToGlobalBegin(dm, v_field_l, ADD_VALUES, v_field));
  PetscCall(DMLocalToGlobalEnd(dm, v_field_l, ADD_VALUES, v_field));
  PetscCall(DMLocalToGlobalBegin(dm, denom_l, ADD_VALUES, denom));
  PetscCall(DMLocalToGlobalEnd(dm, denom_l, ADD_VALUES, denom));

  PetscCall(VecPointwiseDivide(v_field, v_field, denom));

  PetscCall(DMRestoreLocalVector(dm, &v_field_l));
  PetscCall(DMRestoreLocalVector(dm, &denom_l));
  PetscCall(DMRestoreGlobalVector(dm, &denom));
  PetscFunctionReturn(PETSC_SUCCESS);
}
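
/* Dispatch the DMDA projection: check the element type and scatter direction, then project each
   requested swarm field with the 2D Q1 averaging kernel (3D is not supported). */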
static PetscErrorCode DMSwarmProjectFields_DA_Internal(DM swarm, DM celldm, PetscInt nfields, DMSwarmDataField dfield[], Vec vecs[], ScatterMode mode)
{
  PetscInt        f, dim;
  DMDAElementType etype;

  PetscFunctionBegin;
  PetscCall(DMDAGetElementType(celldm, &etype));
  PetscCheck(etype != DMDA_ELEMENT_P1, PetscObjectComm((PetscObject)swarm), PETSC_ERR_SUP, "Only Q1 DMDA supported");
  PetscCheck(mode == SCATTER_FORWARD, PetscObjectComm((PetscObject)swarm), PETSC_ERR_SUP, "Mapping the continuum to particles is not currently supported for DMDA");

  PetscCall(DMGetDimension(swarm, &dim));
  switch (dim) {
  case 2:
    for (f = 0; f < nfields; f++) {
      PetscReal *swarm_field;

      PetscCall(DMSwarmDataFieldGetEntries(dfield[f], (void **)&swarm_field));
      PetscCall(DMSwarmProjectField_ApproxQ1_DA_2D(swarm, swarm_field, celldm, vecs[f]));
    }
    break;
  case 3:
    SETERRQ(PetscObjectComm((PetscObject)swarm), PETSC_ERR_SUP, "No support for 3D");
  default:
    break;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  DMSwarmProjectFields - Project a set of swarm fields onto another `DM`

  Collective

  Input Parameters:
+ sw         - the `DMSWARM`
. dm         - the `DM`, or `NULL` to use the cell `DM`
. nfields    - the number of swarm fields to project
. fieldnames - the textual names of the swarm fields to project
. fields     - an array of `Vec`'s of length `nfields`
- mode       - if `SCATTER_FORWARD` then map particles to the continuum, and if `SCATTER_REVERSE` map the continuum to particles

  Level: beginner

  Notes:
  Currently, there are two available projection methods. The first is conservative projection, used for a `DMPLEX` cell `DM`.
  The second is averaging, used for a `DMDA` cell `DM`:

$$
  \phi_i = \sum_{p=0}^{np} N_i(x_p) \phi_p dJ / \sum_{p=0}^{np} N_i(x_p) dJ
$$

  where $\phi_p$ is the swarm field at point $p$, $N_i()$ is the cell `DM` basis function at vertex $i$, $dJ$ is the determinant of the cell Jacobian and
  $\phi_i$ is the projected vertex value of the field $\phi$.

  The user is responsible for destroying both the array and the individual `Vec` objects.

  For the `DMPLEX` case, there is only a single vector, so the field layout in the `DMPLEX` must match the requested fields from the `DMSwarm`.

  For averaging projection, only swarm fields registered with data type `PETSC_REAL` can be projected onto the cell `DM`, and only swarm fields of block size = 1 can currently be projected.
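
  Example Usage:
  A minimal sketch projecting one swarm field onto a `DMPLEX` cell `DM`; the field name "density" is illustrative, not part of the API
.vb
  const char *fieldnames[1] = {"density"};
  Vec         fields[1];

  PetscCall(DMGetGlobalVector(dm, &fields[0]));
  PetscCall(DMSwarmProjectFields(sw, dm, 1, fieldnames, fields, SCATTER_FORWARD));
  PetscCall(DMRestoreGlobalVector(dm, &fields[0]));
.ve
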
.seealso: [](ch_dmbase), `DMSWARM`, `DMSwarmSetType()`, `DMSwarmSetCellDM()`, `DMSwarmType`
@*/
PetscErrorCode DMSwarmProjectFields(DM sw, DM dm, PetscInt nfields, const char *fieldnames[], Vec fields[], ScatterMode mode)
{
  DM_Swarm         *swarm = (DM_Swarm *)sw->data;
  DMSwarmDataField *gfield;
  PetscBool         isDA, isPlex;
  MPI_Comm          comm;

  PetscFunctionBegin;
  DMSWARMPICVALID(sw);
  PetscCall(PetscObjectGetComm((PetscObject)sw, &comm));
  if (!dm) PetscCall(DMSwarmGetCellDM(sw, &dm));
  PetscCall(PetscObjectTypeCompare((PetscObject)dm, DMDA, &isDA));
  PetscCall(PetscObjectTypeCompare((PetscObject)dm, DMPLEX, &isPlex));
  PetscCall(PetscMalloc1(nfields, &gfield));
  for (PetscInt f = 0; f < nfields; ++f) PetscCall(DMSwarmDataBucketGetDMSwarmDataFieldByName(swarm->db, fieldnames[f], &gfield[f]));

  if (isDA) {
    for (PetscInt f = 0; f < nfields; f++) {
      PetscCheck(gfield[f]->petsc_type == PETSC_REAL, comm, PETSC_ERR_SUP, "Projection only valid for fields using a data type = PETSC_REAL");
      PetscCheck(gfield[f]->bs == 1, comm, PETSC_ERR_SUP, "Projection only valid for fields with block size = 1");
    }
    PetscCall(DMSwarmProjectFields_DA_Internal(sw, dm, nfields, gfield, fields, mode));
  } else if (isPlex) {
    PetscInt Nf;

    PetscCall(DMGetNumFields(dm, &Nf));
    PetscCheck(Nf == nfields, comm, PETSC_ERR_ARG_WRONG, "Number of DM fields %" PetscInt_FMT " != %" PetscInt_FMT " number of requested Swarm fields", Nf, nfields);
    PetscCall(DMSwarmProjectFields_Plex_Internal(sw, dm, nfields, fieldnames, fields[0], mode));
  } else SETERRQ(PetscObjectComm((PetscObject)sw), PETSC_ERR_SUP, "Only supported for cell DMs of type DMDA and DMPLEX");

  PetscCall(PetscFree(gfield));
  PetscFunctionReturn(PETSC_SUCCESS);
}