Actual source code: dmproject.c
1: #include <petsc/private/dmimpl.h>
2: #include <petscdm.h>
3: #include <petscdmda.h>
4: #include <petscdmplex.h>
5: #include <petscdmswarm.h>
6: #include <petscksp.h>
7: #include <petscblaslapack.h>
9: #include <petsc/private/dmswarmimpl.h>
10: #include "../src/dm/impls/swarm/data_bucket.h" // For DataBucket internals
11: #include "petscmath.h"
13: typedef struct _projectConstraintsCtx {
14: DM dm;
15: Vec mask;
16: } projectConstraintsCtx;
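17: /* Apply the normal-equations operator C^T D C, where C is the global-to-local map and D is the optional mask; used by the shell matrix below */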
18: static PetscErrorCode MatMult_GlobalToLocalNormal(Mat CtC, Vec x, Vec y)
19: {
20: DM dm;
21: Vec local, mask;
22: projectConstraintsCtx *ctx;
24: PetscFunctionBegin;
25: PetscCall(MatShellGetContext(CtC, &ctx));
26: dm = ctx->dm;
27: mask = ctx->mask;
28: PetscCall(DMGetLocalVector(dm, &local));
29: PetscCall(DMGlobalToLocalBegin(dm, x, INSERT_VALUES, local));
30: PetscCall(DMGlobalToLocalEnd(dm, x, INSERT_VALUES, local));
31: if (mask) PetscCall(VecPointwiseMult(local, mask, local));
32: PetscCall(VecSet(y, 0.));
33: PetscCall(DMLocalToGlobalBegin(dm, local, ADD_VALUES, y));
34: PetscCall(DMLocalToGlobalEnd(dm, local, ADD_VALUES, y));
35: PetscCall(DMRestoreLocalVector(dm, &local));
36: PetscFunctionReturn(PETSC_SUCCESS);
37: }
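38: /* Constant function returning 1 in every field component, used to build the default mask for non-DMPLEX DMs */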
39: static PetscErrorCode DMGlobalToLocalSolve_project1(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nf, PetscScalar u[], void *ctx)
40: {
41: PetscInt f;
43: PetscFunctionBegin;
44: for (f = 0; f < Nf; f++) u[f] = 1.;
45: PetscFunctionReturn(PETSC_SUCCESS);
46: }
48: /*@
49: DMGlobalToLocalSolve - Solve for the global vector that is mapped to a given local vector by `DMGlobalToLocalBegin()`/`DMGlobalToLocalEnd()` with mode
50: `INSERT_VALUES`.
52: Collective
54: Input Parameters:
55: + dm - The `DM` object
56: . x - The local vector
57: - y - The global vector: the input value of this variable is used as an initial guess
59: Output Parameter:
60: . y - The least-squares solution
62: Level: advanced
64: Note:
65: It is assumed that the sum of all the local vector sizes is greater than or equal to the global vector size, so the solution is
66: a least-squares solution. It is also assumed that `DMLocalToGlobalBegin()`/`DMLocalToGlobalEnd()` with mode `ADD_VALUES` is the adjoint of the
67: global-to-local map, so that the least-squares solution may be found by the normal equations.
69: If the `DM` is of type `DMPLEX`, then `y` is the solution of $L^T D L y = L^T D x$, where $L$ is the global-to-local map and $D$ is a diagonal mask that is 1 for every point in
70: the union of the closures of the local cells and 0 otherwise. This difference is only relevant if there are anchor points that are not in the
71: closure of any local cell (see `DMPlexGetAnchors()`/`DMPlexSetAnchors()`).
73: Developer Notes:
75: If this solves for a global vector from a local vector, why is it not called `DMLocalToGlobalSolve()`?
77: .seealso: [](ch_dmbase), `DM`, `DMGlobalToLocalBegin()`, `DMGlobalToLocalEnd()`, `DMLocalToGlobalBegin()`, `DMLocalToGlobalEnd()`, `DMPlexGetAnchors()`, `DMPlexSetAnchors()`
78: @*/
79: PetscErrorCode DMGlobalToLocalSolve(DM dm, Vec x, Vec y)
80: {
81: Mat CtC;
82: PetscInt n, N, cStart, cEnd, c;
83: PetscBool isPlex;
84: KSP ksp;
85: PC pc;
86: Vec global, mask = NULL;
87: projectConstraintsCtx ctx;
89: PetscFunctionBegin;
90: PetscCall(PetscObjectTypeCompare((PetscObject)dm, DMPLEX, &isPlex));
91: if (isPlex) {
92: /* mark points in the closure */
93: PetscCall(DMCreateLocalVector(dm, &mask));
94: PetscCall(VecSet(mask, 0.0));
95: PetscCall(DMPlexGetSimplexOrBoxCells(dm, 0, &cStart, &cEnd));
96: if (cEnd > cStart) {
97: PetscScalar *ones;
98: PetscInt numValues, i;
100: PetscCall(DMPlexVecGetClosure(dm, NULL, mask, cStart, &numValues, NULL));
101: PetscCall(PetscMalloc1(numValues, &ones));
102: for (i = 0; i < numValues; i++) ones[i] = 1.;
103: for (c = cStart; c < cEnd; c++) PetscCall(DMPlexVecSetClosure(dm, NULL, mask, c, ones, INSERT_VALUES));
104: PetscCall(PetscFree(ones));
105: }
106: } else {
107: PetscBool hasMask;
109: PetscCall(DMHasNamedLocalVector(dm, "_DMGlobalToLocalSolve_mask", &hasMask));
110: if (!hasMask) {
111: PetscErrorCode (**func)(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nf, PetscScalar *u, void *ctx);
112: void **ctx;
113: PetscInt Nf, f;
115: PetscCall(DMGetNumFields(dm, &Nf));
116: PetscCall(PetscMalloc2(Nf, &func, Nf, &ctx));
117: for (f = 0; f < Nf; ++f) {
118: func[f] = DMGlobalToLocalSolve_project1;
119: ctx[f] = NULL;
120: }
121: PetscCall(DMGetNamedLocalVector(dm, "_DMGlobalToLocalSolve_mask", &mask));
122: PetscCall(DMProjectFunctionLocal(dm, 0.0, func, ctx, INSERT_ALL_VALUES, mask));
123: PetscCall(DMRestoreNamedLocalVector(dm, "_DMGlobalToLocalSolve_mask", &mask));
124: PetscCall(PetscFree2(func, ctx));
125: }
126: PetscCall(DMGetNamedLocalVector(dm, "_DMGlobalToLocalSolve_mask", &mask));
127: }
128: ctx.dm = dm;
129: ctx.mask = mask;
130: PetscCall(VecGetSize(y, &N));
131: PetscCall(VecGetLocalSize(y, &n));
132: PetscCall(MatCreate(PetscObjectComm((PetscObject)dm), &CtC));
133: PetscCall(MatSetSizes(CtC, n, n, N, N));
134: PetscCall(MatSetType(CtC, MATSHELL));
135: PetscCall(MatSetUp(CtC));
136: PetscCall(MatShellSetContext(CtC, &ctx));
137: PetscCall(MatShellSetOperation(CtC, MATOP_MULT, (PetscErrorCodeFn *)MatMult_GlobalToLocalNormal));
138: PetscCall(KSPCreate(PetscObjectComm((PetscObject)dm), &ksp));
139: PetscCall(KSPSetOperators(ksp, CtC, CtC));
140: PetscCall(KSPSetType(ksp, KSPCG));
141: PetscCall(KSPGetPC(ksp, &pc));
142: PetscCall(PCSetType(pc, PCNONE));
143: PetscCall(KSPSetInitialGuessNonzero(ksp, PETSC_TRUE));
144: PetscCall(KSPSetUp(ksp));
145: PetscCall(DMGetGlobalVector(dm, &global));
146: PetscCall(VecSet(global, 0.));
147: if (mask) PetscCall(VecPointwiseMult(x, mask, x));
148: PetscCall(DMLocalToGlobalBegin(dm, x, ADD_VALUES, global));
149: PetscCall(DMLocalToGlobalEnd(dm, x, ADD_VALUES, global));
150: PetscCall(KSPSolve(ksp, global, y));
151: PetscCall(DMRestoreGlobalVector(dm, &global));
152: /* clean up */
153: PetscCall(KSPDestroy(&ksp));
154: PetscCall(MatDestroy(&CtC));
155: if (isPlex) {
156: PetscCall(VecDestroy(&mask));
157: } else {
158: PetscCall(DMRestoreNamedLocalVector(dm, "_DMGlobalToLocalSolve_mask", &mask));
159: }
160: PetscFunctionReturn(PETSC_SUCCESS);
161: }
163: /*@C
164: DMProjectField - This projects a given function of the input fields into the function space provided by a `DM`, putting the coefficients in a global vector.
166: Collective
168: Input Parameters:
169: + dm - The `DM`
170: . time - The time
171: . U - The input field vector
172: . funcs - The functions to evaluate, one per field, see `PetscPointFn`
173: - mode - The insertion mode for values
175: Output Parameter:
176: . X - The output vector
178: Level: advanced
180: Note:
181: There are three different `DM`s that potentially interact in this function. The output `dm` specifies the layout of the values calculated by the function.
182: The input `DM`, attached to `U`, may be different. For example, you can input the solution over the full domain, but output over a piece of the boundary, or
183: a subdomain. You can also output a different number of fields than the input, with different discretizations. Last, the auxiliary `DM`, attached to the
184: auxiliary field vector, which is itself attached to `dm`, can also be different. It can have a different topology, number of fields, and discretization.
186: .seealso: [](ch_dmbase), `DM`, `PetscPointFn`, `DMProjectFieldLocal()`, `DMProjectFieldLabelLocal()`, `DMProjectFunction()`, `DMComputeL2Diff()`
187: @*/
188: PetscErrorCode DMProjectField(DM dm, PetscReal time, Vec U, PetscPointFn **funcs, InsertMode mode, Vec X)
189: {
190: Vec localX, localU;
191: DM dmIn;
193: PetscFunctionBegin;
195: PetscCall(DMGetLocalVector(dm, &localX));
196: /* We currently check whether localU == localX to see if we need to apply BC */
197: if (U != X) {
198: PetscCall(VecGetDM(U, &dmIn));
199: PetscCall(DMGetLocalVector(dmIn, &localU));
200: } else {
201: dmIn = dm;
202: localU = localX;
203: }
204: PetscCall(DMGlobalToLocalBegin(dmIn, U, INSERT_VALUES, localU));
205: PetscCall(DMGlobalToLocalEnd(dmIn, U, INSERT_VALUES, localU));
206: PetscCall(DMProjectFieldLocal(dm, time, localU, funcs, mode, localX));
207: PetscCall(DMLocalToGlobalBegin(dm, localX, mode, X));
208: PetscCall(DMLocalToGlobalEnd(dm, localX, mode, X));
209: if (mode == INSERT_VALUES || mode == INSERT_ALL_VALUES || mode == INSERT_BC_VALUES) {
210: Mat cMat;
212: PetscCall(DMGetDefaultConstraints(dm, NULL, &cMat, NULL));
213: if (cMat) PetscCall(DMGlobalToLocalSolve(dm, localX, X));
214: }
215: PetscCall(DMRestoreLocalVector(dm, &localX));
216: if (U != X) PetscCall(DMRestoreLocalVector(dmIn, &localU));
217: PetscFunctionReturn(PETSC_SUCCESS);
218: }
220: /********************* Adaptive Interpolation **************************/
222: /* See the discussion of Adaptive Interpolation in manual/high_level_mg.rst */
223: PetscErrorCode DMAdaptInterpolator(DM dmc, DM dmf, Mat In, KSP smoother, Mat MF, Mat MC, Mat *InAdapt, void *user)
224: {
225: Mat globalA, AF;
226: Vec tmp;
227: const PetscScalar *af, *ac;
228: PetscScalar *A, *b, *x, *workscalar;
229: PetscReal *w, *sing, *workreal, rcond = PETSC_SMALL;
230: PetscBLASInt M, N, one = 1, irank, lwrk, info;
231: PetscInt debug = 0, rStart, rEnd, r, maxcols = 0, k, Nc, ldac, ldaf;
232: PetscBool allocVc = PETSC_FALSE;
234: PetscFunctionBegin;
235: PetscCall(PetscLogEventBegin(DM_AdaptInterpolator, dmc, dmf, 0, 0));
236: PetscCall(PetscOptionsGetInt(NULL, NULL, "-dm_interpolator_adapt_debug", &debug, NULL));
237: PetscCall(MatGetSize(MF, NULL, &Nc));
238: PetscCall(MatDuplicate(In, MAT_SHARE_NONZERO_PATTERN, InAdapt));
239: PetscCall(MatGetOwnershipRange(In, &rStart, &rEnd));
240: #if 0
241: PetscCall(MatGetMaxRowLen(In, &maxcols));
242: #else
243: for (r = rStart; r < rEnd; ++r) {
244: PetscInt ncols;
246: PetscCall(MatGetRow(In, r, &ncols, NULL, NULL));
247: maxcols = PetscMax(maxcols, ncols);
248: PetscCall(MatRestoreRow(In, r, &ncols, NULL, NULL));
249: }
250: #endif
251: if (Nc < maxcols) PetscCall(PetscPrintf(PETSC_COMM_SELF, "The number of input vectors %" PetscInt_FMT " < %" PetscInt_FMT " the maximum number of column entries\n", Nc, maxcols));
252: for (k = 0; k < Nc && debug; ++k) {
253: char name[PETSC_MAX_PATH_LEN];
254: const char *prefix;
255: Vec vc, vf;
257: PetscCall(PetscObjectGetOptionsPrefix((PetscObject)smoother, &prefix));
259: if (MC) {
260: PetscCall(PetscSNPrintf(name, PETSC_MAX_PATH_LEN, "%sCoarse Vector %" PetscInt_FMT, prefix ? prefix : "", k));
261: PetscCall(MatDenseGetColumnVecRead(MC, k, &vc));
262: PetscCall(PetscObjectSetName((PetscObject)vc, name));
263: PetscCall(VecViewFromOptions(vc, NULL, "-dm_adapt_interp_view_coarse"));
264: PetscCall(MatDenseRestoreColumnVecRead(MC, k, &vc));
265: }
266: PetscCall(PetscSNPrintf(name, PETSC_MAX_PATH_LEN, "%sFine Vector %" PetscInt_FMT, prefix ? prefix : "", k));
267: PetscCall(MatDenseGetColumnVecRead(MF, k, &vf));
268: PetscCall(PetscObjectSetName((PetscObject)vf, name));
269: PetscCall(VecViewFromOptions(vf, NULL, "-dm_adapt_interp_view_fine"));
270: PetscCall(MatDenseRestoreColumnVecRead(MF, k, &vf));
271: }
272: PetscCall(PetscBLASIntCast(3 * PetscMin(Nc, maxcols) + PetscMax(2 * PetscMin(Nc, maxcols), PetscMax(Nc, maxcols)), &lwrk));
273: PetscCall(PetscMalloc7(Nc * maxcols, &A, PetscMax(Nc, maxcols), &b, Nc, &w, maxcols, &x, maxcols, &sing, lwrk, &workscalar, 5 * PetscMin(Nc, maxcols), &workreal));
274: /* w_k = \frac{\HC{v_k} B_l v_k}{\HC{v_k} A_l v_k} or the inverse Rayleigh quotient, which we calculate using \frac{\HC{v_k} v_k}{\HC{v_k} B^{-1}_l A_l v_k} */
275: PetscCall(KSPGetOperators(smoother, &globalA, NULL));
277: PetscCall(MatMatMult(globalA, MF, MAT_INITIAL_MATRIX, PETSC_DETERMINE, &AF));
278: for (k = 0; k < Nc; ++k) {
279: PetscScalar vnorm, vAnorm;
280: Vec vf;
282: w[k] = 1.0;
283: PetscCall(MatDenseGetColumnVecRead(MF, k, &vf));
284: PetscCall(MatDenseGetColumnVecRead(AF, k, &tmp));
285: PetscCall(VecDot(vf, vf, &vnorm));
286: #if 0
287: PetscCall(DMGetGlobalVector(dmf, &tmp2));
288: PetscCall(KSPSolve(smoother, tmp, tmp2));
289: PetscCall(VecDot(vf, tmp2, &vAnorm));
290: PetscCall(DMRestoreGlobalVector(dmf, &tmp2));
291: #else
292: PetscCall(VecDot(vf, tmp, &vAnorm));
293: #endif
294: w[k] = PetscRealPart(vnorm) / PetscRealPart(vAnorm);
295: PetscCall(MatDenseRestoreColumnVecRead(MF, k, &vf));
296: PetscCall(MatDenseRestoreColumnVecRead(AF, k, &tmp));
297: }
298: PetscCall(MatDestroy(&AF));
299: if (!MC) {
300: allocVc = PETSC_TRUE;
301: PetscCall(MatTransposeMatMult(In, MF, MAT_INITIAL_MATRIX, PETSC_DETERMINE, &MC));
302: }
303: /* Solve an LS system for each fine row
304: MATT: Can we generalize to the case where Nc for the fine space
305: is different from Nc for the coarse? */
306: PetscCall(MatDenseGetArrayRead(MF, &af));
307: PetscCall(MatDenseGetLDA(MF, &ldaf));
308: PetscCall(MatDenseGetArrayRead(MC, &ac));
309: PetscCall(MatDenseGetLDA(MC, &ldac));
310: for (r = rStart; r < rEnd; ++r) {
311: PetscInt ncols, c;
312: const PetscInt *cols;
313: const PetscScalar *vals;
315: PetscCall(MatGetRow(In, r, &ncols, &cols, &vals));
316: for (k = 0; k < Nc; ++k) {
317: /* Need to fit lowest mode exactly */
318: const PetscReal wk = ((ncols == 1) && (k > 0)) ? 0.0 : PetscSqrtReal(w[k]);
320: /* b_k = \sqrt{w_k} f^{F,k}_r */
321: b[k] = wk * af[r - rStart + k * ldaf];
322: /* A_{kc} = \sqrt{w_k} f^{C,k}_c */
323: /* TODO Must pull out VecScatter from In, scatter in vc[k] values up front, and access them indirectly just as in MatMult() */
324: for (c = 0; c < ncols; ++c) {
325: /* This is element (k, c) of A */
326: A[c * Nc + k] = wk * ac[cols[c] - rStart + k * ldac];
327: }
328: }
329: PetscCall(PetscBLASIntCast(Nc, &M));
330: PetscCall(PetscBLASIntCast(ncols, &N));
331: if (debug) {
332: #if defined(PETSC_USE_COMPLEX)
333: PetscScalar *tmp;
334: PetscInt j;
336: PetscCall(DMGetWorkArray(dmc, Nc, MPIU_SCALAR, (void *)&tmp));
337: for (j = 0; j < Nc; ++j) tmp[j] = w[j];
338: PetscCall(DMPrintCellMatrix(r, "Interpolator Row LS weights", Nc, 1, tmp));
339: PetscCall(DMPrintCellMatrix(r, "Interpolator Row LS matrix", Nc, ncols, A));
340: for (j = 0; j < Nc; ++j) tmp[j] = b[j];
341: PetscCall(DMPrintCellMatrix(r, "Interpolator Row LS rhs", Nc, 1, tmp));
342: PetscCall(DMRestoreWorkArray(dmc, Nc, MPIU_SCALAR, (void *)&tmp));
343: #else
344: PetscCall(DMPrintCellMatrix(r, "Interpolator Row LS weights", Nc, 1, w));
345: PetscCall(DMPrintCellMatrix(r, "Interpolator Row LS matrix", Nc, ncols, A));
346: PetscCall(DMPrintCellMatrix(r, "Interpolator Row LS rhs", Nc, 1, b));
347: #endif
348: }
349: #if defined(PETSC_USE_COMPLEX)
350: /* ZGELSS( M, N, NRHS, A, LDA, B, LDB, S, RCOND, RANK, WORK, LWORK, RWORK, INFO) */
351: PetscCallBLAS("LAPACKgelss", LAPACKgelss_(&M, &N, &one, A, &M, b, M > N ? &M : &N, sing, &rcond, &irank, workscalar, &lwrk, workreal, &info));
352: #else
353: /* DGELSS( M, N, NRHS, A, LDA, B, LDB, S, RCOND, RANK, WORK, LWORK, INFO) */
354: PetscCallBLAS("LAPACKgelss", LAPACKgelss_(&M, &N, &one, A, &M, b, M > N ? &M : &N, sing, &rcond, &irank, workscalar, &lwrk, &info));
355: #endif
356: PetscCheck(info >= 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "Bad argument to GELSS");
357: PetscCheck(info <= 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "SVD failed to converge");
358: if (debug) {
359: PetscCall(PetscPrintf(PETSC_COMM_SELF, "rank %" PetscBLASInt_FMT " rcond %g\n", irank, (double)rcond));
360: #if defined(PETSC_USE_COMPLEX)
361: {
362: PetscScalar *tmp;
363: PetscInt j;
365: PetscCall(DMGetWorkArray(dmc, Nc, MPIU_SCALAR, (void *)&tmp));
366: for (j = 0; j < PetscMin(Nc, ncols); ++j) tmp[j] = sing[j];
367: PetscCall(DMPrintCellMatrix(r, "Interpolator Row LS singular values", PetscMin(Nc, ncols), 1, tmp));
368: PetscCall(DMRestoreWorkArray(dmc, Nc, MPIU_SCALAR, (void *)&tmp));
369: }
370: #else
371: PetscCall(DMPrintCellMatrix(r, "Interpolator Row LS singular values", PetscMin(Nc, ncols), 1, sing));
372: #endif
373: PetscCall(DMPrintCellMatrix(r, "Interpolator Row LS old P", ncols, 1, vals));
374: PetscCall(DMPrintCellMatrix(r, "Interpolator Row LS sol", ncols, 1, b));
375: }
376: PetscCall(MatSetValues(*InAdapt, 1, &r, ncols, cols, b, INSERT_VALUES));
377: PetscCall(MatRestoreRow(In, r, &ncols, &cols, &vals));
378: }
379: PetscCall(MatDenseRestoreArrayRead(MF, &af));
380: PetscCall(MatDenseRestoreArrayRead(MC, &ac));
381: PetscCall(PetscFree7(A, b, w, x, sing, workscalar, workreal));
382: if (allocVc) PetscCall(MatDestroy(&MC));
383: PetscCall(MatAssemblyBegin(*InAdapt, MAT_FINAL_ASSEMBLY));
384: PetscCall(MatAssemblyEnd(*InAdapt, MAT_FINAL_ASSEMBLY));
385: PetscCall(PetscLogEventEnd(DM_AdaptInterpolator, dmc, dmf, 0, 0));
386: PetscFunctionReturn(PETSC_SUCCESS);
387: }
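388: /* Check that In maps each coarse test vector in MC to the corresponding fine vector in MF to within tol in the 2-norm */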
389: PetscErrorCode DMCheckInterpolator(DM dmf, Mat In, Mat MC, Mat MF, PetscReal tol)
390: {
391: Vec tmp;
392: PetscReal norminf, norm2, maxnorminf = 0.0, maxnorm2 = 0.0;
393: PetscInt k, Nc;
395: PetscFunctionBegin;
396: PetscCall(DMGetGlobalVector(dmf, &tmp));
397: PetscCall(MatViewFromOptions(In, NULL, "-dm_interpolator_adapt_error"));
398: PetscCall(MatGetSize(MF, NULL, &Nc));
399: for (k = 0; k < Nc; ++k) {
400: Vec vc, vf;
402: PetscCall(MatDenseGetColumnVecRead(MC, k, &vc));
403: PetscCall(MatDenseGetColumnVecRead(MF, k, &vf));
404: PetscCall(MatMult(In, vc, tmp));
405: PetscCall(VecAXPY(tmp, -1.0, vf));
406: PetscCall(VecViewFromOptions(vc, NULL, "-dm_interpolator_adapt_error"));
407: PetscCall(VecViewFromOptions(vf, NULL, "-dm_interpolator_adapt_error"));
408: PetscCall(VecViewFromOptions(tmp, NULL, "-dm_interpolator_adapt_error"));
409: PetscCall(VecNorm(tmp, NORM_INFINITY, &norminf));
410: PetscCall(VecNorm(tmp, NORM_2, &norm2));
411: maxnorminf = PetscMax(maxnorminf, norminf);
412: maxnorm2 = PetscMax(maxnorm2, norm2);
413: PetscCall(PetscPrintf(PetscObjectComm((PetscObject)dmf), "Coarse vec %" PetscInt_FMT " ||vf - P vc||_\\infty %g, ||vf - P vc||_2 %g\n", k, (double)norminf, (double)norm2));
414: PetscCall(MatDenseRestoreColumnVecRead(MC, k, &vc));
415: PetscCall(MatDenseRestoreColumnVecRead(MF, k, &vf));
416: }
417: PetscCall(DMRestoreGlobalVector(dmf, &tmp));
418: PetscCheck(maxnorm2 <= tol, PetscObjectComm((PetscObject)dmf), PETSC_ERR_ARG_WRONG, "max_k ||vf_k - P vc_k||_2 %g > tol %g", (double)maxnorm2, (double)tol);
419: PetscFunctionReturn(PETSC_SUCCESS);
420: }
422: // Project particles to field
423: // M_f u_f = M_p u_p
424: // u_f = M^{-1}_f M_p u_p
425: static PetscErrorCode DMSwarmProjectField_Conservative_PLEX(DM sw, DM dm, Vec u_p, Vec u_f)
426: {
427: KSP ksp;
428: Mat M_f, M_p; // TODO Should cache these
429: Vec rhs;
430: const char *prefix;
432: PetscFunctionBegin;
433: PetscCall(DMCreateMassMatrix(dm, dm, &M_f));
434: PetscCall(DMCreateMassMatrix(sw, dm, &M_p));
435: PetscCall(DMGetGlobalVector(dm, &rhs));
436: PetscCall(MatMultTranspose(M_p, u_p, rhs));
438: PetscCall(KSPCreate(PetscObjectComm((PetscObject)sw), &ksp));
439: PetscCall(PetscObjectGetOptionsPrefix((PetscObject)sw, &prefix));
440: PetscCall(KSPSetOptionsPrefix(ksp, prefix));
441: PetscCall(KSPAppendOptionsPrefix(ksp, "ptof_"));
442: PetscCall(KSPSetFromOptions(ksp));
444: PetscCall(KSPSetOperators(ksp, M_f, M_f));
445: PetscCall(KSPSolve(ksp, rhs, u_f));
447: PetscCall(DMRestoreGlobalVector(dm, &rhs));
448: PetscCall(KSPDestroy(&ksp));
449: PetscCall(MatDestroy(&M_f));
450: PetscCall(MatDestroy(&M_p));
451: PetscFunctionReturn(PETSC_SUCCESS);
452: }
454: // Project field to particles
455: // M_p u_p = M_f u_f
456: // u_p = M^+_p M_f u_f
457: static PetscErrorCode DMSwarmProjectParticles_Conservative_PLEX(DM sw, DM dm, Vec u_p, Vec u_f)
458: {
459: KSP ksp;
460: PC pc;
461: Mat M_f, M_p, PM_p;
462: Vec rhs;
463: PetscBool isBjacobi;
464: const char *prefix;
466: PetscFunctionBegin;
467: PetscCall(DMCreateMassMatrix(dm, dm, &M_f));
468: PetscCall(DMCreateMassMatrix(sw, dm, &M_p));
469: PetscCall(DMGetGlobalVector(dm, &rhs));
470: PetscCall(MatMult(M_f, u_f, rhs));
472: PetscCall(KSPCreate(PetscObjectComm((PetscObject)sw), &ksp));
473: PetscCall(PetscObjectGetOptionsPrefix((PetscObject)sw, &prefix));
474: PetscCall(KSPSetOptionsPrefix(ksp, prefix));
475: PetscCall(KSPAppendOptionsPrefix(ksp, "ftop_"));
476: PetscCall(KSPSetFromOptions(ksp));
478: PetscCall(KSPGetPC(ksp, &pc));
479: PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCBJACOBI, &isBjacobi));
480: if (isBjacobi) {
481: PetscCall(DMSwarmCreateMassMatrixSquare(sw, dm, &PM_p));
482: } else {
483: PM_p = M_p;
484: PetscCall(PetscObjectReference((PetscObject)PM_p));
485: }
486: PetscCall(KSPSetOperators(ksp, M_p, PM_p));
487: PetscCall(KSPSolveTranspose(ksp, rhs, u_p));
489: PetscCall(DMRestoreGlobalVector(dm, &rhs));
490: PetscCall(KSPDestroy(&ksp));
491: PetscCall(MatDestroy(&M_f));
492: PetscCall(MatDestroy(&M_p));
493: PetscCall(MatDestroy(&PM_p));
494: PetscFunctionReturn(PETSC_SUCCESS);
495: }
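496: // Dispatch a single-field conservative projection: particles to field for SCATTER_FORWARD, field to particles otherwise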
497: static PetscErrorCode DMSwarmProjectFields_Plex_Internal(DM sw, DM dm, PetscInt Nf, const char *fieldnames[], Vec vec, ScatterMode mode)
498: {
499: PetscDS ds;
500: Vec u;
501: PetscInt f = 0, bs, *Nc;
503: PetscFunctionBegin;
504: PetscCall(DMGetDS(dm, &ds));
505: PetscCall(PetscDSGetComponents(ds, &Nc));
506: PetscCall(PetscCitationsRegister(SwarmProjCitation, &SwarmProjcite));
507: PetscCheck(Nf == 1, PetscObjectComm((PetscObject)sw), PETSC_ERR_SUP, "Currently supported only for a single field");
508: PetscCall(DMSwarmVectorDefineFields(sw, Nf, fieldnames));
509: PetscCall(DMSwarmCreateGlobalVectorFromField(sw, fieldnames[f], &u));
510: PetscCall(VecGetBlockSize(u, &bs));
511: PetscCheck(Nc[f] == bs, PetscObjectComm((PetscObject)sw), PETSC_ERR_SUP, "Field %" PetscInt_FMT " components %" PetscInt_FMT " != %" PetscInt_FMT " blocksize for swarm field %s", f, Nc[f], bs, fieldnames[f]);
512: if (mode == SCATTER_FORWARD) {
513: PetscCall(DMSwarmProjectField_Conservative_PLEX(sw, dm, u, vec));
514: } else {
515: PetscCall(DMSwarmProjectParticles_Conservative_PLEX(sw, dm, u, vec));
516: }
517: PetscCall(DMSwarmDestroyGlobalVectorFromField(sw, fieldnames[0], &u));
518: PetscFunctionReturn(PETSC_SUCCESS);
519: }
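520: /* Approximate Q1 projection on a 2D DMDA: accumulate N_i(x_p) * phi_p and N_i(x_p) at the cell vertices, then divide to obtain vertex averages */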
521: static PetscErrorCode DMSwarmProjectField_ApproxQ1_DA_2D(DM swarm, PetscReal *swarm_field, DM dm, Vec v_field)
522: {
523: DMSwarmCellDM celldm;
524: Vec v_field_l, denom_l, coor_l, denom;
525: PetscScalar *_field_l, *_denom_l;
526: PetscInt k, p, e, npoints, nel, npe, Nfc;
527: PetscInt *mpfield_cell;
528: PetscReal *mpfield_coor;
529: const PetscInt *element_list;
530: const PetscInt *element;
531: PetscScalar xi_p[2], Ni[4];
532: const PetscScalar *_coor;
533: const char **coordFields, *cellid;
535: PetscFunctionBegin;
536: PetscCall(VecZeroEntries(v_field));
538: PetscCall(DMGetLocalVector(dm, &v_field_l));
539: PetscCall(DMGetGlobalVector(dm, &denom));
540: PetscCall(DMGetLocalVector(dm, &denom_l));
541: PetscCall(VecZeroEntries(v_field_l));
542: PetscCall(VecZeroEntries(denom));
543: PetscCall(VecZeroEntries(denom_l));
545: PetscCall(VecGetArray(v_field_l, &_field_l));
546: PetscCall(VecGetArray(denom_l, &_denom_l));
548: PetscCall(DMGetCoordinatesLocal(dm, &coor_l));
549: PetscCall(VecGetArrayRead(coor_l, &_coor));
551: PetscCall(DMSwarmGetCellDMActive(swarm, &celldm));
552: PetscCall(DMSwarmCellDMGetCoordinateFields(celldm, &Nfc, &coordFields));
553: PetscCheck(Nfc == 1, PetscObjectComm((PetscObject)swarm), PETSC_ERR_SUP, "We only support a single coordinate field right now, not %" PetscInt_FMT, Nfc);
554: PetscCall(DMSwarmCellDMGetCellID(celldm, &cellid));
556: PetscCall(DMDAGetElements(dm, &nel, &npe, &element_list));
557: PetscCall(DMSwarmGetLocalSize(swarm, &npoints));
558: PetscCall(DMSwarmGetField(swarm, coordFields[0], NULL, NULL, (void **)&mpfield_coor));
559: PetscCall(DMSwarmGetField(swarm, cellid, NULL, NULL, (void **)&mpfield_cell));
561: for (p = 0; p < npoints; p++) {
562: PetscReal *coor_p;
563: const PetscScalar *x0;
564: const PetscScalar *x2;
565: PetscScalar dx[2];
567: e = mpfield_cell[p];
568: coor_p = &mpfield_coor[2 * p];
569: element = &element_list[npe * e];
571: /* compute local coordinates: (xp-x0)/dx = (xip+1)/2 */
572: x0 = &_coor[2 * element[0]];
573: x2 = &_coor[2 * element[2]];
575: dx[0] = x2[0] - x0[0];
576: dx[1] = x2[1] - x0[1];
578: xi_p[0] = 2.0 * (coor_p[0] - x0[0]) / dx[0] - 1.0;
579: xi_p[1] = 2.0 * (coor_p[1] - x0[1]) / dx[1] - 1.0;
581: /* evaluate basis functions */
582: Ni[0] = 0.25 * (1.0 - xi_p[0]) * (1.0 - xi_p[1]);
583: Ni[1] = 0.25 * (1.0 + xi_p[0]) * (1.0 - xi_p[1]);
584: Ni[2] = 0.25 * (1.0 + xi_p[0]) * (1.0 + xi_p[1]);
585: Ni[3] = 0.25 * (1.0 - xi_p[0]) * (1.0 + xi_p[1]);
587: for (k = 0; k < npe; k++) {
588: _field_l[element[k]] += Ni[k] * swarm_field[p];
589: _denom_l[element[k]] += Ni[k];
590: }
591: }
593: PetscCall(DMSwarmRestoreField(swarm, cellid, NULL, NULL, (void **)&mpfield_cell));
594: PetscCall(DMSwarmRestoreField(swarm, coordFields[0], NULL, NULL, (void **)&mpfield_coor));
595: PetscCall(DMDARestoreElements(dm, &nel, &npe, &element_list));
596: PetscCall(VecRestoreArrayRead(coor_l, &_coor));
597: PetscCall(VecRestoreArray(v_field_l, &_field_l));
598: PetscCall(VecRestoreArray(denom_l, &_denom_l));
600: PetscCall(DMLocalToGlobalBegin(dm, v_field_l, ADD_VALUES, v_field));
601: PetscCall(DMLocalToGlobalEnd(dm, v_field_l, ADD_VALUES, v_field));
602: PetscCall(DMLocalToGlobalBegin(dm, denom_l, ADD_VALUES, denom));
603: PetscCall(DMLocalToGlobalEnd(dm, denom_l, ADD_VALUES, denom));
605: PetscCall(VecPointwiseDivide(v_field, v_field, denom));
607: PetscCall(DMRestoreLocalVector(dm, &v_field_l));
608: PetscCall(DMRestoreLocalVector(dm, &denom_l));
609: PetscCall(DMRestoreGlobalVector(dm, &denom));
610: PetscFunctionReturn(PETSC_SUCCESS);
611: }
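612: /* Project each requested swarm field onto the DMDA cell DM; only the 2D Q1 averaging path is currently implemented */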
613: static PetscErrorCode DMSwarmProjectFields_DA_Internal(DM swarm, DM celldm, PetscInt nfields, DMSwarmDataField dfield[], Vec vecs[], ScatterMode mode)
614: {
615: PetscInt f, dim;
616: DMDAElementType etype;
618: PetscFunctionBegin;
619: PetscCall(DMDAGetElementType(celldm, &etype));
620: PetscCheck(etype != DMDA_ELEMENT_P1, PetscObjectComm((PetscObject)swarm), PETSC_ERR_SUP, "Only Q1 DMDA supported");
621: PetscCheck(mode == SCATTER_FORWARD, PetscObjectComm((PetscObject)swarm), PETSC_ERR_SUP, "Mapping the continuum to particles is not currently supported for DMDA");
623: PetscCall(DMGetDimension(swarm, &dim));
624: switch (dim) {
625: case 2:
626: for (f = 0; f < nfields; f++) {
627: PetscReal *swarm_field;
629: PetscCall(DMSwarmDataFieldGetEntries(dfield[f], (void **)&swarm_field));
630: PetscCall(DMSwarmProjectField_ApproxQ1_DA_2D(swarm, swarm_field, celldm, vecs[f]));
631: }
632: break;
633: case 3:
634: SETERRQ(PetscObjectComm((PetscObject)swarm), PETSC_ERR_SUP, "No support for 3D");
635: default:
636: break;
637: }
638: PetscFunctionReturn(PETSC_SUCCESS);
639: }
641: /*@C
642: DMSwarmProjectFields - Project a set of swarm fields onto another `DM`
644: Collective
646: Input Parameters:
647: + sw - the `DMSWARM`
648: . dm - the `DM`, or `NULL` to use the cell `DM`
649: . nfields - the number of swarm fields to project
650: . fieldnames - the textual names of the swarm fields to project
651: . fields - an array of `Vec`'s of length nfields
652: - mode - if `SCATTER_FORWARD` then map particles to the continuum, and if `SCATTER_REVERSE` map the continuum to particles
654: Level: beginner
656: Notes:
657: Currently, there are two available projection methods. The first is conservative projection, used for a `DMPLEX` cell `DM`.
658: The second is averaging, which is used for a `DMDA` cell `DM`:
660: $$
661: \phi_i = \sum_{p=0}^{np} N_i(x_p) \phi_p dJ / \sum_{p=0}^{np} N_i(x_p) dJ
662: $$
664: where $\phi_p $ is the swarm field at point $p$, $N_i()$ is the cell `DM` basis function at vertex $i$, $dJ$ is the determinant of the cell Jacobian and
665: $\phi_i$ is the projected vertex value of the field $\phi$.
667: The user is responsible for destroying both the array and the individual `Vec` objects.
669: For the `DMPLEX` case, there is only a single vector, so the field layout in the `DMPLEX` must match the requested fields from the `DMSwarm`.
671: For averaging projection, only swarm fields registered with a data type of `PETSC_REAL` can be projected onto the cell `DM`, and only swarm fields with a block size of 1 can currently be projected.
673: .seealso: [](ch_dmbase), `DMSWARM`, `DMSwarmSetType()`, `DMSwarmSetCellDM()`, `DMSwarmType`
674: @*/
675: PetscErrorCode DMSwarmProjectFields(DM sw, DM dm, PetscInt nfields, const char *fieldnames[], Vec fields[], ScatterMode mode)
676: {
677: DM_Swarm *swarm = (DM_Swarm *)sw->data;
678: DMSwarmDataField *gfield;
679: PetscBool isDA, isPlex;
680: MPI_Comm comm;
682: PetscFunctionBegin;
683: DMSWARMPICVALID(sw);
684: PetscCall(PetscObjectGetComm((PetscObject)sw, &comm));
685: if (!dm) PetscCall(DMSwarmGetCellDM(sw, &dm));
686: PetscCall(PetscObjectTypeCompare((PetscObject)dm, DMDA, &isDA));
687: PetscCall(PetscObjectTypeCompare((PetscObject)dm, DMPLEX, &isPlex));
688: PetscCall(PetscMalloc1(nfields, &gfield));
689: for (PetscInt f = 0; f < nfields; ++f) PetscCall(DMSwarmDataBucketGetDMSwarmDataFieldByName(swarm->db, fieldnames[f], &gfield[f]));
691: if (isDA) {
692: for (PetscInt f = 0; f < nfields; f++) {
693: PetscCheck(gfield[f]->petsc_type == PETSC_REAL, comm, PETSC_ERR_SUP, "Projection only valid for fields using a data type = PETSC_REAL");
694: PetscCheck(gfield[f]->bs == 1, comm, PETSC_ERR_SUP, "Projection only valid for fields with block size = 1");
695: }
696: PetscCall(DMSwarmProjectFields_DA_Internal(sw, dm, nfields, gfield, fields, mode));
697: } else if (isPlex) {
698: PetscInt Nf;
700: PetscCall(DMGetNumFields(dm, &Nf));
701: PetscCheck(Nf == nfields, comm, PETSC_ERR_ARG_WRONG, "Number of DM fields %" PetscInt_FMT " != %" PetscInt_FMT " number of requested Swarm fields", Nf, nfields);
702: PetscCall(DMSwarmProjectFields_Plex_Internal(sw, dm, nfields, fieldnames, fields[0], mode));
703: } else SETERRQ(PetscObjectComm((PetscObject)sw), PETSC_ERR_SUP, "Only supported for cell DMs of type DMDA and DMPLEX");
705: PetscCall(PetscFree(gfield));
706: PetscFunctionReturn(PETSC_SUCCESS);
707: }
709: // Project weak divergence of particles to field
710: // \int_X psi_i div u_f = \int_X psi_i div u_p
711: // \int_X grad psi_i . \sum_j u_f \psi_j = \int_X grad psi_i . \sum_p u_p \delta(x - x_p)
712: // D_f u_f = D_p u_p
713: // u_f = D^+_f D_p u_p
714: static PetscErrorCode DMSwarmProjectGradientField_Conservative_PLEX(DM sw, DM dm, Vec u_p, Vec u_f)
715: {
716: DM gdm;
717: KSP ksp;
718: Mat D_f, D_p; // TODO Should cache these
719: Vec rhs;
720: const char *prefix;
722: PetscFunctionBegin;
723: PetscCall(VecGetDM(u_f, &gdm));
724: PetscCall(DMCreateGradientMatrix(dm, gdm, &D_f));
725: PetscCall(DMCreateGradientMatrix(sw, dm, &D_p));
726: PetscCall(DMGetGlobalVector(dm, &rhs));
727: PetscCall(PetscObjectSetName((PetscObject)rhs, "D u"));
728: PetscCall(MatMultTranspose(D_p, u_p, rhs));
729: PetscCall(VecViewFromOptions(rhs, NULL, "-rhs_view"));
731: PetscCall(KSPCreate(PetscObjectComm((PetscObject)sw), &ksp));
732: PetscCall(PetscObjectGetOptionsPrefix((PetscObject)sw, &prefix));
733: PetscCall(KSPSetOptionsPrefix(ksp, prefix));
734: PetscCall(KSPAppendOptionsPrefix(ksp, "gptof_"));
735: PetscCall(KSPSetFromOptions(ksp));
737: PetscCall(KSPSetOperators(ksp, D_f, D_f));
738: PetscCall(KSPSolveTranspose(ksp, rhs, u_f));
740: PetscCall(MatMultTranspose(D_f, u_f, rhs));
741: PetscCall(VecViewFromOptions(rhs, NULL, "-rhs_view"));
743: PetscCall(DMRestoreGlobalVector(dm, &rhs));
744: PetscCall(KSPDestroy(&ksp));
745: PetscCall(MatDestroy(&D_f));
746: PetscCall(MatDestroy(&D_p));
747: PetscFunctionReturn(PETSC_SUCCESS);
748: }
750: // Project weak divergence of field to particles
751: // D_p u_p = D_f u_f
752: // u_p = D^+_p D_f u_f
753: static PetscErrorCode DMSwarmProjectGradientParticles_Conservative_PLEX(DM sw, DM dm, Vec u_p, Vec u_f)
754: {
755: KSP ksp;
756: PC pc;
757: Mat D_f, D_p, PD_p;
758: Vec rhs;
759: PetscBool isBjacobi;
760: const char *prefix;
762: PetscFunctionBegin;
763: PetscCall(DMCreateGradientMatrix(dm, dm, &D_f));
764: PetscCall(DMCreateGradientMatrix(sw, dm, &D_p));
765: PetscCall(DMGetGlobalVector(dm, &rhs));
766: PetscCall(MatMult(D_f, u_f, rhs));
768: PetscCall(KSPCreate(PetscObjectComm((PetscObject)sw), &ksp));
769: PetscCall(PetscObjectGetOptionsPrefix((PetscObject)sw, &prefix));
770: PetscCall(KSPSetOptionsPrefix(ksp, prefix));
771: PetscCall(KSPAppendOptionsPrefix(ksp, "gftop_"));
772: PetscCall(KSPSetFromOptions(ksp));
774: PetscCall(KSPGetPC(ksp, &pc));
775: PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCBJACOBI, &isBjacobi));
776: if (isBjacobi) {
777: PetscCall(DMSwarmCreateMassMatrixSquare(sw, dm, &PD_p));
778: } else {
779: PD_p = D_p;
780: PetscCall(PetscObjectReference((PetscObject)PD_p));
781: }
782: PetscCall(KSPSetOperators(ksp, D_p, PD_p));
783: PetscCall(KSPSolveTranspose(ksp, rhs, u_p));
785: PetscCall(DMRestoreGlobalVector(dm, &rhs));
786: PetscCall(KSPDestroy(&ksp));
787: PetscCall(MatDestroy(&D_f));
788: PetscCall(MatDestroy(&D_p));
789: PetscCall(MatDestroy(&PD_p));
790: PetscFunctionReturn(PETSC_SUCCESS);
791: }
793: static PetscErrorCode DMSwarmProjectGradientFields_Plex_Internal(DM sw, DM dm, PetscInt Nf, const char *fieldnames[], Vec vec, ScatterMode mode)
794: {
795: PetscDS ds;
796: Vec u;
797: PetscInt f = 0, cdim, bs, *Nc;
799: PetscFunctionBegin;
800: PetscCall(DMGetCoordinateDim(dm, &cdim));
801: PetscCall(DMGetDS(dm, &ds));
802: PetscCall(PetscDSGetComponents(ds, &Nc));
803: PetscCall(PetscCitationsRegister(SwarmProjCitation, &SwarmProjcite));
804: PetscCheck(Nf == 1, PetscObjectComm((PetscObject)sw), PETSC_ERR_SUP, "Currently supported only for a single field");
805: PetscCall(DMSwarmVectorDefineFields(sw, Nf, fieldnames));
806: PetscCall(DMSwarmCreateGlobalVectorFromField(sw, fieldnames[f], &u));
807: PetscCall(VecGetBlockSize(u, &bs));
808: PetscCheck(Nc[f] * cdim == bs, PetscObjectComm((PetscObject)sw), PETSC_ERR_SUP, "Field %" PetscInt_FMT " components %" PetscInt_FMT " * %" PetscInt_FMT " coordinate dim != %" PetscInt_FMT " blocksize for swarm field %s", f, Nc[f], cdim, bs, fieldnames[f]);
809: if (mode == SCATTER_FORWARD) {
810: PetscCall(DMSwarmProjectGradientField_Conservative_PLEX(sw, dm, u, vec));
811: } else {
812: PetscCall(DMSwarmProjectGradientParticles_Conservative_PLEX(sw, dm, u, vec));
813: }
814: PetscCall(DMSwarmDestroyGlobalVectorFromField(sw, fieldnames[0], &u));
815: PetscFunctionReturn(PETSC_SUCCESS);
816: }
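817: // Public entry point: project weak gradient/divergence data between a DMSWARM and a DMPLEX cell DM, in either direction depending on mode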
818: PetscErrorCode DMSwarmProjectGradientFields(DM sw, DM dm, PetscInt nfields, const char *fieldnames[], Vec fields[], ScatterMode mode)
819: {
820: PetscBool isPlex;
821: MPI_Comm comm;
823: PetscFunctionBegin;
824: DMSWARMPICVALID(sw);
825: PetscCall(PetscObjectGetComm((PetscObject)sw, &comm));
826: if (!dm) PetscCall(DMSwarmGetCellDM(sw, &dm));
827: PetscCall(PetscObjectTypeCompare((PetscObject)dm, DMPLEX, &isPlex));
828: if (isPlex) {
829: PetscInt Nf;
831: PetscCall(DMGetNumFields(dm, &Nf));
832: PetscCheck(Nf == nfields, comm, PETSC_ERR_ARG_WRONG, "Number of DM fields %" PetscInt_FMT " != %" PetscInt_FMT " number of requested Swarm fields", Nf, nfields);
833: PetscCall(DMSwarmProjectGradientFields_Plex_Internal(sw, dm, nfields, fieldnames, fields[0], mode));
834: } else SETERRQ(PetscObjectComm((PetscObject)sw), PETSC_ERR_SUP, "Only supported for cell DMs of type DMPLEX");
835: PetscFunctionReturn(PETSC_SUCCESS);
836: }
838: /*
839: InitializeParticles_Regular - Initialize a regular grid of particles in each cell
841: Input Parameters:
842: + sw - The `DMSWARM`
843: - n - The number of particles per dimension per species
845: Notes:
846: This function sets the species, cellid, and cell DM coordinates.
848: It places n^d particles per species in each cell of the cell DM.
849: */
850: static PetscErrorCode InitializeParticles_Regular(DM sw, PetscInt n)
851: {
852: DM_Swarm *swarm = (DM_Swarm *)sw->data;
853: DM dm;
854: DMSwarmCellDM celldm;
855: PetscInt dim, Ns, Npc, Np, cStart, cEnd, debug;
856: PetscBool flg;
857: MPI_Comm comm;
859: PetscFunctionBegin;
860: PetscCall(PetscObjectGetComm((PetscObject)sw, &comm));
862: PetscOptionsBegin(comm, "", "DMSwarm Options", "DMSWARM");
863: PetscCall(DMSwarmGetNumSpecies(sw, &Ns));
864: PetscCall(PetscOptionsInt("-dm_swarm_num_species", "The number of species", "DMSwarmSetNumSpecies", Ns, &Ns, &flg));
865: if (flg) PetscCall(DMSwarmSetNumSpecies(sw, Ns));
866: PetscCall(PetscOptionsBoundedInt("-dm_swarm_print_coords", "Debug output level for particle coordinate computations", "InitializeParticles", 0, &swarm->printCoords, NULL, 0));
867: PetscCall(PetscOptionsBoundedInt("-dm_swarm_print_weights", "Debug output level for particle weight computations", "InitializeWeights", 0, &swarm->printWeights, NULL, 0));
868: PetscOptionsEnd();
869: debug = swarm->printCoords;
871: // n^d particle per cell on the grid
872: PetscCall(DMSwarmGetCellDM(sw, &dm));
873: PetscCall(DMGetDimension(dm, &dim));
874: PetscCheck(!(dim % 2), comm, PETSC_ERR_SUP, "We only support even dimension, not %" PetscInt_FMT, dim);
875: PetscCall(DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd));
876: Npc = Ns * PetscPowInt(n, dim);
877: Np = (cEnd - cStart) * Npc;
878: PetscCall(DMSwarmSetLocalSizes(sw, Np, 0));
879: if (debug) {
880: PetscInt gNp;
881: PetscCallMPI(MPIU_Allreduce(&Np, &gNp, 1, MPIU_INT, MPIU_SUM, comm));
882: PetscCall(PetscPrintf(comm, "Global Np = %" PetscInt_FMT "\n", gNp));
883: }
884: PetscCall(PetscPrintf(comm, "Regular layout using %" PetscInt_FMT " particles per cell\n", Npc));
886: // Set species and cellid
887: {
888: const char *cellidName;
889: PetscInt *species, *cellid;
891: PetscCall(DMSwarmGetCellDMActive(sw, &celldm));
892: PetscCall(DMSwarmCellDMGetCellID(celldm, &cellidName));
893: PetscCall(DMSwarmGetField(sw, "species", NULL, NULL, (void **)&species));
894: PetscCall(DMSwarmGetField(sw, cellidName, NULL, NULL, (void **)&cellid));
895: for (PetscInt c = 0, p = 0; c < cEnd - cStart; ++c) {
896: for (PetscInt s = 0; s < Ns; ++s) {
897: for (PetscInt q = 0; q < Npc / Ns; ++q, ++p) {
898: species[p] = s;
899: cellid[p] = c;
900: }
901: }
902: }
903: PetscCall(DMSwarmRestoreField(sw, "species", NULL, NULL, (void **)&species));
904: PetscCall(DMSwarmRestoreField(sw, cellidName, NULL, NULL, (void **)&cellid));
905: }
907: // Set particle coordinates
908: {
909: PetscReal *x, *v;
910: const char **coordNames;
911: PetscInt Ncoord;
912: const PetscInt xdim = dim / 2, vdim = dim / 2;
914: PetscCall(DMSwarmCellDMGetCoordinateFields(celldm, &Ncoord, &coordNames));
915: PetscCheck(Ncoord == 2, comm, PETSC_ERR_SUP, "We only support regular layout for 2 coordinate fields, not %" PetscInt_FMT, Ncoord);
916: PetscCall(DMSwarmGetField(sw, coordNames[0], NULL, NULL, (void **)&x));
917: PetscCall(DMSwarmGetField(sw, coordNames[1], NULL, NULL, (void **)&v));
918: PetscCall(DMSwarmSortGetAccess(sw));
919: PetscCall(DMGetCoordinatesLocalSetUp(dm));
920: for (PetscInt c = 0; c < cEnd - cStart; ++c) {
921: const PetscInt cell = c + cStart;
922: const PetscScalar *a;
923: PetscScalar *coords;
924: PetscReal lower[6], upper[6];
925: PetscBool isDG;
926: PetscInt *pidx, npc, Nc;
928: PetscCall(DMSwarmSortGetPointsPerCell(sw, c, &npc, &pidx));
929: PetscCheck(Npc == npc, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Invalid number of points per cell %" PetscInt_FMT " != %" PetscInt_FMT, npc, Npc);
930: PetscCall(DMPlexGetCellCoordinates(dm, cell, &isDG, &Nc, &a, &coords));
931: for (PetscInt d = 0; d < dim; ++d) {
932: lower[d] = PetscRealPart(coords[0 * dim + d]);
933: upper[d] = PetscRealPart(coords[0 * dim + d]);
934: }
935: for (PetscInt i = 1; i < Nc / dim; ++i) {
936: for (PetscInt d = 0; d < dim; ++d) {
937: lower[d] = PetscMin(lower[d], PetscRealPart(coords[i * dim + d]));
938: upper[d] = PetscMax(upper[d], PetscRealPart(coords[i * dim + d]));
939: }
940: }
941: for (PetscInt s = 0; s < Ns; ++s) {
942: for (PetscInt q = 0; q < Npc / Ns; ++q) {
943: const PetscInt p = pidx[q * Ns + s];
944: PetscInt xi[3], vi[3];
946: xi[0] = q % n;
947: xi[1] = (q / n) % n;
948: xi[2] = (q / PetscSqr(n)) % n;
949: for (PetscInt d = 0; d < xdim; ++d) x[p * xdim + d] = lower[d] + (xi[d] + 0.5) * (upper[d] - lower[d]) / n;
950: vi[0] = (q / PetscPowInt(n, xdim)) % n;
951: vi[1] = (q / PetscPowInt(n, xdim + 1)) % n;
952: vi[2] = (q / PetscPowInt(n, xdim + 2));
953: for (PetscInt d = 0; d < vdim; ++d) v[p * vdim + d] = lower[xdim + d] + (vi[d] + 0.5) * (upper[xdim + d] - lower[xdim + d]) / n;
954: if (debug > 1) {
955: PetscCall(PetscPrintf(PETSC_COMM_SELF, "Particle %4" PetscInt_FMT " ", p));
956: PetscCall(PetscPrintf(PETSC_COMM_SELF, " x: ("));
957: for (PetscInt d = 0; d < xdim; ++d) {
958: if (d > 0) PetscCall(PetscPrintf(PETSC_COMM_SELF, ", "));
959: PetscCall(PetscPrintf(PETSC_COMM_SELF, "%g", (double)PetscRealPart(x[p * xdim + d])));
960: }
961: PetscCall(PetscPrintf(PETSC_COMM_SELF, ") v:("));
962: for (PetscInt d = 0; d < vdim; ++d) {
963: if (d > 0) PetscCall(PetscPrintf(PETSC_COMM_SELF, ", "));
964: PetscCall(PetscPrintf(PETSC_COMM_SELF, "%g", (double)PetscRealPart(v[p * vdim + d])));
965: }
966: PetscCall(PetscPrintf(PETSC_COMM_SELF, ")\n"));
967: }
968: }
969: }
970: PetscCall(DMPlexRestoreCellCoordinates(dm, cell, &isDG, &Nc, &a, &coords));
971: PetscCall(DMSwarmSortRestorePointsPerCell(sw, c, &Npc, &pidx));
972: }
973: PetscCall(DMSwarmSortRestoreAccess(sw));
974: PetscCall(DMSwarmRestoreField(sw, coordNames[0], NULL, NULL, (void **)&x));
975: PetscCall(DMSwarmRestoreField(sw, coordNames[1], NULL, NULL, (void **)&v));
976: }
977: PetscFunctionReturn(PETSC_SUCCESS);
978: }
980: /*
981: @article{MyersColellaVanStraalen2017,
982: title = {A 4th-order particle-in-cell method with phase-space remapping for the {Vlasov-Poisson} equation},
983: author = {Andrew Myers and Phillip Colella and Brian Van Straalen},
984: journal = {SIAM Journal on Scientific Computing},
985: volume = {39},
986: issue = {3},
987: pages = {B467-B485},
988: doi = {10.1137/16M105962X},
989: issn = {10957197},
990: year = {2017},
991: }
992: */
993: static PetscErrorCode W_3_Interpolation_Private(PetscReal x, PetscReal *w)
994: {
995: const PetscReal ax = PetscAbsReal(x);
997: PetscFunctionBegin;
998: *w = 0.;
999: // W_3(x) = 1 - 5/2 |x|^2 + 3/2 |x|^3 0 \le |x| \le 1
1000: if (ax <= 1.) *w = 1. - 2.5 * PetscSqr(ax) + 1.5 * PetscSqr(ax) * ax;
1001: // 1/2 (2 - |x|)^2 (1 - |x|) 1 \le |x| \le 2
1002: else if (ax <= 2.) *w = 0.5 * PetscSqr(2. - ax) * (1. - ax);
1003: //PetscCall(PetscPrintf(PETSC_COMM_SELF, " W_3 %g --> %g\n", x, *w));
1004: PetscFunctionReturn(PETSC_SUCCESS);
1005: }
1007: // Right now, we will assume that the spatial and velocity grids are regular, which will speed up point location immensely
1008: static PetscErrorCode DMSwarmRemap_Colella_Internal(DM sw, DM *rsw)
1009: {
1010: DM xdm, vdm;
1011: DMSwarmCellDM celldm;
1012: PetscReal xmin[3], xmax[3], vmin[3], vmax[3];
1013: PetscInt xend[3], vend[3];
1014: PetscReal *x, *v, *w, *rw;
1015: PetscReal hx[3], hv[3];
1016: PetscInt dim, xcdim, vcdim, xcStart, xcEnd, vcStart, vcEnd, Np, Nfc;
1017: PetscInt debug = ((DM_Swarm *)sw->data)->printWeights;
1018: const char **coordFields;
1020: PetscFunctionBegin;
1021: PetscCall(DMGetDimension(sw, &dim));
1022: PetscCall(DMSwarmGetCellDM(sw, &xdm));
1023: PetscCall(DMGetCoordinateDim(xdm, &xcdim));
1024: // Create a new centroid swarm without weights
1025: PetscCall(DMSwarmDuplicate(sw, rsw));
1026: PetscCall(DMSwarmGetCellDMActive(*rsw, &celldm));
1027: PetscCall(DMSwarmSetCellDMActive(*rsw, "remap"));
1028: PetscCall(InitializeParticles_Regular(*rsw, 1));
1029: PetscCall(DMSwarmSetCellDMActive(*rsw, ((PetscObject)celldm)->name));
1030: PetscCall(DMSwarmGetLocalSize(*rsw, &Np));
1031: // Assume quad mesh and calculate cell diameters (TODO this could be more robust)
1032: {
1033: const PetscScalar *array;
1034: PetscScalar *coords;
1035: PetscBool isDG;
1036: PetscInt Nc;
1038: PetscCall(DMGetBoundingBox(xdm, xmin, xmax));
1039: PetscCall(DMPlexGetHeightStratum(xdm, 0, &xcStart, &xcEnd));
1040: PetscCall(DMPlexGetCellCoordinates(xdm, xcStart, &isDG, &Nc, &array, &coords));
1041: hx[0] = PetscRealPart(coords[1 * xcdim + 0] - coords[0 * xcdim + 0]);
1042: hx[1] = xcdim > 1 ? PetscRealPart(coords[2 * xcdim + 1] - coords[1 * xcdim + 1]) : 1.;
1043: PetscCall(DMPlexRestoreCellCoordinates(xdm, xcStart, &isDG, &Nc, &array, &coords));
1044: PetscCall(PetscObjectQuery((PetscObject)sw, "__vdm__", (PetscObject *)&vdm));
1045: PetscCall(DMGetCoordinateDim(vdm, &vcdim));
1046: PetscCall(DMGetBoundingBox(vdm, vmin, vmax));
1047: PetscCall(DMPlexGetHeightStratum(vdm, 0, &vcStart, &vcEnd));
1048: PetscCall(DMPlexGetCellCoordinates(vdm, vcStart, &isDG, &Nc, &array, &coords));
1049: hv[0] = PetscRealPart(coords[1 * vcdim + 0] - coords[0 * vcdim + 0]);
1050: hv[1] = vcdim > 1 ? PetscRealPart(coords[2 * vcdim + 1] - coords[1 * vcdim + 1]) : 1.;
1051: PetscCall(DMPlexRestoreCellCoordinates(vdm, vcStart, &isDG, &Nc, &array, &coords));
1053: PetscCheck(dim == 1, PetscObjectComm((PetscObject)sw), PETSC_ERR_ARG_WRONG, "Only support 1D distributions at this time");
1054: xend[0] = xcEnd - xcStart;
1055: xend[1] = 1;
1056: vend[0] = vcEnd - vcStart;
1057: vend[1] = 1;
1058: if (debug > 1)
1059: PetscCall(PetscPrintf(PETSC_COMM_SELF, "Phase Grid (%g, %g, %g, %g) (%" PetscInt_FMT ", %" PetscInt_FMT ", %" PetscInt_FMT ", %" PetscInt_FMT ")\n", (double)PetscRealPart(hx[0]), (double)PetscRealPart(hx[1]), (double)PetscRealPart(hv[0]), (double)PetscRealPart(hv[1]), xend[0], xend[1], vend[0], vend[1]));
1060: }
1061: // Iterate over particles in the original swarm
1062: PetscCall(DMSwarmGetCellDMActive(sw, &celldm));
1063: PetscCall(DMSwarmCellDMGetCoordinateFields(celldm, &Nfc, &coordFields));
1064: PetscCheck(Nfc == 1, PetscObjectComm((PetscObject)sw), PETSC_ERR_SUP, "We only support a single coordinate field right now, not %" PetscInt_FMT, Nfc);
1065: PetscCall(DMSwarmGetField(sw, coordFields[0], NULL, NULL, (void **)&x));
1066: PetscCall(DMSwarmGetField(sw, "velocity", NULL, NULL, (void **)&v));
1067: PetscCall(DMSwarmGetField(sw, "w_q", NULL, NULL, (void **)&w));
1068: PetscCall(DMSwarmGetField(*rsw, "w_q", NULL, NULL, (void **)&rw));
1069: PetscCall(DMSwarmSortGetAccess(sw));
1070: PetscCall(DMSwarmSortGetAccess(*rsw));
1071: PetscCall(DMGetBoundingBox(vdm, vmin, vmax));
1072: PetscCall(DMGetCoordinatesLocalSetUp(xdm));
1073: for (PetscInt i = 0; i < Np; ++i) rw[i] = 0.;
1074: for (PetscInt c = 0; c < xcEnd - xcStart; ++c) {
1075: PetscInt *pidx, Npc;
1076: PetscInt *rpidx, rNpc;
1078: PetscCall(DMSwarmSortGetPointsPerCell(sw, c, &Npc, &pidx));
1079: for (PetscInt q = 0; q < Npc; ++q) {
1080: const PetscInt p = pidx[q];
1081: const PetscReal wp = w[p];
1082: PetscReal Wx[3], Wv[3];
1083: PetscInt xs[3], vs[3];
1085: // Determine the containing cell
1086: for (PetscInt d = 0; d < dim; ++d) {
1087: const PetscReal xp = x[p * dim + d];
1088: const PetscReal vp = v[p * dim + d];
1090: xs[d] = PetscFloorReal((xp - xmin[d]) / hx[d]);
1091: vs[d] = PetscFloorReal((vp - vmin[d]) / hv[d]);
1092: }
1093: // Loop over all grid points within 2 spacings of the particle
1094: if (debug > 2) {
1095: PetscCall(PetscPrintf(PETSC_COMM_SELF, "Interpolating particle %" PetscInt_FMT " wt %g (%g, %g, %g, %g) (%" PetscInt_FMT ", %" PetscInt_FMT ", %" PetscInt_FMT ", %" PetscInt_FMT ")\n", p, (double)wp, (double)PetscRealPart(x[p * dim + 0]), xcdim > 1 ? (double)PetscRealPart(x[p * xcdim + 1]) : 0., (double)PetscRealPart(v[p * vcdim + 0]), vcdim > 1 ? (double)PetscRealPart(v[p * vcdim + 1]) : 0., xs[0], xs[1], vs[0], vs[1]));
1096: }
1097: for (PetscInt xi = xs[0] - 1; xi < xs[0] + 3; ++xi) {
1098: // Treat xi as periodic
1099: const PetscInt xip = xi < 0 ? xi + xend[0] : (xi >= xend[0] ? xi - xend[0] : xi);
1100: PetscCall(W_3_Interpolation_Private((xmin[0] + (xi + 0.5) * hx[0] - x[p * dim + 0]) / hx[0], &Wx[0]));
1101: for (PetscInt xj = PetscMax(xs[1] - 1, 0); xj < PetscMin(xs[1] + 3, xend[1]); ++xj) {
1102: if (xcdim > 1) PetscCall(W_3_Interpolation_Private((xmin[1] + (xj + 0.5) * hx[1] - x[p * dim + 1]) / hx[1], &Wx[1]));
1103: else Wx[1] = 1.;
1104: for (PetscInt vi = PetscMax(vs[0] - 1, 0); vi < PetscMin(vs[0] + 3, vend[0]); ++vi) {
1105: PetscCall(W_3_Interpolation_Private((vmin[0] + (vi + 0.5) * hv[0] - v[p * dim + 0]) / hv[0], &Wv[0]));
1106: for (PetscInt vj = PetscMax(vs[1] - 1, 0); vj < PetscMin(vs[1] + 3, vend[1]); ++vj) {
1107: const PetscInt rc = xip * xend[1] + xj;
1108: const PetscInt rv = vi * vend[1] + vj;
1110: PetscCall(DMSwarmSortGetPointsPerCell(*rsw, rc, &rNpc, &rpidx));
1111: if (vcdim > 1) PetscCall(W_3_Interpolation_Private((vmin[1] + (vj + 0.5) * hv[1] - v[p * dim + 1]) / hv[1], &Wv[1]));
1112: else Wv[1] = 1.;
1113: if (debug > 2)
1114: PetscCall(PetscPrintf(PETSC_COMM_SELF, " Depositing on particle (%" PetscInt_FMT ", %" PetscInt_FMT ", %" PetscInt_FMT ", %" PetscInt_FMT ") w = %g (%g, %g, %g, %g)\n", xi, xj, vi, vj, (double)(wp * Wx[0] * Wx[1] * Wv[0] * Wv[1]), (double)Wx[0], (double)Wx[1], (double)Wv[0], (double)Wv[1]));
1115: // Add weight to new particles from original particle using interpolation function
1116: PetscCheck(rNpc == vend[0] * vend[1], PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid particle velocity binning");
1117: const PetscInt rp = rpidx[rv];
1118: PetscCheck(rp >= 0 && rp < Np, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Particle index %" PetscInt_FMT " not in [0, %" PetscInt_FMT ")", rp, Np);
1119: rw[rp] += wp * Wx[0] * Wx[1] * Wv[0] * Wv[1];
1120: if (debug > 2) PetscCall(PetscPrintf(PETSC_COMM_SELF, " Adding weight %g (%g) to particle %" PetscInt_FMT "\n", (double)(wp * Wx[0] * Wx[1] * Wv[0] * Wv[1]), (double)PetscRealPart(rw[rp]), rp));
1121: PetscCall(DMSwarmSortRestorePointsPerCell(*rsw, rc, &rNpc, &rpidx));
1122: }
1123: }
1124: }
1125: }
1126: }
1127: PetscCall(DMSwarmSortRestorePointsPerCell(sw, c, &Npc, &pidx));
1128: }
1129: PetscCall(DMSwarmSortRestoreAccess(sw));
1130: PetscCall(DMSwarmSortRestoreAccess(*rsw));
1131: PetscCall(DMSwarmRestoreField(sw, coordFields[0], NULL, NULL, (void **)&x));
1132: PetscCall(DMSwarmRestoreField(sw, "velocity", NULL, NULL, (void **)&v));
1133: PetscCall(DMSwarmRestoreField(sw, "w_q", NULL, NULL, (void **)&w));
1134: PetscCall(DMSwarmRestoreField(*rsw, "w_q", NULL, NULL, (void **)&rw));
1136: if (debug) {
1137: Vec w;
1139: PetscCall(DMSwarmCreateGlobalVectorFromField(sw, coordFields[0], &w));
1140: PetscCall(VecViewFromOptions(w, NULL, "-remap_view"));
1141: PetscCall(DMSwarmDestroyGlobalVectorFromField(sw, coordFields[0], &w));
1142: PetscCall(DMSwarmCreateGlobalVectorFromField(sw, "velocity", &w));
1143: PetscCall(VecViewFromOptions(w, NULL, "-remap_view"));
1144: PetscCall(DMSwarmDestroyGlobalVectorFromField(sw, "velocity", &w));
1145: PetscCall(DMSwarmCreateGlobalVectorFromField(sw, "w_q", &w));
1146: PetscCall(VecViewFromOptions(w, NULL, "-remap_view"));
1147: PetscCall(DMSwarmDestroyGlobalVectorFromField(sw, "w_q", &w));
1148: PetscCall(DMSwarmCreateGlobalVectorFromField(*rsw, coordFields[0], &w));
1149: PetscCall(VecViewFromOptions(w, NULL, "-remap_view"));
1150: PetscCall(DMSwarmDestroyGlobalVectorFromField(*rsw, coordFields[0], &w));
1151: PetscCall(DMSwarmCreateGlobalVectorFromField(*rsw, "velocity", &w));
1152: PetscCall(VecViewFromOptions(w, NULL, "-remap_view"));
1153: PetscCall(DMSwarmDestroyGlobalVectorFromField(*rsw, "velocity", &w));
1154: PetscCall(DMSwarmCreateGlobalVectorFromField(*rsw, "w_q", &w));
1155: PetscCall(VecViewFromOptions(w, NULL, "-remap_view"));
1156: PetscCall(DMSwarmDestroyGlobalVectorFromField(*rsw, "w_q", &w));
1157: }
1158: PetscFunctionReturn(PETSC_SUCCESS);
1159: }
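1160: // Pointwise objective returning |v|^2 * u, where v is the velocity half of the phase-space coordinate; used to recompute the energy moment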
1161: static void f0_v2(PetscInt dim, PetscInt Nf, PetscInt NfAux, const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[], const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[], PetscReal t, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar f0[])
1162: {
1163: PetscInt d;
1165: f0[0] = 0.0;
1166: for (d = dim / 2; d < dim; ++d) f0[0] += PetscSqr(x[d]) * u[0];
1167: }
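1168: // PFAK remap: deposit particle weights into an FE field on the remap mesh (rhs = M_p^T w), then solve for weights on a fresh regular particle layout using its mass matrix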
1169: static PetscErrorCode DMSwarmRemap_PFAK_Internal(DM sw, DM *rsw)
1170: {
1171: DM xdm, vdm, rdm;
1172: DMSwarmCellDM rcelldm;
1173: Mat M_p, rM_p, rPM_p;
1174: Vec w, rw, rhs;
1175: PetscInt Nf;
1176: const char **fields;
1178: PetscFunctionBegin;
1179: // Create a new centroid swarm without weights
1180: PetscCall(DMSwarmGetCellDM(sw, &xdm));
1181: PetscCall(DMSwarmSetCellDMActive(sw, "velocity"));
1182: PetscCall(DMSwarmGetCellDMActive(sw, &rcelldm));
1183: PetscCall(DMSwarmCellDMGetDM(rcelldm, &vdm));
1184: PetscCall(DMSwarmDuplicate(sw, rsw));
1185: // Set remap cell DM
1186: PetscCall(DMSwarmSetCellDMActive(sw, "remap"));
1187: PetscCall(DMSwarmGetCellDMActive(sw, &rcelldm));
1188: PetscCall(DMSwarmCellDMGetFields(rcelldm, &Nf, &fields));
1189: PetscCheck(Nf == 1, PetscObjectComm((PetscObject)sw), PETSC_ERR_ARG_WRONG, "We only allow a single weight field, not %" PetscInt_FMT, Nf);
1190: PetscCall(DMSwarmGetCellDM(sw, &rdm));
1191: PetscCall(DMGetGlobalVector(rdm, &rhs));
1192: PetscCall(DMSwarmMigrate(sw, PETSC_FALSE)); // Bin particles in remap mesh
1193: // Compute rhs = M_p w_p
1194: PetscCall(DMCreateMassMatrix(sw, rdm, &M_p));
1195: PetscCall(DMSwarmCreateGlobalVectorFromField(sw, fields[0], &w));
1196: PetscCall(VecViewFromOptions(w, NULL, "-remap_w_view"));
1197: PetscCall(MatMultTranspose(M_p, w, rhs));
1198: PetscCall(VecViewFromOptions(rhs, NULL, "-remap_rhs_view"));
1199: PetscCall(DMSwarmDestroyGlobalVectorFromField(sw, fields[0], &w));
1200: PetscCall(MatDestroy(&M_p));
1201: {
1202: KSP ksp;
1203: Mat M_f;
1204: Vec u_f;
1205: PetscReal mom[4];
1206: PetscInt cdim;
1207: const char *prefix;
1209: PetscCall(DMGetCoordinateDim(rdm, &cdim));
1210: PetscCall(DMCreateMassMatrix(rdm, rdm, &M_f));
1211: PetscCall(DMGetGlobalVector(rdm, &u_f));
1213: PetscCall(KSPCreate(PetscObjectComm((PetscObject)sw), &ksp));
1214: PetscCall(PetscObjectGetOptionsPrefix((PetscObject)sw, &prefix));
1215: PetscCall(KSPSetOptionsPrefix(ksp, prefix));
1216: PetscCall(KSPAppendOptionsPrefix(ksp, "ptof_"));
1217: PetscCall(KSPSetFromOptions(ksp));
1219: PetscCall(KSPSetOperators(ksp, M_f, M_f));
1220: PetscCall(KSPSolve(ksp, rhs, u_f));
1221: PetscCall(KSPDestroy(&ksp));
1222: PetscCall(VecViewFromOptions(u_f, NULL, "-remap_uf_view"));
1224: PetscCall(DMPlexComputeMoments(rdm, u_f, mom));
1225: // The energy moment from DMPlexComputeMoments() is not correct here since it uses (x^2 + v^2), so recompute it below using only v^2
1226: PetscDS rds;
1227: PetscScalar rmom;
1228: void *ctx;
1230: PetscCall(DMGetDS(rdm, &rds));
1231: PetscCall(DMGetApplicationContext(rdm, &ctx));
1232: PetscCall(PetscDSSetObjective(rds, 0, &f0_v2));
1233: PetscCall(DMPlexComputeIntegralFEM(rdm, u_f, &rmom, ctx));
1234: mom[1 + cdim] = PetscRealPart(rmom);
1236: PetscCall(DMRestoreGlobalVector(rdm, &u_f));
1237: PetscCall(PetscPrintf(PETSC_COMM_SELF, "========== PFAK u_f ==========\n"));
1238: PetscCall(PetscPrintf(PETSC_COMM_SELF, "Mom 0: %g\n", (double)mom[0]));
1239: PetscCall(PetscPrintf(PETSC_COMM_SELF, "Mom x: %g\n", (double)mom[1 + 0]));
1240: PetscCall(PetscPrintf(PETSC_COMM_SELF, "Mom v: %g\n", (double)mom[1 + 1]));
1241: PetscCall(PetscPrintf(PETSC_COMM_SELF, "Mom 2: %g\n", (double)mom[1 + cdim]));
1242: PetscCall(MatDestroy(&M_f));
1243: }
1244: // Create Remap particle mass matrix M_p
1245: PetscInt xcStart, xcEnd, vcStart, vcEnd, cStart, cEnd, r;
1247: PetscCall(DMSwarmSetCellDMActive(*rsw, "remap"));
1248: PetscCall(DMPlexGetHeightStratum(xdm, 0, &xcStart, &xcEnd));
1249: PetscCall(DMPlexGetHeightStratum(vdm, 0, &vcStart, &vcEnd));
1250: PetscCall(DMPlexGetHeightStratum(rdm, 0, &cStart, &cEnd));
1251: r = (PetscInt)PetscSqrtReal(((xcEnd - xcStart) * (vcEnd - vcStart)) / (cEnd - cStart));
1252: PetscCall(InitializeParticles_Regular(*rsw, r));
1253: PetscCall(DMSwarmMigrate(*rsw, PETSC_FALSE)); // Bin particles in remap mesh
1254: PetscCall(DMCreateMassMatrix(*rsw, rdm, &rM_p));
1255: PetscCall(MatViewFromOptions(rM_p, NULL, "-rM_p_view"));
1256: // Solve M_p w_r = rhs for the remapped particle weights
1257: {
1258: KSP ksp;
1259: PC pc;
1260: const char *prefix;
1261: PetscBool isBjacobi;
1263: PetscCall(KSPCreate(PetscObjectComm((PetscObject)sw), &ksp));
1264: PetscCall(PetscObjectGetOptionsPrefix((PetscObject)sw, &prefix));
1265: PetscCall(KSPSetOptionsPrefix(ksp, prefix));
1266: PetscCall(KSPAppendOptionsPrefix(ksp, "ftop_"));
1267: PetscCall(KSPSetFromOptions(ksp));
1269: PetscCall(KSPGetPC(ksp, &pc));
1270: PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCBJACOBI, &isBjacobi));
1271: if (isBjacobi) {
1272: PetscCall(DMSwarmCreateMassMatrixSquare(sw, rdm, &rPM_p));
1273: } else {
1274: rPM_p = rM_p;
1275: PetscCall(PetscObjectReference((PetscObject)rPM_p));
1276: }
1277: PetscCall(KSPSetOperators(ksp, rM_p, rPM_p));
1278: PetscCall(DMSwarmCreateGlobalVectorFromField(*rsw, fields[0], &rw));
1279: PetscCall(KSPSolveTranspose(ksp, rhs, rw));
1280: PetscCall(VecViewFromOptions(rw, NULL, "-remap_rw_view"));
1281: PetscCall(DMSwarmDestroyGlobalVectorFromField(*rsw, fields[0], &rw));
1282: PetscCall(KSPDestroy(&ksp));
1283: PetscCall(MatDestroy(&rPM_p));
1284: PetscCall(MatDestroy(&rM_p));
1285: }
1286: PetscCall(DMRestoreGlobalVector(rdm, &rhs));
1288: // Restore original cell DM
1289: PetscCall(DMSwarmSetCellDMActive(sw, "space"));
1290: PetscCall(DMSwarmSetCellDMActive(*rsw, "space"));
1291: PetscCall(DMSwarmMigrate(*rsw, PETSC_FALSE)); // Bin particles in spatial mesh
1292: PetscFunctionReturn(PETSC_SUCCESS);
1293: }
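1294: // Compare the zeroth, first, and second velocity moments of the original and remapped swarms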
1295: static PetscErrorCode DMSwarmRemapMonitor_Internal(DM sw, DM rsw)
1296: {
1297: PetscReal mom[4], rmom[4];
1298: PetscInt cdim;
1300: PetscFunctionBegin;
1301: PetscCall(DMGetCoordinateDim(sw, &cdim));
1302: PetscCall(DMSwarmComputeMoments(sw, "velocity", "w_q", mom));
1303: PetscCall(DMSwarmComputeMoments(rsw, "velocity", "w_q", rmom));
1304: PetscCall(PetscPrintf(PETSC_COMM_SELF, "========== Remapped ==========\n"));
1305: PetscCall(PetscPrintf(PETSC_COMM_SELF, "Mom 0: %g --> %g\n", (double)mom[0], (double)rmom[0]));
1306: PetscCall(PetscPrintf(PETSC_COMM_SELF, "Mom 1: %g --> %g\n", (double)mom[1], (double)rmom[1]));
1307: PetscCall(PetscPrintf(PETSC_COMM_SELF, "Mom 2: %g --> %g\n", (double)mom[1 + cdim], (double)rmom[1 + cdim]));
1308: PetscFunctionReturn(PETSC_SUCCESS);
1309: }
1311: /*@
1312: DMSwarmRemap - Project the swarm fields onto a new set of particles
1314: Collective
1316: Input Parameter:
1317: . sw - The `DMSWARM` object
1319: Level: beginner
1321: .seealso: [](ch_dmbase), `DMSWARM`, `DMSwarmMigrate()`, `DMSwarmCreate()`
1322: @*/
1323: PetscErrorCode DMSwarmRemap(DM sw)
1324: {
1325: DM_Swarm *swarm = (DM_Swarm *)sw->data;
1326: DM rsw;
1328: PetscFunctionBegin;
1329: switch (swarm->remap_type) {
1330: case DMSWARM_REMAP_NONE:
1331: PetscFunctionReturn(PETSC_SUCCESS);
1332: case DMSWARM_REMAP_COLELLA:
1333: PetscCall(DMSwarmRemap_Colella_Internal(sw, &rsw));
1334: break;
1335: case DMSWARM_REMAP_PFAK:
1336: PetscCall(DMSwarmRemap_PFAK_Internal(sw, &rsw));
1337: break;
1338: default:
1339: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "No remap algorithm %s", DMSwarmRemapTypeNames[swarm->remap_type]);
1340: }
1341: PetscCall(DMSwarmRemapMonitor_Internal(sw, rsw));
1342: PetscCall(DMSwarmReplace(sw, &rsw));
1343: PetscFunctionReturn(PETSC_SUCCESS);
1344: }