Actual source code: characteristic.c
petsc-3.11.4 2019-09-28

#include <petsc/private/characteristicimpl.h>
#include <petscdmda.h>
#include <petscviewer.h>

PetscClassId  CHARACTERISTIC_CLASSID;
PetscLogEvent CHARACTERISTIC_SetUp, CHARACTERISTIC_Solve, CHARACTERISTIC_QueueSetup, CHARACTERISTIC_DAUpdate;
PetscLogEvent CHARACTERISTIC_HalfTimeLocal, CHARACTERISTIC_HalfTimeRemote, CHARACTERISTIC_HalfTimeExchange;
PetscLogEvent CHARACTERISTIC_FullTimeLocal, CHARACTERISTIC_FullTimeRemote, CHARACTERISTIC_FullTimeExchange;

/*
   Contains the list of registered characteristic routines
*/
PetscFunctionList CharacteristicList              = NULL;
PetscBool         CharacteristicRegisterAllCalled = PETSC_FALSE;

PetscErrorCode DMDAGetNeighborsRank(DM, PetscMPIInt []);
PetscInt       DMDAGetNeighborRelative(DM, PetscReal, PetscReal);
PetscErrorCode DMDAMapToPeriodicDomain(DM, PetscScalar []);

PetscErrorCode CharacteristicHeapSort(Characteristic, Queue, PetscInt);
PetscErrorCode CharacteristicSiftDown(Characteristic, Queue, PetscInt, PetscInt);

PetscErrorCode CharacteristicView(Characteristic c, PetscViewer viewer)
{
  PetscBool iascii;

  if (!viewer) {
    PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)c), &viewer);
  }

  PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);
  if (!iascii) {
    if (c->ops->view) {
      (*c->ops->view)(c, viewer);
    }
  }
  return(0);
}

PetscErrorCode CharacteristicDestroy(Characteristic *c)
{
  if (!*c) return(0);
  if (--((PetscObject)(*c))->refct > 0) return(0);

  if ((*c)->ops->destroy) {
    (*(*c)->ops->destroy)((*c));
  }
  MPI_Type_free(&(*c)->itemType);
  PetscFree((*c)->queue);
  PetscFree((*c)->queueLocal);
  PetscFree((*c)->queueRemote);
  PetscFree((*c)->neighbors);
  PetscFree((*c)->needCount);
  PetscFree((*c)->localOffsets);
  PetscFree((*c)->fillCount);
  PetscFree((*c)->remoteOffsets);
  PetscFree((*c)->request);
  PetscFree((*c)->status);
  PetscHeaderDestroy(c);
  return(0);
}

PetscErrorCode CharacteristicCreate(MPI_Comm comm, Characteristic *c)
{
  Characteristic newC;

  *c = NULL;
  CharacteristicInitializePackage();

  PetscHeaderCreate(newC, CHARACTERISTIC_CLASSID, "Characteristic", "Characteristic", "Characteristic", comm, CharacteristicDestroy, CharacteristicView);
  *c = newC;

  newC->structured          = PETSC_TRUE;
  newC->numIds              = 0;
  newC->velocityDA          = NULL;
  newC->velocity            = NULL;
  newC->velocityOld         = NULL;
  newC->numVelocityComp     = 0;
  newC->velocityComp        = NULL;
  newC->velocityInterp      = NULL;
  newC->velocityInterpLocal = NULL;
  newC->velocityCtx         = NULL;
  newC->fieldDA             = NULL;
  newC->field               = NULL;
  newC->numFieldComp        = 0;
  newC->fieldComp           = NULL;
  newC->fieldInterp         = NULL;
  newC->fieldInterpLocal    = NULL;
  newC->fieldCtx            = NULL;
  newC->itemType            = 0;
  newC->queue               = NULL;
  newC->queueSize           = 0;
  newC->queueMax            = 0;
  newC->queueLocal          = NULL;
  newC->queueLocalSize      = 0;
  newC->queueLocalMax       = 0;
  newC->queueRemote         = NULL;
  newC->queueRemoteSize     = 0;
  newC->queueRemoteMax      = 0;
  newC->numNeighbors        = 0;
  newC->neighbors           = NULL;
  newC->needCount           = NULL;
  newC->localOffsets        = NULL;
  newC->fillCount           = NULL;
  newC->remoteOffsets       = NULL;
  newC->request             = NULL;
  newC->status              = NULL;
  return(0);
}
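
/*
   Typical usage (sketch): create the context, attach velocity and field
   interpolation, then advance one step per time step.  The objects named
   below (velDA, fieldDA, vel, velOld, phi, phiNew, velComp, fieldComp,
   MyVelInterpLocal, MyFieldInterpLocal) are application-provided
   placeholders, not part of this file:

     Characteristic c;
     CharacteristicCreate(PETSC_COMM_WORLD, &c);
     CharacteristicSetVelocityInterpolationLocal(c, velDA, vel, velOld, 2, velComp, MyVelInterpLocal, NULL);
     CharacteristicSetFieldInterpolationLocal(c, fieldDA, phi, 1, fieldComp, MyFieldInterpLocal, NULL);
     CharacteristicSolve(c, dt, phiNew);
     CharacteristicDestroy(&c);
*/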

/*@C
   CharacteristicSetType - Builds Characteristic for a particular solver.

   Logically Collective on Characteristic

   Input Parameters:
+  c - the method of characteristics context
-  type - a known method

   Options Database Key:
.  -characteristic_type <method> - Sets the method; use -help for a list of available methods

   Notes:
   See "include/petsccharacteristic.h" for available methods

   Normally, it is best to use the CharacteristicSetFromOptions() command and
   then set the Characteristic type from the options database rather than by using
   this routine.  Using the options database provides the user with
   maximum flexibility in evaluating the many available methods.
   The CharacteristicSetType() routine is provided for those situations where it
   is necessary to set the solver independently of the command
   line or options database.  This might be the case, for example, when
   the choice of solver changes during the execution of the
   program, and the user's application is taking responsibility for
   choosing the appropriate method.  In other words, this routine is
   not for beginners.

   Level: intermediate

.keywords: Characteristic, set, method

.seealso: CharacteristicType
@*/
PetscErrorCode CharacteristicSetType(Characteristic c, CharacteristicType type)
{
  PetscErrorCode (*r)(Characteristic);
  PetscBool      match;

  PetscObjectTypeCompare((PetscObject) c, type, &match);
  if (match) return(0);

  if (c->data) {
    /* destroy the old private Characteristic context */
    (*c->ops->destroy)(c);
    c->ops->destroy = NULL;
    c->data         = NULL;
  }

  PetscFunctionListFind(CharacteristicList, type, &r);
  if (!r) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown Characteristic type given: %s", type);

  c->setupcalled = 0;
  (*r)(c);
  PetscObjectChangeTypeName((PetscObject) c, type);
  return(0);
}

/*@
   CharacteristicSetUp - Sets up the internal data structures for the
   later use of the solver.

   Collective on Characteristic

   Input Parameter:
.  c - the method of characteristics context obtained from CharacteristicCreate()

   Level: developer

.keywords: Characteristic, setup

.seealso: CharacteristicCreate(), CharacteristicSolve(), CharacteristicDestroy()
@*/
PetscErrorCode CharacteristicSetUp(Characteristic c)
{
  if (!((PetscObject)c)->type_name) {
    CharacteristicSetType(c, CHARACTERISTICDA);
  }

  if (c->setupcalled == 2) return(0);

  PetscLogEventBegin(CHARACTERISTIC_SetUp, c, 0, 0, 0);
  if (!c->setupcalled) {
    (*c->ops->setup)(c);
  }
  PetscLogEventEnd(CHARACTERISTIC_SetUp, c, 0, 0, 0);
  c->setupcalled = 2;
  return(0);
}

/*@C
   CharacteristicRegister - Adds a solver to the method of characteristics package.

   Not Collective

   Input Parameters:
+  name_solver - name of a new user-defined solver
-  routine_create - routine to create method context

   Sample usage:
.vb
   CharacteristicRegister("my_char", MyCharCreate);
.ve

   Then, your Characteristic type can be chosen with the procedural interface via
.vb
   Characteristic c;
   CharacteristicCreate(comm, &c);
   CharacteristicSetType(c, "my_char");
.ve
   or at runtime via the option
.vb
   -characteristic_type my_char
.ve

   Notes:
   CharacteristicRegister() may be called multiple times to add several user-defined solvers.

.keywords: Characteristic, register

.seealso: CharacteristicRegisterAll(), CharacteristicRegisterDestroy()

   Level: advanced
@*/
PetscErrorCode CharacteristicRegister(const char sname[], PetscErrorCode (*function)(Characteristic))
{
  CharacteristicInitializePackage();
  PetscFunctionListAdd(&CharacteristicList, sname, function);
  return(0);
}

PetscErrorCode CharacteristicSetVelocityInterpolation(Characteristic c, DM da, Vec v, Vec vOld, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(Vec, PetscReal[], PetscInt, PetscInt[], PetscScalar[], void*), void *ctx)
{
  c->velocityDA      = da;
  c->velocity        = v;
  c->velocityOld     = vOld;
  c->numVelocityComp = numComponents;
  c->velocityComp    = components;
  c->velocityInterp  = interp;
  c->velocityCtx     = ctx;
  return(0);
}

PetscErrorCode CharacteristicSetVelocityInterpolationLocal(Characteristic c, DM da, Vec v, Vec vOld, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(void*, PetscReal[], PetscInt, PetscInt[], PetscScalar[], void*), void *ctx)
{
  c->velocityDA          = da;
  c->velocity            = v;
  c->velocityOld         = vOld;
  c->numVelocityComp     = numComponents;
  c->velocityComp        = components;
  c->velocityInterpLocal = interp;
  c->velocityCtx         = ctx;
  return(0);
}
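
/*
   The "local" interpolation callback receives the ghosted DMDA array
   (obtained in CharacteristicSolve() via DMDAVecGetArray() and passed as
   void*) together with fractional grid indices, and must fill
   velocity[0..numComp-1].  A minimal bilinear sketch, assuming the
   velocity DMDA holds one { u, v } struct per node; Field,
   MyVelInterpLocal, and the indexing below are illustrative placeholders:

     typedef struct { PetscScalar u, v; } Field;

     PetscErrorCode MyVelInterpLocal(void *varray, PetscReal ij[], PetscInt numComp, PetscInt components[], PetscScalar velocity[], void *ctx)
     {
       Field   **va = (Field**) varray;
       PetscInt  i  = (PetscInt) PetscFloorReal(ij[0]);
       PetscInt  j  = (PetscInt) PetscFloorReal(ij[1]);
       PetscReal di = ij[0] - i, dj = ij[1] - j;

       velocity[0] = (1.-di)*(1.-dj)*va[j][i].u + di*(1.-dj)*va[j][i+1].u
                   + (1.-di)*dj*va[j+1][i].u    + di*dj*va[j+1][i+1].u;
       velocity[1] = (1.-di)*(1.-dj)*va[j][i].v + di*(1.-dj)*va[j][i+1].v
                   + (1.-di)*dj*va[j+1][i].v    + di*dj*va[j+1][i+1].v;
       return 0;
     }
*/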

PetscErrorCode CharacteristicSetFieldInterpolation(Characteristic c, DM da, Vec v, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(Vec, PetscReal[], PetscInt, PetscInt[], PetscScalar[], void*), void *ctx)
{
#if 0
  if (numComponents > 2) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Fields with more than 2 components are not supported. Send mail to petsc-maint@mcs.anl.gov.");
#endif
  c->fieldDA      = da;
  c->field        = v;
  c->numFieldComp = numComponents;
  c->fieldComp    = components;
  c->fieldInterp  = interp;
  c->fieldCtx     = ctx;
  return(0);
}

PetscErrorCode CharacteristicSetFieldInterpolationLocal(Characteristic c, DM da, Vec v, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(void*, PetscReal[], PetscInt, PetscInt[], PetscScalar[], void*), void *ctx)
{
#if 0
  if (numComponents > 2) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Fields with more than 2 components are not supported. Send mail to petsc-maint@mcs.anl.gov.");
#endif
  c->fieldDA          = da;
  c->field            = v;
  c->numFieldComp     = numComponents;
  c->fieldComp        = components;
  c->fieldInterpLocal = interp;
  c->fieldCtx         = ctx;
  return(0);
}
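
/*
   A field callback has the same shape but interpolates the transported
   field; the components[] array selects which dof of a multi-component
   DMDA to sample.  Sketch for a single-dof field stored as PetscScalar**
   (MyFieldInterpLocal is an illustrative placeholder):

     PetscErrorCode MyFieldInterpLocal(void *farray, PetscReal ij[], PetscInt numComp, PetscInt components[], PetscScalar values[], void *ctx)
     {
       PetscScalar **f  = (PetscScalar**) farray;
       PetscInt      i  = (PetscInt) PetscFloorReal(ij[0]);
       PetscInt      j  = (PetscInt) PetscFloorReal(ij[1]);
       PetscReal     di = ij[0] - i, dj = ij[1] - j;

       values[0] = (1.-di)*(1.-dj)*f[j][i] + di*(1.-dj)*f[j][i+1]
                 + (1.-di)*dj*f[j+1][i]    + di*dj*f[j+1][i+1];
       return 0;
     }
*/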

PetscErrorCode CharacteristicSolve(Characteristic c, PetscReal dt, Vec solution)
{
  CharacteristicPointDA2D Qi;
  DM                      da = c->velocityDA;
  Vec                     velocityLocal, velocityLocalOld;
  Vec                     fieldLocal;
  DMDALocalInfo           info;
  PetscScalar             **solArray;
  void                    *velocityArray;
  void                    *velocityArrayOld;
  void                    *fieldArray;
  PetscScalar             *interpIndices;
  PetscScalar             *velocityValues, *velocityValuesOld;
  PetscScalar             *fieldValues;
  PetscMPIInt             rank;
  PetscInt                dim;
  PetscMPIInt             neighbors[9];
  PetscInt                dof;
  PetscInt                gx, gy;
  PetscInt                n, is, ie, js, je, comp;

  c->queueSize = 0;
  MPI_Comm_rank(PetscObjectComm((PetscObject)c), &rank);
  DMDAGetNeighborsRank(da, neighbors);
  CharacteristicSetNeighbors(c, 9, neighbors);
  CharacteristicSetUp(c);

  /* global and local grid info */
  DMDAGetInfo(da, &dim, &gx, &gy, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0);
  DMDAGetLocalInfo(da, &info);
  is = info.xs; ie = info.xs+info.xm;
  js = info.ys; je = info.ys+info.ym;

  /* Allocation */
  PetscMalloc1(dim,                &interpIndices);
  PetscMalloc1(c->numVelocityComp, &velocityValues);
  PetscMalloc1(c->numVelocityComp, &velocityValuesOld);
  PetscMalloc1(c->numFieldComp,    &fieldValues);
  PetscLogEventBegin(CHARACTERISTIC_Solve, 0, 0, 0, 0);

  /* -----------------------------------------------------------------------
     PART 1, AT t-dt/2
     -----------------------------------------------------------------------*/
  PetscLogEventBegin(CHARACTERISTIC_QueueSetup, 0, 0, 0, 0);
  /* GET POSITION AT HALF TIME IN THE PAST */
  if (c->velocityInterpLocal) {
    DMGetLocalVector(c->velocityDA, &velocityLocal);
    DMGetLocalVector(c->velocityDA, &velocityLocalOld);
    DMGlobalToLocalBegin(c->velocityDA, c->velocity, INSERT_VALUES, velocityLocal);
    DMGlobalToLocalEnd(c->velocityDA, c->velocity, INSERT_VALUES, velocityLocal);
    DMGlobalToLocalBegin(c->velocityDA, c->velocityOld, INSERT_VALUES, velocityLocalOld);
    DMGlobalToLocalEnd(c->velocityDA, c->velocityOld, INSERT_VALUES, velocityLocalOld);
    DMDAVecGetArray(c->velocityDA, velocityLocal, &velocityArray);
    DMDAVecGetArray(c->velocityDA, velocityLocalOld, &velocityArrayOld);
  }
  PetscInfo(NULL, "Calculating position at t_{n - 1/2}\n");
  for (Qi.j = js; Qi.j < je; Qi.j++) {
    for (Qi.i = is; Qi.i < ie; Qi.i++) {
      interpIndices[0] = Qi.i;
      interpIndices[1] = Qi.j;
      if (c->velocityInterpLocal) {c->velocityInterpLocal(velocityArray, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx);}
      else {c->velocityInterp(c->velocity, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx);}
      Qi.x = Qi.i - velocityValues[0]*dt/2.0;
      Qi.y = Qi.j - velocityValues[1]*dt/2.0;

      /* Determine whether the position at t - dt/2 is local */
      Qi.proc = DMDAGetNeighborRelative(da, Qi.x, Qi.y);

      /* Check for periodic boundaries and move all periodic points back onto the domain */
      DMDAMapCoordsToPeriodicDomain(da, &(Qi.x), &(Qi.y));
      CharacteristicAddPoint(c, &Qi);
    }
  }
  PetscLogEventEnd(CHARACTERISTIC_QueueSetup, 0, 0, 0, 0);

  PetscLogEventBegin(CHARACTERISTIC_HalfTimeExchange, 0, 0, 0, 0);
  CharacteristicSendCoordinatesBegin(c);
  PetscLogEventEnd(CHARACTERISTIC_HalfTimeExchange, 0, 0, 0, 0);

  PetscLogEventBegin(CHARACTERISTIC_HalfTimeLocal, 0, 0, 0, 0);
  /* Calculate velocity at t_{n - 1/2} (local values) */
  PetscInfo(NULL, "Calculating local velocities at t_{n - 1/2}\n");
  for (n = 0; n < c->queueSize; n++) {
    Qi = c->queue[n];
    if (c->neighbors[Qi.proc] == rank) {
      interpIndices[0] = Qi.x;
      interpIndices[1] = Qi.y;
      if (c->velocityInterpLocal) {
        c->velocityInterpLocal(velocityArray, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx);
        c->velocityInterpLocal(velocityArrayOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx);
      } else {
        c->velocityInterp(c->velocity, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx);
        c->velocityInterp(c->velocityOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx);
      }
      /* Average the new and old velocities to approximate the velocity at t_{n - 1/2} */
      Qi.x = 0.5*(velocityValues[0] + velocityValuesOld[0]);
      Qi.y = 0.5*(velocityValues[1] + velocityValuesOld[1]);
    }
    c->queue[n] = Qi;
  }
  PetscLogEventEnd(CHARACTERISTIC_HalfTimeLocal, 0, 0, 0, 0);

  PetscLogEventBegin(CHARACTERISTIC_HalfTimeExchange, 0, 0, 0, 0);
  CharacteristicSendCoordinatesEnd(c);
  PetscLogEventEnd(CHARACTERISTIC_HalfTimeExchange, 0, 0, 0, 0);

  /* Calculate velocity at t_{n - 1/2} (fill remote requests) */
  PetscLogEventBegin(CHARACTERISTIC_HalfTimeRemote, 0, 0, 0, 0);
  PetscInfo1(NULL, "Calculating %d remote velocities at t_{n - 1/2}\n", c->queueRemoteSize);
  for (n = 0; n < c->queueRemoteSize; n++) {
    Qi = c->queueRemote[n];
    interpIndices[0] = Qi.x;
    interpIndices[1] = Qi.y;
    if (c->velocityInterpLocal) {
      c->velocityInterpLocal(velocityArray, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx);
      c->velocityInterpLocal(velocityArrayOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx);
    } else {
      c->velocityInterp(c->velocity, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx);
      c->velocityInterp(c->velocityOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx);
    }
    Qi.x = 0.5*(velocityValues[0] + velocityValuesOld[0]);
    Qi.y = 0.5*(velocityValues[1] + velocityValuesOld[1]);
    c->queueRemote[n] = Qi;
  }
  PetscLogEventEnd(CHARACTERISTIC_HalfTimeRemote, 0, 0, 0, 0);

  PetscLogEventBegin(CHARACTERISTIC_HalfTimeExchange, 0, 0, 0, 0);
  CharacteristicGetValuesBegin(c);
  CharacteristicGetValuesEnd(c);
  if (c->velocityInterpLocal) {
    DMDAVecRestoreArray(c->velocityDA, velocityLocal, &velocityArray);
    DMDAVecRestoreArray(c->velocityDA, velocityLocalOld, &velocityArrayOld);
    DMRestoreLocalVector(c->velocityDA, &velocityLocal);
    DMRestoreLocalVector(c->velocityDA, &velocityLocalOld);
  }
  PetscLogEventEnd(CHARACTERISTIC_HalfTimeExchange, 0, 0, 0, 0);

  /* -----------------------------------------------------------------------
     PART 2, AT t-dt
     -----------------------------------------------------------------------*/

  /* GET POSITION AT t_{n-1} (local values) */
  PetscLogEventBegin(CHARACTERISTIC_FullTimeLocal, 0, 0, 0, 0);
  PetscInfo(NULL, "Calculating position at t_{n-1}\n");
  for (n = 0; n < c->queueSize; n++) {
    Qi = c->queue[n];
    /* Qi.x and Qi.y now hold the velocity at t_{n - 1/2}; step back a full dt from the grid point */
    Qi.x = Qi.i - Qi.x*dt;
    Qi.y = Qi.j - Qi.y*dt;

    /* Determine whether the position at t-dt is local */
    Qi.proc = DMDAGetNeighborRelative(da, Qi.x, Qi.y);

    /* Check for periodic boundaries and move all periodic points back onto the domain */
    DMDAMapCoordsToPeriodicDomain(da, &(Qi.x), &(Qi.y));

    c->queue[n] = Qi;
  }
  PetscLogEventEnd(CHARACTERISTIC_FullTimeLocal, 0, 0, 0, 0);

  PetscLogEventBegin(CHARACTERISTIC_FullTimeExchange, 0, 0, 0, 0);
  CharacteristicSendCoordinatesBegin(c);
  PetscLogEventEnd(CHARACTERISTIC_FullTimeExchange, 0, 0, 0, 0);

  /* GET VALUE AT FULL TIME IN THE PAST (LOCAL REQUESTS) */
  PetscLogEventBegin(CHARACTERISTIC_FullTimeLocal, 0, 0, 0, 0);
  if (c->fieldInterpLocal) {
    DMGetLocalVector(c->fieldDA, &fieldLocal);
    DMGlobalToLocalBegin(c->fieldDA, c->field, INSERT_VALUES, fieldLocal);
    DMGlobalToLocalEnd(c->fieldDA, c->field, INSERT_VALUES, fieldLocal);
    DMDAVecGetArray(c->fieldDA, fieldLocal, &fieldArray);
  }
  PetscInfo(NULL, "Calculating local field at t_{n-1}\n");
  for (n = 0; n < c->queueSize; n++) {
    if (c->neighbors[c->queue[n].proc] == rank) {
      interpIndices[0] = c->queue[n].x;
      interpIndices[1] = c->queue[n].y;
      if (c->fieldInterpLocal) {c->fieldInterpLocal(fieldArray, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx);}
      else {c->fieldInterp(c->field, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx);}
      for (comp = 0; comp < c->numFieldComp; comp++) c->queue[n].field[comp] = fieldValues[comp];
    }
  }
  PetscLogEventEnd(CHARACTERISTIC_FullTimeLocal, 0, 0, 0, 0);

  PetscLogEventBegin(CHARACTERISTIC_FullTimeExchange, 0, 0, 0, 0);
  CharacteristicSendCoordinatesEnd(c);
  PetscLogEventEnd(CHARACTERISTIC_FullTimeExchange, 0, 0, 0, 0);

  /* GET VALUE AT FULL TIME IN THE PAST (REMOTE REQUESTS) */
  PetscLogEventBegin(CHARACTERISTIC_FullTimeRemote, 0, 0, 0, 0);
  PetscInfo1(NULL, "Calculating %d remote field points at t_{n-1}\n", c->queueRemoteSize);
  for (n = 0; n < c->queueRemoteSize; n++) {
    interpIndices[0] = c->queueRemote[n].x;
    interpIndices[1] = c->queueRemote[n].y;

    /* for debugging purposes */
    if (1) { /* hacked bounds test...let's do better */
      PetscScalar im = interpIndices[0]; PetscScalar jm = interpIndices[1];

      if ((im < (PetscScalar) is - 1.) || (im > (PetscScalar) ie) || (jm < (PetscScalar) js - 1.) || (jm > (PetscScalar) je)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_LIB, "Nonlocal point: (%g,%g)", im, jm);
    }

    if (c->fieldInterpLocal) {c->fieldInterpLocal(fieldArray, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx);}
    else {c->fieldInterp(c->field, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx);}
    for (comp = 0; comp < c->numFieldComp; comp++) c->queueRemote[n].field[comp] = fieldValues[comp];
  }
  PetscLogEventEnd(CHARACTERISTIC_FullTimeRemote, 0, 0, 0, 0);

  PetscLogEventBegin(CHARACTERISTIC_FullTimeExchange, 0, 0, 0, 0);
  CharacteristicGetValuesBegin(c);
  CharacteristicGetValuesEnd(c);
  if (c->fieldInterpLocal) {
    DMDAVecRestoreArray(c->fieldDA, fieldLocal, &fieldArray);
    DMRestoreLocalVector(c->fieldDA, &fieldLocal);
  }
  PetscLogEventEnd(CHARACTERISTIC_FullTimeExchange, 0, 0, 0, 0);

  /* Return the field of characteristics at t_{n-1} */
  PetscLogEventBegin(CHARACTERISTIC_DAUpdate, 0, 0, 0, 0);
  DMDAGetInfo(c->fieldDA, 0, 0, 0, 0, 0, 0, 0, &dof, 0, 0, 0, 0, 0);
  DMDAVecGetArray(c->fieldDA, solution, &solArray);
  for (n = 0; n < c->queueSize; n++) {
    Qi = c->queue[n];
    for (comp = 0; comp < c->numFieldComp; comp++) solArray[Qi.j][Qi.i*dof+c->fieldComp[comp]] = Qi.field[comp];
  }
  DMDAVecRestoreArray(c->fieldDA, solution, &solArray);
  PetscLogEventEnd(CHARACTERISTIC_DAUpdate, 0, 0, 0, 0);
  PetscLogEventEnd(CHARACTERISTIC_Solve, 0, 0, 0, 0);

  /* Cleanup */
  PetscFree(interpIndices);
  PetscFree(velocityValues);
  PetscFree(velocityValuesOld);
  PetscFree(fieldValues);
  return(0);
}
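
/*
   CharacteristicSolve() performs one semi-Lagrangian step: for each grid
   point it traces the characteristic back over dt using the mid-point
   velocity, interpolates the transported field at the departure point,
   and writes the result into "solution".  One step per time step
   (sketch; phi, phiNew, vel, velOld, and nsteps are application-provided
   placeholders):

     for (step = 0; step < nsteps; step++) {
       CharacteristicSolve(c, dt, phiNew);   // phiNew(x) = phi(x - u dt)
       VecCopy(phiNew, phi);                 // accept the step
       VecCopy(vel, velOld);                 // age the velocity
       // ... update vel for the new time level ...
     }
*/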

PetscErrorCode CharacteristicSetNeighbors(Characteristic c, PetscInt numNeighbors, PetscMPIInt neighbors[])
{
  c->numNeighbors = numNeighbors;
  PetscFree(c->neighbors);
  PetscMalloc1(numNeighbors, &c->neighbors);
  PetscMemcpy(c->neighbors, neighbors, numNeighbors * sizeof(PetscMPIInt));
  return(0);
}

PetscErrorCode CharacteristicAddPoint(Characteristic c, CharacteristicPointDA2D *point)
{
  if (c->queueSize >= c->queueMax) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Exceeded maximum queue size %d", c->queueMax);
  c->queue[c->queueSize++] = *point;
  return(0);
}

PetscErrorCode CharacteristicSendCoordinatesBegin(Characteristic c)
{
  PetscMPIInt rank, tag = 121;
  PetscInt    i, n;

  MPI_Comm_rank(PetscObjectComm((PetscObject)c), &rank);
  /* Sort the queue by owning neighbor so each neighbor's requests are contiguous */
  CharacteristicHeapSort(c, c->queue, c->queueSize);
  PetscMemzero(c->needCount, c->numNeighbors * sizeof(PetscInt));
  for (i = 0; i < c->queueSize; i++) c->needCount[c->queue[i].proc]++;
  c->fillCount[0] = 0;
  /* Exchange request counts with all neighbors (neighbor 0 is this process itself) */
  for (n = 1; n < c->numNeighbors; n++) {
    MPI_Irecv(&(c->fillCount[n]), 1, MPIU_INT, c->neighbors[n], tag, PetscObjectComm((PetscObject)c), &(c->request[n-1]));
  }
  for (n = 1; n < c->numNeighbors; n++) {
    MPI_Send(&(c->needCount[n]), 1, MPIU_INT, c->neighbors[n], tag, PetscObjectComm((PetscObject)c));
  }
  MPI_Waitall(c->numNeighbors-1, c->request, c->status);
  /* Initialize the remote queue */
  c->queueLocalMax  = c->localOffsets[0]  = 0;
  c->queueRemoteMax = c->remoteOffsets[0] = 0;
  for (n = 1; n < c->numNeighbors; n++) {
    c->remoteOffsets[n] = c->queueRemoteMax;
    c->queueRemoteMax  += c->fillCount[n];
    c->localOffsets[n]  = c->queueLocalMax;
    c->queueLocalMax   += c->needCount[n];
  }
  /* HACK BEGIN */
  for (n = 1; n < c->numNeighbors; n++) c->localOffsets[n] += c->needCount[0];
  c->needCount[0] = 0;
  /* HACK END */
  if (c->queueRemoteMax) {
    PetscMalloc1(c->queueRemoteMax, &c->queueRemote);
  } else c->queueRemote = NULL;
  c->queueRemoteSize = c->queueRemoteMax;

  /* Send and receive requests for values at t_{n - 1/2}, giving the coordinates for interpolation */
  for (n = 1; n < c->numNeighbors; n++) {
    PetscInfo2(NULL, "Receiving %d requests for values from proc %d\n", c->fillCount[n], c->neighbors[n]);
    MPI_Irecv(&(c->queueRemote[c->remoteOffsets[n]]), c->fillCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c), &(c->request[n-1]));
  }
  for (n = 1; n < c->numNeighbors; n++) {
    PetscInfo2(NULL, "Sending %d requests for values to proc %d\n", c->needCount[n], c->neighbors[n]);
    MPI_Send(&(c->queue[c->localOffsets[n]]), c->needCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c));
  }
  return(0);
}

PetscErrorCode CharacteristicSendCoordinatesEnd(Characteristic c)
{
#if 0
  PetscMPIInt rank;
  PetscInt    n;
#endif

  MPI_Waitall(c->numNeighbors-1, c->request, c->status);
#if 0
  MPI_Comm_rank(PetscObjectComm((PetscObject)c), &rank);
  for (n = 0; n < c->queueRemoteSize; n++) {
    if (c->neighbors[c->queueRemote[n].proc] == rank) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_PLIB, "This is messed up, n = %d proc = %d", n, c->queueRemote[n].proc);
  }
#endif
  return(0);
}

PetscErrorCode CharacteristicGetValuesBegin(Characteristic c)
{
  PetscMPIInt tag = 121;
  PetscInt    n;

  /* SEND AND RECEIVE FILLED REQUESTS for interpolated values */
  for (n = 1; n < c->numNeighbors; n++) {
    MPI_Irecv(&(c->queue[c->localOffsets[n]]), c->needCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c), &(c->request[n-1]));
  }
  for (n = 1; n < c->numNeighbors; n++) {
    MPI_Send(&(c->queueRemote[c->remoteOffsets[n]]), c->fillCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c));
  }
  return(0);
}

PetscErrorCode CharacteristicGetValuesEnd(Characteristic c)
{
  MPI_Waitall(c->numNeighbors-1, c->request, c->status);
  /* Free the queue of requests from other procs */
  PetscFree(c->queueRemote);
  return(0);
}

/*---------------------------------------------------------------------*/
/*
  Based on code from http://linux.wku.edu/~lamonml/algor/sort/heap.html
*/
PetscErrorCode CharacteristicHeapSort(Characteristic c, Queue queue, PetscInt size)
/*---------------------------------------------------------------------*/
{
  CharacteristicPointDA2D temp;
  PetscInt                n;

  if (0) { /* Check the order of the queue before sorting */
    PetscInfo(NULL, "Before Heap sort\n");
    for (n = 0; n < size; n++) {
      PetscInfo2(NULL, "%d %d\n", n, queue[n].proc);
    }
  }

  /* SORTING PHASE */
  for (n = (size / 2)-1; n >= 0; n--) {
    CharacteristicSiftDown(c, queue, n, size-1); /* Rich had size-1 here, Matt had size */
  }
  for (n = size-1; n >= 1; n--) {
    temp     = queue[0];
    queue[0] = queue[n];
    queue[n] = temp;
    CharacteristicSiftDown(c, queue, 0, n-1);
  }
  if (0) { /* Check the order of the queue after sorting */
    PetscInfo(NULL, "After Heap sort\n");
    for (n = 0; n < size; n++) {
      PetscInfo2(NULL, "%d %d\n", n, queue[n].proc);
    }
  }
  return(0);
}

/*---------------------------------------------------------------------*/
/*
  Based on code from http://linux.wku.edu/~lamonml/algor/sort/heap.html
*/
PetscErrorCode CharacteristicSiftDown(Characteristic c, Queue queue, PetscInt root, PetscInt bottom)
/*---------------------------------------------------------------------*/
{
  PetscBool               done = PETSC_FALSE;
  PetscInt                maxChild;
  CharacteristicPointDA2D temp;

  while ((root*2 <= bottom) && (!done)) {
    if (root*2 == bottom) maxChild = root * 2;
    else if (queue[root*2].proc > queue[root*2+1].proc) maxChild = root * 2;
    else maxChild = root * 2 + 1;

    if (queue[root].proc < queue[maxChild].proc) {
      temp            = queue[root];
      queue[root]     = queue[maxChild];
      queue[maxChild] = temp;
      root            = maxChild;
    } else done = PETSC_TRUE;
  }
  return(0);
}

/* [center, left, top-left, top, top-right, right, bottom-right, bottom, bottom-left] */
PetscErrorCode DMDAGetNeighborsRank(DM da, PetscMPIInt neighbors[])
{
  DMBoundaryType bx, by;
  PetscBool      IPeriodic = PETSC_FALSE, JPeriodic = PETSC_FALSE;
  MPI_Comm       comm;
  PetscMPIInt    rank;
  PetscInt       **procs, pi, pj, pim, pip, pjm, pjp, PI, PJ;

  PetscObjectGetComm((PetscObject) da, &comm);
  MPI_Comm_rank(comm, &rank);
  DMDAGetInfo(da, 0, 0, 0, 0, &PI, &PJ, 0, 0, 0, &bx, &by, 0, 0);

  if (bx == DM_BOUNDARY_PERIODIC) IPeriodic = PETSC_TRUE;
  if (by == DM_BOUNDARY_PERIODIC) JPeriodic = PETSC_TRUE;

  neighbors[0] = rank;
  rank = 0;
  /* Build the PI x PJ process grid; DMDA lays ranks out row-major */
  PetscMalloc1(PJ, &procs);
  for (pj = 0; pj < PJ; pj++) {
    PetscMalloc1(PI, &(procs[pj]));
    for (pi = 0; pi < PI; pi++) {
      procs[pj][pi] = rank;
      rank++;
    }
  }

  pi  = neighbors[0] % PI;
  pj  = neighbors[0] / PI;
  pim = pi-1; if (pim < 0) pim = PI-1;
  pip = (pi+1) % PI;
  pjm = pj-1; if (pjm < 0) pjm = PJ-1;
  pjp = (pj+1) % PJ;

  neighbors[1] = procs[pj] [pim];
  neighbors[2] = procs[pjp][pim];
  neighbors[3] = procs[pjp][pi];
  neighbors[4] = procs[pjp][pip];
  neighbors[5] = procs[pj] [pip];
  neighbors[6] = procs[pjm][pip];
  neighbors[7] = procs[pjm][pi];
  neighbors[8] = procs[pjm][pim];

  /* For non-periodic directions, boundary processes point back at themselves */
  if (!IPeriodic) {
    if (pi == 0)    neighbors[1] = neighbors[2] = neighbors[8] = neighbors[0];
    if (pi == PI-1) neighbors[4] = neighbors[5] = neighbors[6] = neighbors[0];
  }

  if (!JPeriodic) {
    if (pj == 0)    neighbors[6] = neighbors[7] = neighbors[8] = neighbors[0];
    if (pj == PJ-1) neighbors[2] = neighbors[3] = neighbors[4] = neighbors[0];
  }

  for (pj = 0; pj < PJ; pj++) {
    PetscFree(procs[pj]);
  }
  PetscFree(procs);
  return(0);
}

/*
  SUBDOMAIN NEIGHBORHOOD PROCESS MAP:
    2 | 3 | 4
    __|___|__
    1 | 0 | 5
    __|___|__
    8 | 7 | 6
      |   |
*/
PetscInt DMDAGetNeighborRelative(DM da, PetscReal ir, PetscReal jr)
{
  DMDALocalInfo info;
  PetscReal     is, ie, js, je;

  DMDAGetLocalInfo(da, &info);
  is = (PetscReal) info.xs - 0.5; ie = (PetscReal) info.xs + info.xm - 0.5;
  js = (PetscReal) info.ys - 0.5; je = (PetscReal) info.ys + info.ym - 0.5;

  if (ir >= is && ir <= ie) { /* center column */
    if (jr >= js && jr <= je) return 0;
    else if (jr < js)         return 7;
    else                      return 3;
  } else if (ir < is) { /* left column */
    if (jr >= js && jr <= je) return 1;
    else if (jr < js)         return 8;
    else                      return 2;
  } else { /* right column */
    if (jr >= js && jr <= je) return 5;
    else if (jr < js)         return 6;
    else                      return 4;
  }
}