Actual source code: characteristic.c
petsc-3.7.3 2016-08-01

#include <petsc/private/characteristicimpl.h> /*I "petsccharacteristic.h" I*/
#include <petscdmda.h>
#include <petscviewer.h>

PetscClassId  CHARACTERISTIC_CLASSID;
PetscLogEvent CHARACTERISTIC_SetUp, CHARACTERISTIC_Solve, CHARACTERISTIC_QueueSetup, CHARACTERISTIC_DAUpdate;
PetscLogEvent CHARACTERISTIC_HalfTimeLocal, CHARACTERISTIC_HalfTimeRemote, CHARACTERISTIC_HalfTimeExchange;
PetscLogEvent CHARACTERISTIC_FullTimeLocal, CHARACTERISTIC_FullTimeRemote, CHARACTERISTIC_FullTimeExchange;

/*
   Contains the list of registered characteristic routines
*/
PetscFunctionList CharacteristicList              = NULL;
PetscBool         CharacteristicRegisterAllCalled = PETSC_FALSE;

PetscErrorCode DMDAGetNeighborsRank(DM, PetscMPIInt []);
PetscInt       DMDAGetNeighborRelative(DM, PetscReal, PetscReal);
PetscErrorCode DMDAMapToPeriodicDomain(DM, PetscScalar []);

PetscErrorCode CharacteristicHeapSort(Characteristic, Queue, PetscInt);
PetscErrorCode CharacteristicSiftDown(Characteristic, Queue, PetscInt, PetscInt);

PetscErrorCode CharacteristicView(Characteristic c, PetscViewer viewer)
{
  PetscBool iascii;

  if (!viewer) {
    PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)c), &viewer);
  }

  PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);
  if (!iascii) {
    if (c->ops->view) {
      (*c->ops->view)(c, viewer);
    }
  }
  return(0);
}

PetscErrorCode CharacteristicDestroy(Characteristic *c)
{
  if (!*c) return(0);
  if (--((PetscObject)(*c))->refct > 0) return(0);

  if ((*c)->ops->destroy) {
    (*(*c)->ops->destroy)((*c));
  }
  MPI_Type_free(&(*c)->itemType);
  PetscFree((*c)->queue);
  PetscFree((*c)->queueLocal);
  PetscFree((*c)->queueRemote);
  PetscFree((*c)->neighbors);
  PetscFree((*c)->needCount);
  PetscFree((*c)->localOffsets);
  PetscFree((*c)->fillCount);
  PetscFree((*c)->remoteOffsets);
  PetscFree((*c)->request);
  PetscFree((*c)->status);
  PetscHeaderDestroy(c);
  return(0);
}

PetscErrorCode CharacteristicCreate(MPI_Comm comm, Characteristic *c)
{
  Characteristic newC;

  *c = NULL;
  CharacteristicInitializePackage();

  PetscHeaderCreate(newC, CHARACTERISTIC_CLASSID, "Characteristic", "Characteristic", "SemiLagrange", comm, CharacteristicDestroy, CharacteristicView);
  *c = newC;

  newC->structured          = PETSC_TRUE;
  newC->numIds              = 0;
  newC->velocityDA          = NULL;
  newC->velocity            = NULL;
  newC->velocityOld         = NULL;
  newC->numVelocityComp     = 0;
  newC->velocityComp        = NULL;
  newC->velocityInterp      = NULL;
  newC->velocityInterpLocal = NULL;
  newC->velocityCtx         = NULL;
  newC->fieldDA             = NULL;
  newC->field               = NULL;
  newC->numFieldComp        = 0;
  newC->fieldComp           = NULL;
  newC->fieldInterp         = NULL;
  newC->fieldInterpLocal    = NULL;
  newC->fieldCtx            = NULL;
  newC->itemType            = 0;
  newC->queue               = NULL;
  newC->queueSize           = 0;
  newC->queueMax            = 0;
  newC->queueLocal          = NULL;
  newC->queueLocalSize      = 0;
  newC->queueLocalMax       = 0;
  newC->queueRemote         = NULL;
  newC->queueRemoteSize     = 0;
  newC->queueRemoteMax      = 0;
  newC->numNeighbors        = 0;
  newC->neighbors           = NULL;
  newC->needCount           = NULL;
  newC->localOffsets        = NULL;
  newC->fillCount           = NULL;
  newC->remoteOffsets       = NULL;
  newC->request             = NULL;
  newC->status              = NULL;
  return(0);
}
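
/* Usage sketch (editor's addition, not part of the original source): the
   typical lifecycle of a Characteristic, assuming a 2D DMDA "da" that holds
   both the velocity and the advected field, and user-supplied local
   interpolation callbacks InterpVelocity2D() and InterpField2D()
   (hypothetical names).  Error checking is omitted for brevity; no explicit
   CharacteristicSetType() is needed since CharacteristicSetUp() defaults to
   CHARACTERISTICDA. */
#if 0
static PetscErrorCode SolveStepExample(DM da, Vec v, Vec vOld, Vec phi, Vec phiNew, PetscReal dt)
{
  Characteristic c;
  PetscInt       velComp[2]   = {0, 1}; /* x- and y-velocity components */
  PetscInt       fieldComp[1] = {0};    /* advected scalar component    */

  CharacteristicCreate(PETSC_COMM_WORLD, &c);
  CharacteristicSetVelocityInterpolationLocal(c, da, v, vOld, 2, velComp, InterpVelocity2D, NULL);
  CharacteristicSetFieldInterpolationLocal(c, da, phi, 1, fieldComp, InterpField2D, NULL);
  CharacteristicSolve(c, dt, phiNew); /* phiNew receives the advected field */
  CharacteristicDestroy(&c);
  return 0;
}
#endif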

/*@C
   CharacteristicSetType - Builds Characteristic for a particular solver.

   Logically Collective on Characteristic

   Input Parameters:
+  c    - the method of characteristics context
-  type - a known method

   Options Database Key:
.  -characteristic_type <method> - Sets the method; use -help for a list
   of available methods

   Notes:
   See "include/petsccharacteristic.h" for available methods

   Normally, it is best to use the CharacteristicSetFromOptions() command and
   then set the Characteristic type from the options database rather than by using
   this routine.  Using the options database provides the user with
   maximum flexibility in evaluating the many available methods.
   The CharacteristicSetType() routine is provided for those situations where it
   is necessary to set the solver independently of the command
   line or options database.  This might be the case, for example, when
   the choice of solver changes during the execution of the
   program, and the user's application is taking responsibility for
   choosing the appropriate method.  In other words, this routine is
   not for beginners.

   Level: intermediate

.keywords: Characteristic, set, method

.seealso: CharacteristicType

@*/
PetscErrorCode CharacteristicSetType(Characteristic c, CharacteristicType type)
{
  PetscErrorCode ierr, (*r)(Characteristic);
  PetscBool      match;

  PetscObjectTypeCompare((PetscObject) c, type, &match);
  if (match) return(0);

  if (c->data) {
    /* destroy the old private Characteristic context */
    (*c->ops->destroy)(c);
    c->ops->destroy = NULL;
    c->data         = 0;
  }

  PetscFunctionListFind(CharacteristicList, type, &r);
  if (!r) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown Characteristic type given: %s", type);
  c->setupcalled = 0;
  (*r)(c);
  PetscObjectChangeTypeName((PetscObject) c, type);
  return(0);
}

/*@
   CharacteristicSetUp - Sets up the internal data structures for the
   later use of a method of characteristics solver.

   Collective on Characteristic

   Input Parameter:
.  c - the method of characteristics context obtained from CharacteristicCreate()

   Level: developer

.keywords: Characteristic, setup

.seealso: CharacteristicCreate(), CharacteristicSolve(), CharacteristicDestroy()
@*/
PetscErrorCode CharacteristicSetUp(Characteristic c)
{
  if (!((PetscObject)c)->type_name) {
    CharacteristicSetType(c, CHARACTERISTICDA);
  }

  if (c->setupcalled == 2) return(0);

  PetscLogEventBegin(CHARACTERISTIC_SetUp,c,0,0,0);
  if (!c->setupcalled) {
    (*c->ops->setup)(c);
  }
  PetscLogEventEnd(CHARACTERISTIC_SetUp,c,0,0,0);
  c->setupcalled = 2;
  return(0);
}

/*@C
   CharacteristicRegister - Adds a solver to the method of characteristics package.

   Not Collective

   Input Parameters:
+  name_solver    - name of a new user-defined solver
-  routine_create - routine to create method context

   Sample usage:
.vb
   CharacteristicRegister("my_char", MyCharCreate);
.ve

   Then, your Characteristic type can be chosen with the procedural interface via
.vb
   CharacteristicCreate(comm, &c);
   CharacteristicSetType(c, "my_char");
.ve
   or at runtime via the option
.vb
   -characteristic_type my_char
.ve

   Notes:
   CharacteristicRegister() may be called multiple times to add several user-defined solvers.

.keywords: Characteristic, register

.seealso: CharacteristicRegisterAll(), CharacteristicRegisterDestroy()

   Level: advanced
@*/
PetscErrorCode CharacteristicRegister(const char sname[], PetscErrorCode (*function)(Characteristic))
{
  PetscFunctionListAdd(&CharacteristicList, sname, function);
  return(0);
}

PetscErrorCode CharacteristicSetVelocityInterpolation(Characteristic c, DM da, Vec v, Vec vOld, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(Vec, PetscReal[], PetscInt, PetscInt[], PetscScalar[], void*), void *ctx)
{
  c->velocityDA      = da;
  c->velocity        = v;
  c->velocityOld     = vOld;
  c->numVelocityComp = numComponents;
  c->velocityComp    = components;
  c->velocityInterp  = interp;
  c->velocityCtx     = ctx;
  return(0);
}

PetscErrorCode CharacteristicSetVelocityInterpolationLocal(Characteristic c, DM da, Vec v, Vec vOld, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(void*, PetscReal [], PetscInt, PetscInt[], PetscScalar[], void*), void *ctx)
{
  c->velocityDA          = da;
  c->velocity            = v;
  c->velocityOld         = vOld;
  c->numVelocityComp     = numComponents;
  c->velocityComp        = components;
  c->velocityInterpLocal = interp;
  c->velocityCtx         = ctx;
  return(0);
}

PetscErrorCode CharacteristicSetFieldInterpolation(Characteristic c, DM da, Vec v, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(Vec, PetscReal[], PetscInt, PetscInt[], PetscScalar[], void*), void *ctx)
{
#if 0
  if (numComponents > 2) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Fields with more than 2 components are not supported. Send mail to petsc-maint@mcs.anl.gov.");
#endif
  c->fieldDA      = da;
  c->field        = v;
  c->numFieldComp = numComponents;
  c->fieldComp    = components;
  c->fieldInterp  = interp;
  c->fieldCtx     = ctx;
  return(0);
}

PetscErrorCode CharacteristicSetFieldInterpolationLocal(Characteristic c, DM da, Vec v, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(void*, PetscReal[], PetscInt, PetscInt[], PetscScalar [], void*), void *ctx)
{
#if 0
  if (numComponents > 2) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Fields with more than 2 components are not supported. Send mail to petsc-maint@mcs.anl.gov.");
#endif
  c->fieldDA          = da;
  c->field            = v;
  c->numFieldComp     = numComponents;
  c->fieldComp        = components;
  c->fieldInterpLocal = interp;
  c->fieldCtx         = ctx;
  return(0);
}
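
/* Example callback (editor's sketch, not in the original source): a bilinear
   interpolant matching the "local" signature above.  It assumes the void*
   argument came from DMDAVecGetArray() on a ghosted 2D DMDA vector, indexed
   as a[j][i*dof+comp] in the same way CharacteristicSolve() indexes its
   solution array; dof is assumed to be passed through ctx, and the stencil
   (i,j)..(i+1,j+1) is assumed to lie inside the ghosted local patch. */
#if 0
static PetscErrorCode InterpFieldLocal(void *f, PetscReal ij[], PetscInt numComp, PetscInt components[], PetscScalar values[], void *ctx)
{
  PetscScalar **a   = (PetscScalar **) f;
  PetscInt      dof = *(PetscInt *) ctx;                /* components per node */
  PetscInt      i   = (PetscInt) PetscFloorReal(ij[0]); /* lower-left corner   */
  PetscInt      j   = (PetscInt) PetscFloorReal(ij[1]);
  PetscReal     s   = ij[0] - i, t = ij[1] - j;         /* fractional offsets  */
  PetscInt      c, d;

  for (c = 0; c < numComp; c++) {
    d         = components[c];
    values[c] = (1.-s)*(1.-t)*a[j][i*dof+d]   + s*(1.-t)*a[j][(i+1)*dof+d]
              + (1.-s)*t    *a[j+1][i*dof+d] + s*t    *a[j+1][(i+1)*dof+d];
  }
  return 0;
}
#endif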

PetscErrorCode CharacteristicSolve(Characteristic c, PetscReal dt, Vec solution)
{
  CharacteristicPointDA2D Qi;
  DM                      da = c->velocityDA;
  Vec                     velocityLocal, velocityLocalOld;
  Vec                     fieldLocal;
  DMDALocalInfo           info;
  PetscScalar             **solArray;
  void                    *velocityArray;
  void                    *velocityArrayOld;
  void                    *fieldArray;
  PetscScalar             *interpIndices;
  PetscScalar             *velocityValues, *velocityValuesOld;
  PetscScalar             *fieldValues;
  PetscMPIInt             rank;
  PetscInt                dim;
  PetscMPIInt             neighbors[9];
  PetscInt                dof;
  PetscInt                gx, gy;
  PetscInt                n, is, ie, js, je, comp;
  PetscErrorCode          ierr;

  c->queueSize = 0;
  MPI_Comm_rank(PetscObjectComm((PetscObject)c), &rank);
  DMDAGetNeighborsRank(da, neighbors);
  CharacteristicSetNeighbors(c, 9, neighbors);
  CharacteristicSetUp(c);
  /* global and local grid info */
  DMDAGetInfo(da, &dim, &gx, &gy, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0);
  DMDAGetLocalInfo(da, &info);
  is = info.xs; ie = info.xs+info.xm;
  js = info.ys; je = info.ys+info.ym;
  /* Allocation */
  PetscMalloc1(dim, &interpIndices);
  PetscMalloc1(c->numVelocityComp, &velocityValues);
  PetscMalloc1(c->numVelocityComp, &velocityValuesOld);
  PetscMalloc1(c->numFieldComp, &fieldValues);
  PetscLogEventBegin(CHARACTERISTIC_Solve,0,0,0,0);

  /* -----------------------------------------------------------------------
     PART 1, AT t-dt/2
     ----------------------------------------------------------------------- */
  PetscLogEventBegin(CHARACTERISTIC_QueueSetup,0,0,0,0);
  /* GET POSITION AT HALF TIME IN THE PAST */
  if (c->velocityInterpLocal) {
    DMGetLocalVector(c->velocityDA, &velocityLocal);
    DMGetLocalVector(c->velocityDA, &velocityLocalOld);
    DMGlobalToLocalBegin(c->velocityDA, c->velocity, INSERT_VALUES, velocityLocal);
    DMGlobalToLocalEnd(c->velocityDA, c->velocity, INSERT_VALUES, velocityLocal);
    DMGlobalToLocalBegin(c->velocityDA, c->velocityOld, INSERT_VALUES, velocityLocalOld);
    DMGlobalToLocalEnd(c->velocityDA, c->velocityOld, INSERT_VALUES, velocityLocalOld);
    DMDAVecGetArray(c->velocityDA, velocityLocal, &velocityArray);
    DMDAVecGetArray(c->velocityDA, velocityLocalOld, &velocityArrayOld);
  }
  PetscInfo(NULL, "Calculating position at t_{n - 1/2}\n");
  for (Qi.j = js; Qi.j < je; Qi.j++) {
    for (Qi.i = is; Qi.i < ie; Qi.i++) {
      interpIndices[0] = Qi.i;
      interpIndices[1] = Qi.j;
      if (c->velocityInterpLocal) {c->velocityInterpLocal(velocityArray, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx);}
      else {c->velocityInterp(c->velocity, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx);}
      Qi.x = Qi.i - velocityValues[0]*dt/2.0;
      Qi.y = Qi.j - velocityValues[1]*dt/2.0;

      /* Determine whether the position at t - dt/2 is local */
      Qi.proc = DMDAGetNeighborRelative(da, Qi.x, Qi.y);

      /* Check for periodic boundaries and move all periodic points back onto the domain */
      DMDAMapCoordsToPeriodicDomain(da, &(Qi.x), &(Qi.y));
      CharacteristicAddPoint(c, &Qi);
    }
  }
  PetscLogEventEnd(CHARACTERISTIC_QueueSetup,0,0,0,0);

  PetscLogEventBegin(CHARACTERISTIC_HalfTimeExchange,0,0,0,0);
  CharacteristicSendCoordinatesBegin(c);
  PetscLogEventEnd(CHARACTERISTIC_HalfTimeExchange,0,0,0,0);

  PetscLogEventBegin(CHARACTERISTIC_HalfTimeLocal,0,0,0,0);
  /* Calculate velocity at t_{n - 1/2} (local values) */
  PetscInfo(NULL, "Calculating local velocities at t_{n - 1/2}\n");
  for (n = 0; n < c->queueSize; n++) {
    Qi = c->queue[n];
    if (c->neighbors[Qi.proc] == rank) {
      interpIndices[0] = Qi.x;
      interpIndices[1] = Qi.y;
      if (c->velocityInterpLocal) {
        c->velocityInterpLocal(velocityArray, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx);
        c->velocityInterpLocal(velocityArrayOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx);
      } else {
        c->velocityInterp(c->velocity, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx);
        c->velocityInterp(c->velocityOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx);
      }
      Qi.x = 0.5*(velocityValues[0] + velocityValuesOld[0]);
      Qi.y = 0.5*(velocityValues[1] + velocityValuesOld[1]);
    }
    c->queue[n] = Qi;
  }
  PetscLogEventEnd(CHARACTERISTIC_HalfTimeLocal,0,0,0,0);

  PetscLogEventBegin(CHARACTERISTIC_HalfTimeExchange,0,0,0,0);
  CharacteristicSendCoordinatesEnd(c);
  PetscLogEventEnd(CHARACTERISTIC_HalfTimeExchange,0,0,0,0);

  /* Calculate velocity at t_{n - 1/2} (fill remote requests) */
  PetscLogEventBegin(CHARACTERISTIC_HalfTimeRemote,0,0,0,0);
  PetscInfo1(NULL, "Calculating %d remote velocities at t_{n - 1/2}\n", c->queueRemoteSize);
  for (n = 0; n < c->queueRemoteSize; n++) {
    Qi = c->queueRemote[n];
    interpIndices[0] = Qi.x;
    interpIndices[1] = Qi.y;
    if (c->velocityInterpLocal) {
      c->velocityInterpLocal(velocityArray, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx);
      c->velocityInterpLocal(velocityArrayOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx);
    } else {
      c->velocityInterp(c->velocity, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx);
      c->velocityInterp(c->velocityOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx);
    }
    Qi.x = 0.5*(velocityValues[0] + velocityValuesOld[0]);
    Qi.y = 0.5*(velocityValues[1] + velocityValuesOld[1]);
    c->queueRemote[n] = Qi;
  }
  PetscLogEventEnd(CHARACTERISTIC_HalfTimeRemote,0,0,0,0);
  PetscLogEventBegin(CHARACTERISTIC_HalfTimeExchange,0,0,0,0);
  CharacteristicGetValuesBegin(c);
  CharacteristicGetValuesEnd(c);
  if (c->velocityInterpLocal) {
    DMDAVecRestoreArray(c->velocityDA, velocityLocal, &velocityArray);
    DMDAVecRestoreArray(c->velocityDA, velocityLocalOld, &velocityArrayOld);
    DMRestoreLocalVector(c->velocityDA, &velocityLocal);
    DMRestoreLocalVector(c->velocityDA, &velocityLocalOld);
  }
  PetscLogEventEnd(CHARACTERISTIC_HalfTimeExchange,0,0,0,0);

  /* -----------------------------------------------------------------------
     PART 2, AT t-dt
     ----------------------------------------------------------------------- */

  /* GET POSITION AT t_n (local values) */
  PetscLogEventBegin(CHARACTERISTIC_FullTimeLocal,0,0,0,0);
  PetscInfo(NULL, "Calculating position at t_{n}\n");
  for (n = 0; n < c->queueSize; n++) {
    Qi   = c->queue[n];
    Qi.x = Qi.i - Qi.x*dt;
    Qi.y = Qi.j - Qi.y*dt;

    /* Determine whether the position at t-dt is local */
    Qi.proc = DMDAGetNeighborRelative(da, Qi.x, Qi.y);

    /* Check for periodic boundaries and move all periodic points back onto the domain */
    DMDAMapCoordsToPeriodicDomain(da, &(Qi.x), &(Qi.y));

    c->queue[n] = Qi;
  }
  PetscLogEventEnd(CHARACTERISTIC_FullTimeLocal,0,0,0,0);

  PetscLogEventBegin(CHARACTERISTIC_FullTimeExchange,0,0,0,0);
  CharacteristicSendCoordinatesBegin(c);
  PetscLogEventEnd(CHARACTERISTIC_FullTimeExchange,0,0,0,0);

  /* GET VALUE AT FULL TIME IN THE PAST (LOCAL REQUESTS) */
  PetscLogEventBegin(CHARACTERISTIC_FullTimeLocal,0,0,0,0);
  if (c->fieldInterpLocal) {
    DMGetLocalVector(c->fieldDA, &fieldLocal);
    DMGlobalToLocalBegin(c->fieldDA, c->field, INSERT_VALUES, fieldLocal);
    DMGlobalToLocalEnd(c->fieldDA, c->field, INSERT_VALUES, fieldLocal);
    DMDAVecGetArray(c->fieldDA, fieldLocal, &fieldArray);
  }
  PetscInfo(NULL, "Calculating local field at t_{n}\n");
  for (n = 0; n < c->queueSize; n++) {
    if (c->neighbors[c->queue[n].proc] == rank) {
      interpIndices[0] = c->queue[n].x;
      interpIndices[1] = c->queue[n].y;
      if (c->fieldInterpLocal) {c->fieldInterpLocal(fieldArray, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx);}
      else {c->fieldInterp(c->field, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx);}
      for (comp = 0; comp < c->numFieldComp; comp++) c->queue[n].field[comp] = fieldValues[comp];
    }
  }
  PetscLogEventEnd(CHARACTERISTIC_FullTimeLocal,0,0,0,0);

  PetscLogEventBegin(CHARACTERISTIC_FullTimeExchange,0,0,0,0);
  CharacteristicSendCoordinatesEnd(c);
  PetscLogEventEnd(CHARACTERISTIC_FullTimeExchange,0,0,0,0);

  /* GET VALUE AT FULL TIME IN THE PAST (REMOTE REQUESTS) */
  PetscLogEventBegin(CHARACTERISTIC_FullTimeRemote,0,0,0,0);
  PetscInfo1(NULL, "Calculating %d remote field points at t_{n}\n", c->queueRemoteSize);
  for (n = 0; n < c->queueRemoteSize; n++) {
    interpIndices[0] = c->queueRemote[n].x;
    interpIndices[1] = c->queueRemote[n].y;

    /* for debugging purposes */
    if (1) { /* hacked bounds test...let's do better */
      PetscScalar im = interpIndices[0]; PetscScalar jm = interpIndices[1];

      if ((im < (PetscScalar) is - 1.) || (im > (PetscScalar) ie) || (jm < (PetscScalar) js - 1.) || (jm > (PetscScalar) je)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_LIB, "Nonlocal point: (%g,%g)", im, jm);
    }

    if (c->fieldInterpLocal) {c->fieldInterpLocal(fieldArray, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx);}
    else {c->fieldInterp(c->field, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx);}
    for (comp = 0; comp < c->numFieldComp; comp++) c->queueRemote[n].field[comp] = fieldValues[comp];
  }
  PetscLogEventEnd(CHARACTERISTIC_FullTimeRemote,0,0,0,0);

  PetscLogEventBegin(CHARACTERISTIC_FullTimeExchange,0,0,0,0);
  CharacteristicGetValuesBegin(c);
  CharacteristicGetValuesEnd(c);
  if (c->fieldInterpLocal) {
    DMDAVecRestoreArray(c->fieldDA, fieldLocal, &fieldArray);
    DMRestoreLocalVector(c->fieldDA, &fieldLocal);
  }
  PetscLogEventEnd(CHARACTERISTIC_FullTimeExchange,0,0,0,0);

  /* Return field of characteristics at t_n-1 */
  PetscLogEventBegin(CHARACTERISTIC_DAUpdate,0,0,0,0);
  DMDAGetInfo(c->fieldDA, 0, 0, 0, 0, 0, 0, 0, &dof, 0, 0, 0, 0, 0);
  DMDAVecGetArray(c->fieldDA, solution, &solArray);
  for (n = 0; n < c->queueSize; n++) {
    Qi = c->queue[n];
    for (comp = 0; comp < c->numFieldComp; comp++) solArray[Qi.j][Qi.i*dof+c->fieldComp[comp]] = Qi.field[comp];
  }
  DMDAVecRestoreArray(c->fieldDA, solution, &solArray);
  PetscLogEventEnd(CHARACTERISTIC_DAUpdate,0,0,0,0);
  PetscLogEventEnd(CHARACTERISTIC_Solve,0,0,0,0);

  /* Cleanup */
  PetscFree(interpIndices);
  PetscFree(velocityValues);
  PetscFree(velocityValuesOld);
  PetscFree(fieldValues);
  return(0);
}

PetscErrorCode CharacteristicSetNeighbors(Characteristic c, PetscInt numNeighbors, PetscMPIInt neighbors[])
{
  c->numNeighbors = numNeighbors;
  PetscFree(c->neighbors);
  PetscMalloc1(numNeighbors, &c->neighbors);
  PetscMemcpy(c->neighbors, neighbors, numNeighbors * sizeof(PetscMPIInt));
  return(0);
}

PetscErrorCode CharacteristicAddPoint(Characteristic c, CharacteristicPointDA2D *point)
{
  if (c->queueSize >= c->queueMax) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Exceeded maximum queue size %d", c->queueMax);
  c->queue[c->queueSize++] = *point;
  return(0);
}

PetscErrorCode CharacteristicSendCoordinatesBegin(Characteristic c)
{
  PetscMPIInt rank, tag = 121;
  PetscInt    i, n;

  MPI_Comm_rank(PetscObjectComm((PetscObject)c), &rank);
  CharacteristicHeapSort(c, c->queue, c->queueSize);
  PetscMemzero(c->needCount, c->numNeighbors * sizeof(PetscInt));
  for (i = 0; i < c->queueSize; i++) c->needCount[c->queue[i].proc]++;
  c->fillCount[0] = 0;
  for (n = 1; n < c->numNeighbors; n++) {
    MPI_Irecv(&(c->fillCount[n]), 1, MPIU_INT, c->neighbors[n], tag, PetscObjectComm((PetscObject)c), &(c->request[n-1]));
  }
  for (n = 1; n < c->numNeighbors; n++) {
    MPI_Send(&(c->needCount[n]), 1, MPIU_INT, c->neighbors[n], tag, PetscObjectComm((PetscObject)c));
  }
  MPI_Waitall(c->numNeighbors-1, c->request, c->status);
  /* Initialize the remote queue */
  c->queueLocalMax  = c->localOffsets[0]  = 0;
  c->queueRemoteMax = c->remoteOffsets[0] = 0;
  for (n = 1; n < c->numNeighbors; n++) {
    c->remoteOffsets[n] = c->queueRemoteMax;
    c->queueRemoteMax  += c->fillCount[n];
    c->localOffsets[n]  = c->queueLocalMax;
    c->queueLocalMax   += c->needCount[n];
  }
  /* HACK BEGIN */
  for (n = 1; n < c->numNeighbors; n++) c->localOffsets[n] += c->needCount[0];
  c->needCount[0] = 0;
  /* HACK END */
  if (c->queueRemoteMax) {
    PetscMalloc(sizeof(CharacteristicPointDA2D) * c->queueRemoteMax, &c->queueRemote);
  } else c->queueRemote = NULL;
  c->queueRemoteSize = c->queueRemoteMax;

  /* Send and receive requests for values at t_{n - 1/2}, giving the coordinates for interpolation */
  for (n = 1; n < c->numNeighbors; n++) {
    PetscInfo2(NULL, "Receiving %d requests for values from proc %d\n", c->fillCount[n], c->neighbors[n]);
    MPI_Irecv(&(c->queueRemote[c->remoteOffsets[n]]), c->fillCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c), &(c->request[n-1]));
  }
  for (n = 1; n < c->numNeighbors; n++) {
    PetscInfo2(NULL, "Sending %d requests for values from proc %d\n", c->needCount[n], c->neighbors[n]);
    MPI_Send(&(c->queue[c->localOffsets[n]]), c->needCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c));
  }
  return(0);
}
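
/* Worked example (editor's addition): suppose numNeighbors = 3 with
   needCount = [4, 2, 1] (4 points stay local) and the count exchange fills
   fillCount = [0, 3, 5].  The offset loop above then produces
     remoteOffsets = [0, 0, 3], queueRemoteMax = 8,
     localOffsets  = [0, 0, 2], queueLocalMax  = 3,
   and the "hack" shifts the local offsets past the resident points, giving
   localOffsets = [0, 4, 6].  Because the queue was heap-sorted by owner,
   entries 0..3 stay on this rank, entries 4..5 are sent to neighbors[1],
   entry 6 to neighbors[2], and 8 remote requests are received. */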

PetscErrorCode CharacteristicSendCoordinatesEnd(Characteristic c)
{
#if 0
  PetscMPIInt rank;
  PetscInt    n;
#endif

  MPI_Waitall(c->numNeighbors-1, c->request, c->status);
#if 0
  MPI_Comm_rank(PetscObjectComm((PetscObject)c), &rank);
  for (n = 0; n < c->queueRemoteSize; n++) {
    if (c->neighbors[c->queueRemote[n].proc] == rank) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_PLIB, "This is messed up, n = %d proc = %d", n, c->queueRemote[n].proc);
  }
#endif
  return(0);
}

PetscErrorCode CharacteristicGetValuesBegin(Characteristic c)
{
  PetscMPIInt tag = 121;
  PetscInt    n;

  /* SEND AND RECEIVE FILLED REQUESTS for velocities at t_{n - 1/2} */
  for (n = 1; n < c->numNeighbors; n++) {
    MPI_Irecv(&(c->queue[c->localOffsets[n]]), c->needCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c), &(c->request[n-1]));
  }
  for (n = 1; n < c->numNeighbors; n++) {
    MPI_Send(&(c->queueRemote[c->remoteOffsets[n]]), c->fillCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c));
  }
  return(0);
}

PetscErrorCode CharacteristicGetValuesEnd(Characteristic c)
{
  MPI_Waitall(c->numNeighbors-1, c->request, c->status);
  /* Free queue of requests from other procs */
  PetscFree(c->queueRemote);
  return(0);
}
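
/* Protocol summary (editor's addition): each pass of CharacteristicSolve()
   uses the four routines above as a two-phase exchange, with the coordinate
   messages doubling as interpolation requests:

     CharacteristicSendCoordinatesBegin()  -- sort queue by owning neighbor,
                                              exchange counts, post sends and
                                              receives of the request points
     (interpolate this rank's own points)
     CharacteristicSendCoordinatesEnd()    -- wait for remote requests to land
     (interpolate the queueRemote points on behalf of other ranks)
     CharacteristicGetValuesBegin()        -- send filled queueRemote entries
                                              back; receive results into queue
     CharacteristicGetValuesEnd()          -- wait, then free queueRemote */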

/*---------------------------------------------------------------------*/
/*
  Based on code from http://linux.wku.edu/~lamonml/algor/sort/heap.html
*/
PetscErrorCode CharacteristicHeapSort(Characteristic c, Queue queue, PetscInt size)
/*---------------------------------------------------------------------*/
{
  PetscErrorCode          ierr;
  CharacteristicPointDA2D temp;
  PetscInt                n;

  if (0) { /* Check the order of the queue before sorting */
    PetscInfo(NULL, "Before Heap sort\n");
    for (n = 0; n < size; n++) {
      PetscInfo2(NULL, "%d %d\n", n, queue[n].proc);
    }
  }

  /* SORTING PHASE */
  for (n = (size / 2)-1; n >= 0; n--) {
    CharacteristicSiftDown(c, queue, n, size-1); /* Rich had size-1 here, Matt had size */
  }
  for (n = size-1; n >= 1; n--) {
    temp     = queue[0];
    queue[0] = queue[n];
    queue[n] = temp;
    CharacteristicSiftDown(c, queue, 0, n-1);
  }
  if (0) { /* Check the order of the queue after sorting */
    PetscInfo(NULL, "After Heap sort\n");
    for (n = 0; n < size; n++) {
      PetscInfo2(NULL, "%d %d\n", n, queue[n].proc);
    }
  }
  return(0);
}

/*---------------------------------------------------------------------*/
/*
  Based on code from http://linux.wku.edu/~lamonml/algor/sort/heap.html
*/
PetscErrorCode CharacteristicSiftDown(Characteristic c, Queue queue, PetscInt root, PetscInt bottom)
/*---------------------------------------------------------------------*/
{
  PetscBool               done = PETSC_FALSE;
  PetscInt                maxChild;
  CharacteristicPointDA2D temp;

  while ((root*2 <= bottom) && (!done)) {
    if (root*2 == bottom) maxChild = root * 2;
    else if (queue[root*2].proc > queue[root*2+1].proc) maxChild = root * 2;
    else maxChild = root * 2 + 1;

    if (queue[root].proc < queue[maxChild].proc) {
      temp            = queue[root];
      queue[root]     = queue[maxChild];
      queue[maxChild] = temp;
      root            = maxChild;
    } else done = PETSC_TRUE;
  }
  return(0);
}
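
/* Worked trace (editor's addition): the children of index i are taken at 2i
   and 2i+1, i.e. the 1-based heap rule applied to 0-based storage.  Sifting
   root = 0, bottom = 2 on proc keys [3, 9, 5]:
     iteration 1: maxChild = 1 (keys 3 vs 9), 3 < 9 so swap -> [9, 3, 5], root = 1;
     iteration 2: root*2 == bottom so maxChild = 2, 3 < 5 so swap -> [9, 5, 3], root = 2;
     iteration 3: root*2 > bottom, loop ends; the max key is now at index 0. */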

/* [center, left, top-left, top, top-right, right, bottom-right, bottom, bottom-left] */
PetscErrorCode DMDAGetNeighborsRank(DM da, PetscMPIInt neighbors[])
{
  DMBoundaryType bx, by;
  PetscBool      IPeriodic = PETSC_FALSE, JPeriodic = PETSC_FALSE;
  MPI_Comm       comm;
  PetscMPIInt    rank;
  PetscInt       **procs, pi, pj, pim, pip, pjm, pjp, PI, PJ;
  PetscErrorCode ierr;

  PetscObjectGetComm((PetscObject) da, &comm);
  MPI_Comm_rank(comm, &rank);
  DMDAGetInfo(da, 0, 0, 0, 0, &PI, &PJ, 0, 0, 0, &bx, &by, 0, 0);

  if (bx == DM_BOUNDARY_PERIODIC) IPeriodic = PETSC_TRUE;
  if (by == DM_BOUNDARY_PERIODIC) JPeriodic = PETSC_TRUE;

  neighbors[0] = rank;
  rank = 0;
  PetscMalloc1(PJ, &procs);
  for (pj = 0; pj < PJ; pj++) {
    PetscMalloc1(PI, &(procs[pj]));
    for (pi = 0; pi < PI; pi++) {
      procs[pj][pi] = rank;
      rank++;
    }
  }

  pi  = neighbors[0] % PI;
  pj  = neighbors[0] / PI;
  pim = pi-1; if (pim < 0) pim = PI-1;
  pip = (pi+1) % PI;
  pjm = pj-1; if (pjm < 0) pjm = PJ-1;
  pjp = (pj+1) % PJ;

  neighbors[1] = procs[pj] [pim];
  neighbors[2] = procs[pjp][pim];
  neighbors[3] = procs[pjp][pi];
  neighbors[4] = procs[pjp][pip];
  neighbors[5] = procs[pj] [pip];
  neighbors[6] = procs[pjm][pip];
  neighbors[7] = procs[pjm][pi];
  neighbors[8] = procs[pjm][pim];

  if (!IPeriodic) {
    if (pi == 0)    neighbors[1] = neighbors[2] = neighbors[8] = neighbors[0];
    if (pi == PI-1) neighbors[4] = neighbors[5] = neighbors[6] = neighbors[0];
  }

  if (!JPeriodic) {
    if (pj == 0)    neighbors[6] = neighbors[7] = neighbors[8] = neighbors[0];
    if (pj == PJ-1) neighbors[2] = neighbors[3] = neighbors[4] = neighbors[0];
  }

  for (pj = 0; pj < PJ; pj++) {
    PetscFree(procs[pj]);
  }
  PetscFree(procs);
  return(0);
}
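
/* Worked example (editor's addition): on a 3 x 3 process grid (PI = PJ = 3)
   with periodic boundaries in both directions, rank 4 sits at pi = pj = 1 and
   the code above yields

     neighbors = [4, 3, 6, 7, 8, 5, 2, 1, 0]

   in the ordering documented before the function.  Without periodicity,
   corner rank 0 (pi = pj = 0) has its off-domain entries folded back onto
   itself: neighbors = [0, 0, 0, 3, 4, 1, 0, 0, 0]. */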

/*
  SUBDOMAIN NEIGHBORHOOD PROCESS MAP:
    2 | 3 | 4
    __|___|__
    1 | 0 | 5
    __|___|__
    8 | 7 | 6
      |   |
*/
PetscInt DMDAGetNeighborRelative(DM da, PetscReal ir, PetscReal jr)
{
  DMDALocalInfo info;
  PetscReal     is, ie, js, je;

  DMDAGetLocalInfo(da, &info);
  is = (PetscReal) info.xs - 0.5; ie = (PetscReal) info.xs + info.xm - 0.5;
  js = (PetscReal) info.ys - 0.5; je = (PetscReal) info.ys + info.ym - 0.5;

  if (ir >= is && ir <= ie) { /* center column */
    if (jr >= js && jr <= je) return 0;
    else if (jr < js)         return 7;
    else                      return 3;
  } else if (ir < is) { /* left column */
    if (jr >= js && jr <= je) return 1;
    else if (jr < js)         return 8;
    else                      return 2;
  } else { /* right column */
    if (jr >= js && jr <= je) return 5;
    else if (jr < js)         return 6;
    else                      return 4;
  }
}
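
/* Worked example (editor's addition): with a local patch info.xs = info.ys = 0
   and info.xm = info.ym = 10, the owned region spans is = js = -0.5 to
   ie = je = 9.5.  A departure point (3.2, 4.7) returns 0 (resident),
   (-1.2, 3.0) returns 1 (left neighbor), and (10.1, 12.4) returns 4
   (top-right), matching the process map above. */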