Actual source code: characteristic.c
petsc-3.5.4 2015-05-23
#include <petsc-private/characteristicimpl.h> /*I "petsccharacteristic.h" I*/
#include <petscdmda.h>
#include <petscviewer.h>

PetscClassId CHARACTERISTIC_CLASSID;
PetscLogEvent CHARACTERISTIC_SetUp, CHARACTERISTIC_Solve, CHARACTERISTIC_QueueSetup, CHARACTERISTIC_DAUpdate;
PetscLogEvent CHARACTERISTIC_HalfTimeLocal, CHARACTERISTIC_HalfTimeRemote, CHARACTERISTIC_HalfTimeExchange;
PetscLogEvent CHARACTERISTIC_FullTimeLocal, CHARACTERISTIC_FullTimeRemote, CHARACTERISTIC_FullTimeExchange;

/*
   Contains the list of registered characteristic routines
*/
PetscFunctionList CharacteristicList              = NULL;
PetscBool         CharacteristicRegisterAllCalled = PETSC_FALSE;

PetscErrorCode DMDAGetNeighborsRank(DM, PetscMPIInt []);
PetscInt       DMDAGetNeighborRelative(DM, PassiveReal, PassiveReal);
PetscErrorCode DMDAMapToPeriodicDomain(DM, PetscScalar []);

PetscErrorCode CharacteristicHeapSort(Characteristic, Queue, PetscInt);
PetscErrorCode CharacteristicSiftDown(Characteristic, Queue, PetscInt, PetscInt);
PetscErrorCode CharacteristicView(Characteristic c, PetscViewer viewer)
{
  PetscBool iascii;

  if (!viewer) {
    PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)c),&viewer);
  }

  PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);
  if (!iascii) {
    if (c->ops->view) {
      (*c->ops->view)(c, viewer);
    }
  }
  return(0);
}
PetscErrorCode CharacteristicDestroy(Characteristic *c)
{
  if (!*c) return(0);

  if (--((PetscObject)(*c))->refct > 0) return(0);

  if ((*c)->ops->destroy) {
    (*(*c)->ops->destroy)((*c));
  }
  MPI_Type_free(&(*c)->itemType);
  PetscFree((*c)->queue);
  PetscFree((*c)->queueLocal);
  PetscFree((*c)->queueRemote);
  PetscFree((*c)->neighbors);
  PetscFree((*c)->needCount);
  PetscFree((*c)->localOffsets);
  PetscFree((*c)->fillCount);
  PetscFree((*c)->remoteOffsets);
  PetscFree((*c)->request);
  PetscFree((*c)->status);
  PetscHeaderDestroy(c);
  return(0);
}
PetscErrorCode CharacteristicCreate(MPI_Comm comm, Characteristic *c)
{
  Characteristic newC;

  *c = NULL;
  CharacteristicInitializePackage();

  PetscHeaderCreate(newC, _p_Characteristic, struct _CharacteristicOps, CHARACTERISTIC_CLASSID, "Characteristic", "Characteristic", "SemiLagrange", comm, CharacteristicDestroy, CharacteristicView);
  PetscLogObjectCreate(newC);
  *c = newC;

  newC->structured          = PETSC_TRUE;
  newC->numIds              = 0;
  newC->velocityDA          = NULL;
  newC->velocity            = NULL;
  newC->velocityOld         = NULL;
  newC->numVelocityComp     = 0;
  newC->velocityComp        = NULL;
  newC->velocityInterp      = NULL;
  newC->velocityInterpLocal = NULL;
  newC->velocityCtx         = NULL;
  newC->fieldDA             = NULL;
  newC->field               = NULL;
  newC->numFieldComp        = 0;
  newC->fieldComp           = NULL;
  newC->fieldInterp         = NULL;
  newC->fieldInterpLocal    = NULL;
  newC->fieldCtx            = NULL;
  newC->itemType            = 0;
  newC->queue               = NULL;
  newC->queueSize           = 0;
  newC->queueMax            = 0;
  newC->queueLocal          = NULL;
  newC->queueLocalSize      = 0;
  newC->queueLocalMax       = 0;
  newC->queueRemote         = NULL;
  newC->queueRemoteSize     = 0;
  newC->queueRemoteMax      = 0;
  newC->numNeighbors        = 0;
  newC->neighbors           = NULL;
  newC->needCount           = NULL;
  newC->localOffsets        = NULL;
  newC->fillCount           = NULL;
  newC->remoteOffsets       = NULL;
  newC->request             = NULL;
  newC->status              = NULL;
  return(0);
}
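
/*
   A minimal lifecycle sketch (illustrative only, not part of this file):
   'da', 'v', 'vOld', 'phi', 'phiNew', the component arrays, and the
   Interp* callbacks are assumed to be supplied by the user.
*/
#if 0
{
  Characteristic c;

  CharacteristicCreate(PETSC_COMM_WORLD, &c);
  CharacteristicSetVelocityInterpolationLocal(c, da, v, vOld, 2, velComp, InterpVelocity2D, NULL);
  CharacteristicSetFieldInterpolationLocal(c, da, phi, 1, fieldComp, InterpField2D, NULL);
  CharacteristicSolve(c, dt, phiNew);
  CharacteristicDestroy(&c);
}
#endif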
/*@C
   CharacteristicSetType - Builds Characteristic for a particular solver.

   Logically Collective on Characteristic

   Input Parameters:
+  c    - the method of characteristics context
-  type - a known method

   Options Database Key:
.  -characteristic_type <method> - Sets the method; use -help for a list
   of available methods

   Notes:
   See "include/petsccharacteristic.h" for available methods

   Normally, it is best to use the CharacteristicSetFromOptions() command and
   then set the Characteristic type from the options database rather than by using
   this routine. Using the options database provides the user with
   maximum flexibility in evaluating the different available methods.
   The CharacteristicSetType() routine is provided for those situations where it
   is necessary to set the solver type independently of the command
   line or options database. This might be the case, for example, when
   the choice of solver changes during the execution of the
   program, and the user's application is taking responsibility for
   choosing the appropriate method. In other words, this routine is
   not for beginners.

   Level: intermediate

.keywords: Characteristic, set, method

.seealso: CharacteristicType

@*/
PetscErrorCode CharacteristicSetType(Characteristic c, CharacteristicType type)
{
  PetscErrorCode (*r)(Characteristic);
  PetscBool      match;

  PetscObjectTypeCompare((PetscObject) c, type, &match);
  if (match) return(0);

  if (c->data) {
    /* destroy the old private Characteristic context */
    (*c->ops->destroy)(c);
    c->ops->destroy = NULL;
    c->data         = 0;
  }

  PetscFunctionListFind(CharacteristicList,type,&r);
  if (!r) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown Characteristic type given: %s", type);

  c->setupcalled = 0;
  (*r)(c);
  PetscObjectChangeTypeName((PetscObject) c, type);
  return(0);
}
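
/*
   Example (a sketch, given a Characteristic c): select the DMDA-based
   implementation explicitly when bypassing the options database.
*/
#if 0
  CharacteristicSetType(c, CHARACTERISTICDA);
#endif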
/*@
   CharacteristicSetUp - Sets up the internal data structures for the
   later use of the solver.

   Collective on Characteristic

   Input Parameter:
.  c - the method of characteristics context obtained from CharacteristicCreate()

   Level: developer

.keywords: Characteristic, setup

.seealso: CharacteristicCreate(), CharacteristicSolve(), CharacteristicDestroy()
@*/
PetscErrorCode CharacteristicSetUp(Characteristic c)
{
  if (!((PetscObject)c)->type_name) {
    CharacteristicSetType(c, CHARACTERISTICDA);
  }

  if (c->setupcalled == 2) return(0);

  PetscLogEventBegin(CHARACTERISTIC_SetUp,c,0,0,0);
  if (!c->setupcalled) {
    (*c->ops->setup)(c);
  }
  PetscLogEventEnd(CHARACTERISTIC_SetUp,c,0,0,0);
  c->setupcalled = 2;
  return(0);
}
/*@C
   CharacteristicRegister - Adds a solver to the method of characteristics package.

   Not Collective

   Input Parameters:
+  name_solver - name of a new user-defined solver
-  routine_create - routine to create method context

   Sample usage:
.vb
   CharacteristicRegister("my_char", MyCharCreate);
.ve

   Then, your Characteristic type can be chosen with the procedural interface via
.vb
   CharacteristicCreate(comm, &c);
   CharacteristicSetType(c, "my_char");
.ve
   or at runtime via the option
.vb
   -characteristic_type my_char
.ve

   Notes:
   CharacteristicRegister() may be called multiple times to add several user-defined solvers.

.keywords: Characteristic, register

.seealso: CharacteristicRegisterAll(), CharacteristicRegisterDestroy()

   Level: advanced
@*/
PetscErrorCode CharacteristicRegister(const char sname[],PetscErrorCode (*function)(Characteristic))
{
  PetscFunctionListAdd(&CharacteristicList,sname,function);
  return(0);
}
PetscErrorCode CharacteristicSetVelocityInterpolation(Characteristic c, DM da, Vec v, Vec vOld, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(Vec, PetscReal[], PetscInt, PetscInt[], PetscScalar[], void*), void *ctx)
{
  c->velocityDA      = da;
  c->velocity        = v;
  c->velocityOld     = vOld;
  c->numVelocityComp = numComponents;
  c->velocityComp    = components;
  c->velocityInterp  = interp;
  c->velocityCtx     = ctx;
  return(0);
}

PetscErrorCode CharacteristicSetVelocityInterpolationLocal(Characteristic c, DM da, Vec v, Vec vOld, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(void*, PetscReal [], PetscInt, PetscInt[], PetscScalar[], void*), void *ctx)
{
  c->velocityDA          = da;
  c->velocity            = v;
  c->velocityOld         = vOld;
  c->numVelocityComp     = numComponents;
  c->velocityComp        = components;
  c->velocityInterpLocal = interp;
  c->velocityCtx         = ctx;
  return(0);
}
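
/*
   The "Local" interpolation variants receive the raw array obtained from
   DMDAVecGetArray() on a ghosted local vector, rather than a Vec. A
   hypothetical 2D callback is sketched below; the name, the dof layout
   (stride of numComp, matching the solution update in CharacteristicSolve()),
   and the bilinear rule are illustrative assumptions, not part of PETSc.
*/
#if 0
PetscErrorCode InterpVelocity2D(void *f, PetscReal ij[], PetscInt numComp,
                                PetscInt components[], PetscScalar values[], void *ctx)
{
  PetscScalar **v = (PetscScalar**) f;                     /* from DMDAVecGetArray() */
  PetscInt    i   = (PetscInt) ij[0], j = (PetscInt) ij[1], comp;
  PetscReal   a   = ij[0] - i, b = ij[1] - j;              /* fractional offsets */

  for (comp = 0; comp < numComp; comp++) {
    /* bilinear interpolation between the four surrounding nodes */
    values[comp] = (1.-a)*(1.-b)*v[j][i*numComp       + components[comp]]
                 +     a *(1.-b)*v[j][(i+1)*numComp   + components[comp]]
                 + (1.-a)*    b *v[j+1][i*numComp     + components[comp]]
                 +     a *    b *v[j+1][(i+1)*numComp + components[comp]];
  }
  return 0;
}
#endif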
PetscErrorCode CharacteristicSetFieldInterpolation(Characteristic c, DM da, Vec v, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(Vec, PetscReal[], PetscInt, PetscInt[], PetscScalar[], void*), void *ctx)
{
#if 0
  if (numComponents > 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP, "Fields with more than 2 components are not supported. Send mail to petsc-maint@mcs.anl.gov.");
#endif
  c->fieldDA      = da;
  c->field        = v;
  c->numFieldComp = numComponents;
  c->fieldComp    = components;
  c->fieldInterp  = interp;
  c->fieldCtx     = ctx;
  return(0);
}

PetscErrorCode CharacteristicSetFieldInterpolationLocal(Characteristic c, DM da, Vec v, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(void*, PetscReal[], PetscInt, PetscInt[], PetscScalar [], void*), void *ctx)
{
#if 0
  if (numComponents > 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP, "Fields with more than 2 components are not supported. Send mail to petsc-maint@mcs.anl.gov.");
#endif
  c->fieldDA          = da;
  c->field            = v;
  c->numFieldComp     = numComponents;
  c->fieldComp        = components;
  c->fieldInterpLocal = interp;
  c->fieldCtx         = ctx;
  return(0);
}
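
/*
   CharacteristicSolve() traces the characteristic backward from each grid
   node x_i over one time step, in two stages:

     1) midpoint of the trajectory:  X_half = x_i - (dt/2) v(x_i)
     2) velocity at the midpoint:    v_mid  = ( v(X_half) + v_old(X_half) ) / 2
     3) foot of the characteristic:  X_foot = x_i - dt v_mid
     4) solution update:             solution(x_i) = field(X_foot)

   Departure points that leave the local subdomain are forwarded to the owning
   neighbor (see the process map before DMDAGetNeighborRelative() below), and
   the interpolated values are returned via CharacteristicGetValuesBegin/End().
*/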
PetscErrorCode CharacteristicSolve(Characteristic c, PetscReal dt, Vec solution)
{
  CharacteristicPointDA2D Qi;
  DM                      da = c->velocityDA;
  Vec                     velocityLocal, velocityLocalOld;
  Vec                     fieldLocal;
  DMDALocalInfo           info;
  PetscScalar             **solArray;
  void                    *velocityArray;
  void                    *velocityArrayOld;
  void                    *fieldArray;
  PassiveScalar           *interpIndices;
  PassiveScalar           *velocityValues, *velocityValuesOld;
  PassiveScalar           *fieldValues;
  PetscMPIInt             rank;
  PetscInt                dim;
  PetscMPIInt             neighbors[9];
  PetscInt                dof;
  PetscInt                gx, gy;
  PetscInt                n, is, ie, js, je, comp;

  c->queueSize = 0;
  MPI_Comm_rank(PetscObjectComm((PetscObject)c), &rank);
  DMDAGetNeighborsRank(da, neighbors);
  CharacteristicSetNeighbors(c, 9, neighbors);
  CharacteristicSetUp(c);
  /* global and local grid info */
  DMDAGetInfo(da, &dim, &gx, &gy, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0);
  DMDAGetLocalInfo(da, &info);
  is = info.xs; ie = info.xs+info.xm;
  js = info.ys; je = info.ys+info.ym;

  /* Allocation */
  PetscMalloc1(dim, &interpIndices);
  PetscMalloc1(c->numVelocityComp, &velocityValues);
  PetscMalloc1(c->numVelocityComp, &velocityValuesOld);
  PetscMalloc1(c->numFieldComp, &fieldValues);
  PetscLogEventBegin(CHARACTERISTIC_Solve,0,0,0,0);

  /* -----------------------------------------------------------------------
     PART 1, AT t-dt/2
     -----------------------------------------------------------------------*/
  PetscLogEventBegin(CHARACTERISTIC_QueueSetup,0,0,0,0);
  /* GET POSITION AT HALF TIME IN THE PAST */
  if (c->velocityInterpLocal) {
    DMGetLocalVector(c->velocityDA, &velocityLocal);
    DMGetLocalVector(c->velocityDA, &velocityLocalOld);
    DMGlobalToLocalBegin(c->velocityDA, c->velocity, INSERT_VALUES, velocityLocal);
    DMGlobalToLocalEnd(c->velocityDA, c->velocity, INSERT_VALUES, velocityLocal);
    DMGlobalToLocalBegin(c->velocityDA, c->velocityOld, INSERT_VALUES, velocityLocalOld);
    DMGlobalToLocalEnd(c->velocityDA, c->velocityOld, INSERT_VALUES, velocityLocalOld);
    DMDAVecGetArray(c->velocityDA, velocityLocal, &velocityArray);
    DMDAVecGetArray(c->velocityDA, velocityLocalOld, &velocityArrayOld);
  }
  PetscInfo(NULL, "Calculating position at t_{n - 1/2}\n");
  for (Qi.j = js; Qi.j < je; Qi.j++) {
    for (Qi.i = is; Qi.i < ie; Qi.i++) {
      interpIndices[0] = Qi.i;
      interpIndices[1] = Qi.j;
      if (c->velocityInterpLocal) c->velocityInterpLocal(velocityArray, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx);
      else c->velocityInterp(c->velocity, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx);
      Qi.x = Qi.i - velocityValues[0]*dt/2.0;
      Qi.y = Qi.j - velocityValues[1]*dt/2.0;

      /* Determine whether the position at t - dt/2 is local */
      Qi.proc = DMDAGetNeighborRelative(da, Qi.x, Qi.y);

      /* Check for periodic boundaries and move all periodic points back onto the domain */
      DMDAMapCoordsToPeriodicDomain(da,&(Qi.x),&(Qi.y));
      CharacteristicAddPoint(c, &Qi);
    }
  }
  PetscLogEventEnd(CHARACTERISTIC_QueueSetup,0,0,0,0);

  PetscLogEventBegin(CHARACTERISTIC_HalfTimeExchange,0,0,0,0);
  CharacteristicSendCoordinatesBegin(c);
  PetscLogEventEnd(CHARACTERISTIC_HalfTimeExchange,0,0,0,0);

  PetscLogEventBegin(CHARACTERISTIC_HalfTimeLocal,0,0,0,0);
  /* Calculate velocity at t_{n - 1/2} (local values) */
  PetscInfo(NULL, "Calculating local velocities at t_{n - 1/2}\n");
  for (n = 0; n < c->queueSize; n++) {
    Qi = c->queue[n];
    if (c->neighbors[Qi.proc] == rank) {
      interpIndices[0] = Qi.x;
      interpIndices[1] = Qi.y;
      if (c->velocityInterpLocal) {
        c->velocityInterpLocal(velocityArray, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx);
        c->velocityInterpLocal(velocityArrayOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx);
      } else {
        c->velocityInterp(c->velocity, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx);
        c->velocityInterp(c->velocityOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx);
      }
      Qi.x = 0.5*(velocityValues[0] + velocityValuesOld[0]);
      Qi.y = 0.5*(velocityValues[1] + velocityValuesOld[1]);
    }
    c->queue[n] = Qi;
  }
  PetscLogEventEnd(CHARACTERISTIC_HalfTimeLocal,0,0,0,0);

  PetscLogEventBegin(CHARACTERISTIC_HalfTimeExchange,0,0,0,0);
  CharacteristicSendCoordinatesEnd(c);
  PetscLogEventEnd(CHARACTERISTIC_HalfTimeExchange,0,0,0,0);

  /* Calculate velocity at t_{n - 1/2} (fill remote requests) */
  PetscLogEventBegin(CHARACTERISTIC_HalfTimeRemote,0,0,0,0);
  PetscInfo1(NULL, "Calculating %d remote velocities at t_{n - 1/2}\n", c->queueRemoteSize);
  for (n = 0; n < c->queueRemoteSize; n++) {
    Qi = c->queueRemote[n];
    interpIndices[0] = Qi.x;
    interpIndices[1] = Qi.y;
    if (c->velocityInterpLocal) {
      c->velocityInterpLocal(velocityArray, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx);
      c->velocityInterpLocal(velocityArrayOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx);
    } else {
      c->velocityInterp(c->velocity, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx);
      c->velocityInterp(c->velocityOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx);
    }
    Qi.x = 0.5*(velocityValues[0] + velocityValuesOld[0]);
    Qi.y = 0.5*(velocityValues[1] + velocityValuesOld[1]);
    c->queueRemote[n] = Qi;
  }
  PetscLogEventEnd(CHARACTERISTIC_HalfTimeRemote,0,0,0,0);
  PetscLogEventBegin(CHARACTERISTIC_HalfTimeExchange,0,0,0,0);
  CharacteristicGetValuesBegin(c);
  CharacteristicGetValuesEnd(c);
  if (c->velocityInterpLocal) {
    DMDAVecRestoreArray(c->velocityDA, velocityLocal, &velocityArray);
    DMDAVecRestoreArray(c->velocityDA, velocityLocalOld, &velocityArrayOld);
    DMRestoreLocalVector(c->velocityDA, &velocityLocal);
    DMRestoreLocalVector(c->velocityDA, &velocityLocalOld);
  }
  PetscLogEventEnd(CHARACTERISTIC_HalfTimeExchange,0,0,0,0);

  /* -----------------------------------------------------------------------
     PART 2, AT t-dt
     -----------------------------------------------------------------------*/

  /* GET POSITION AT t_{n-1} (local values) */
  PetscLogEventBegin(CHARACTERISTIC_FullTimeLocal,0,0,0,0);
  PetscInfo(NULL, "Calculating position at t_{n-1}\n");
  for (n = 0; n < c->queueSize; n++) {
    Qi   = c->queue[n];
    Qi.x = Qi.i - Qi.x*dt;
    Qi.y = Qi.j - Qi.y*dt;

    /* Determine whether the position at t-dt is local */
    Qi.proc = DMDAGetNeighborRelative(da, Qi.x, Qi.y);

    /* Check for periodic boundaries and move all periodic points back onto the domain */
    DMDAMapCoordsToPeriodicDomain(da,&(Qi.x),&(Qi.y));

    c->queue[n] = Qi;
  }
  PetscLogEventEnd(CHARACTERISTIC_FullTimeLocal,0,0,0,0);

  PetscLogEventBegin(CHARACTERISTIC_FullTimeExchange,0,0,0,0);
  CharacteristicSendCoordinatesBegin(c);
  PetscLogEventEnd(CHARACTERISTIC_FullTimeExchange,0,0,0,0);

  /* GET VALUE AT FULL TIME IN THE PAST (LOCAL REQUESTS) */
  PetscLogEventBegin(CHARACTERISTIC_FullTimeLocal,0,0,0,0);
  if (c->fieldInterpLocal) {
    DMGetLocalVector(c->fieldDA, &fieldLocal);
    DMGlobalToLocalBegin(c->fieldDA, c->field, INSERT_VALUES, fieldLocal);
    DMGlobalToLocalEnd(c->fieldDA, c->field, INSERT_VALUES, fieldLocal);
    DMDAVecGetArray(c->fieldDA, fieldLocal, &fieldArray);
  }
  PetscInfo(NULL, "Calculating local field at t_{n-1}\n");
  for (n = 0; n < c->queueSize; n++) {
    if (c->neighbors[c->queue[n].proc] == rank) {
      interpIndices[0] = c->queue[n].x;
      interpIndices[1] = c->queue[n].y;
      if (c->fieldInterpLocal) c->fieldInterpLocal(fieldArray, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx);
      else c->fieldInterp(c->field, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx);
      for (comp = 0; comp < c->numFieldComp; comp++) c->queue[n].field[comp] = fieldValues[comp];
    }
  }
  PetscLogEventEnd(CHARACTERISTIC_FullTimeLocal,0,0,0,0);

  PetscLogEventBegin(CHARACTERISTIC_FullTimeExchange,0,0,0,0);
  CharacteristicSendCoordinatesEnd(c);
  PetscLogEventEnd(CHARACTERISTIC_FullTimeExchange,0,0,0,0);

  /* GET VALUE AT FULL TIME IN THE PAST (REMOTE REQUESTS) */
  PetscLogEventBegin(CHARACTERISTIC_FullTimeRemote,0,0,0,0);
  PetscInfo1(NULL, "Calculating %d remote field points at t_{n-1}\n", c->queueRemoteSize);
  for (n = 0; n < c->queueRemoteSize; n++) {
    interpIndices[0] = c->queueRemote[n].x;
    interpIndices[1] = c->queueRemote[n].y;

    /* for debugging purposes */
    if (1) { /* hacked bounds test...let's do better */
      PetscScalar im = interpIndices[0]; PetscScalar jm = interpIndices[1];

      if ((im < (PetscScalar) is - 1.) || (im > (PetscScalar) ie) || (jm < (PetscScalar) js - 1.) || (jm > (PetscScalar) je)) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_LIB, "Nonlocal point: (%g,%g)", im, jm);
    }

    if (c->fieldInterpLocal) c->fieldInterpLocal(fieldArray, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx);
    else c->fieldInterp(c->field, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx);
    for (comp = 0; comp < c->numFieldComp; comp++) c->queueRemote[n].field[comp] = fieldValues[comp];
  }
  PetscLogEventEnd(CHARACTERISTIC_FullTimeRemote,0,0,0,0);
  PetscLogEventBegin(CHARACTERISTIC_FullTimeExchange,0,0,0,0);
  CharacteristicGetValuesBegin(c);
  CharacteristicGetValuesEnd(c);
  if (c->fieldInterpLocal) {
    DMDAVecRestoreArray(c->fieldDA, fieldLocal, &fieldArray);
    DMRestoreLocalVector(c->fieldDA, &fieldLocal);
  }
  PetscLogEventEnd(CHARACTERISTIC_FullTimeExchange,0,0,0,0);

  /* Return the field of characteristics at t_{n-1} */
  PetscLogEventBegin(CHARACTERISTIC_DAUpdate,0,0,0,0);
  DMDAGetInfo(c->fieldDA,0,0,0,0,0,0,0,&dof,0,0,0,0,0);
  DMDAVecGetArray(c->fieldDA, solution, &solArray);
  for (n = 0; n < c->queueSize; n++) {
    Qi = c->queue[n];
    for (comp = 0; comp < c->numFieldComp; comp++) solArray[Qi.j][Qi.i*dof+c->fieldComp[comp]] = Qi.field[comp];
  }
  DMDAVecRestoreArray(c->fieldDA, solution, &solArray);
  PetscLogEventEnd(CHARACTERISTIC_DAUpdate,0,0,0,0);
  PetscLogEventEnd(CHARACTERISTIC_Solve,0,0,0,0);

  /* Cleanup */
  PetscFree(interpIndices);
  PetscFree(velocityValues);
  PetscFree(velocityValuesOld);
  PetscFree(fieldValues);
  return(0);
}
PetscErrorCode CharacteristicSetNeighbors(Characteristic c, PetscInt numNeighbors, PetscMPIInt neighbors[])
{
  c->numNeighbors = numNeighbors;
  PetscFree(c->neighbors);
  PetscMalloc1(numNeighbors, &c->neighbors);
  PetscMemcpy(c->neighbors, neighbors, numNeighbors * sizeof(PetscMPIInt));
  return(0);
}
PetscErrorCode CharacteristicAddPoint(Characteristic c, CharacteristicPointDA2D *point)
{
  if (c->queueSize >= c->queueMax) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE, "Exceeded maximum queue size %d", c->queueMax);
  c->queue[c->queueSize++] = *point;
  return(0);
}
PetscErrorCode CharacteristicSendCoordinatesBegin(Characteristic c)
{
  PetscMPIInt rank, tag = 121;
  PetscInt    i, n;

  MPI_Comm_rank(PetscObjectComm((PetscObject)c), &rank);
  CharacteristicHeapSort(c, c->queue, c->queueSize);
  PetscMemzero(c->needCount, c->numNeighbors * sizeof(PetscInt));
  for (i = 0; i < c->queueSize; i++) c->needCount[c->queue[i].proc]++;
  c->fillCount[0] = 0;
  for (n = 1; n < c->numNeighbors; n++) {
    MPI_Irecv(&(c->fillCount[n]), 1, MPIU_INT, c->neighbors[n], tag, PetscObjectComm((PetscObject)c), &(c->request[n-1]));
  }
  for (n = 1; n < c->numNeighbors; n++) {
    MPI_Send(&(c->needCount[n]), 1, MPIU_INT, c->neighbors[n], tag, PetscObjectComm((PetscObject)c));
  }
  MPI_Waitall(c->numNeighbors-1, c->request, c->status);
  /* Initialize the remote queue */
  c->queueLocalMax  = c->localOffsets[0]  = 0;
  c->queueRemoteMax = c->remoteOffsets[0] = 0;
  for (n = 1; n < c->numNeighbors; n++) {
    c->remoteOffsets[n] = c->queueRemoteMax;
    c->queueRemoteMax  += c->fillCount[n];
    c->localOffsets[n]  = c->queueLocalMax;
    c->queueLocalMax   += c->needCount[n];
  }
  /* HACK BEGIN: shift the send offsets past the locally owned points, which sort first */
  for (n = 1; n < c->numNeighbors; n++) c->localOffsets[n] += c->needCount[0];
  c->needCount[0] = 0;
  /* HACK END */
  if (c->queueRemoteMax) {
    PetscMalloc(sizeof(CharacteristicPointDA2D) * c->queueRemoteMax, &c->queueRemote);
  } else c->queueRemote = NULL;
  c->queueRemoteSize = c->queueRemoteMax;

  /* Send and receive requests for values at the half time level, giving the coordinates for interpolation */
  for (n = 1; n < c->numNeighbors; n++) {
    PetscInfo2(NULL, "Receiving %d requests for values from proc %d\n", c->fillCount[n], c->neighbors[n]);
    MPI_Irecv(&(c->queueRemote[c->remoteOffsets[n]]), c->fillCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c), &(c->request[n-1]));
  }
  for (n = 1; n < c->numNeighbors; n++) {
    PetscInfo2(NULL, "Sending %d requests for values to proc %d\n", c->needCount[n], c->neighbors[n]);
    MPI_Send(&(c->queue[c->localOffsets[n]]), c->needCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c));
  }
  return(0);
}
PetscErrorCode CharacteristicSendCoordinatesEnd(Characteristic c)
{
#if 0
  PetscMPIInt rank;
  PetscInt    n;
#endif

  MPI_Waitall(c->numNeighbors-1, c->request, c->status);
#if 0
  MPI_Comm_rank(PetscObjectComm((PetscObject)c), &rank);
  for (n = 0; n < c->queueRemoteSize; n++) {
    if (c->neighbors[c->queueRemote[n].proc] == rank) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB, "This is messed up, n = %d proc = %d", n, c->queueRemote[n].proc);
  }
#endif
  return(0);
}
PetscErrorCode CharacteristicGetValuesBegin(Characteristic c)
{
  PetscMPIInt tag = 121;
  PetscInt    n;

  /* SEND AND RECEIVE FILLED REQUESTS for the interpolated values */
  for (n = 1; n < c->numNeighbors; n++) {
    MPI_Irecv(&(c->queue[c->localOffsets[n]]), c->needCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c), &(c->request[n-1]));
  }
  for (n = 1; n < c->numNeighbors; n++) {
    MPI_Send(&(c->queueRemote[c->remoteOffsets[n]]), c->fillCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c));
  }
  return(0);
}
PetscErrorCode CharacteristicGetValuesEnd(Characteristic c)
{
  MPI_Waitall(c->numNeighbors-1, c->request, c->status);
  /* Free queue of requests from other procs */
  PetscFree(c->queueRemote);
  return(0);
}
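
/*
   Both exchanges above follow the same nonblocking pattern, sketched here:

     for (n = 1; n < numNeighbors; n++) MPI_Irecv(...buffer for neighbor n...);
     for (n = 1; n < numNeighbors; n++) MPI_Send(...data for neighbor n...);
     MPI_Waitall(numNeighbors-1, requests, statuses);

   Neighbor 0 is the local process itself, so the loops start at n = 1 and
   only numNeighbors-1 requests are ever outstanding.
*/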
/*---------------------------------------------------------------------*/
/*
  Based on code from http://linux.wku.edu/~lamonml/algor/sort/heap.html
*/
PetscErrorCode CharacteristicHeapSort(Characteristic c, Queue queue, PetscInt size)
/*---------------------------------------------------------------------*/
{
  CharacteristicPointDA2D temp;
  PetscInt                n;

  if (0) { /* Check the order of the queue before sorting */
    PetscInfo(NULL, "Before Heap sort\n");
    for (n=0; n<size; n++) {
      PetscInfo2(NULL,"%d %d\n",n,queue[n].proc);
    }
  }

  /* SORTING PHASE */
  for (n = (size / 2)-1; n >= 0; n--) {
    CharacteristicSiftDown(c, queue, n, size-1); /* bottom is the last valid index, size-1 */
  }
  for (n = size-1; n >= 1; n--) {
    temp     = queue[0];
    queue[0] = queue[n];
    queue[n] = temp;
    CharacteristicSiftDown(c, queue, 0, n-1);
  }
  if (0) { /* Check the order of the queue after sorting */
    PetscInfo(NULL, "After Heap sort\n");
    for (n=0; n<size; n++) {
      PetscInfo2(NULL,"%d %d\n",n,queue[n].proc);
    }
  }
  return(0);
}
/*---------------------------------------------------------------------*/
/*
  Based on code from http://linux.wku.edu/~lamonml/algor/sort/heap.html
*/
PetscErrorCode CharacteristicSiftDown(Characteristic c, Queue queue, PetscInt root, PetscInt bottom)
/*---------------------------------------------------------------------*/
{
  PetscBool               done = PETSC_FALSE;
  PetscInt                maxChild;
  CharacteristicPointDA2D temp;

  /* Sift queue[root] down toward the leaves; for a 0-based array the
     children of node i are nodes 2i+1 and 2i+2 */
  while ((2*root+1 <= bottom) && (!done)) {
    if (2*root+1 == bottom) maxChild = 2*root+1;
    else if (queue[2*root+1].proc > queue[2*root+2].proc) maxChild = 2*root+1;
    else maxChild = 2*root+2;

    if (queue[root].proc < queue[maxChild].proc) {
      temp            = queue[root];
      queue[root]     = queue[maxChild];
      queue[maxChild] = temp;
      root            = maxChild;
    } else done = PETSC_TRUE;
  }
  return(0);
}
/* [center, left, top-left, top, top-right, right, bottom-right, bottom, bottom-left] */
PetscErrorCode DMDAGetNeighborsRank(DM da, PetscMPIInt neighbors[])
{
  DMBoundaryType bx, by;
  PetscBool      IPeriodic = PETSC_FALSE, JPeriodic = PETSC_FALSE;
  MPI_Comm       comm;
  PetscMPIInt    rank;
  PetscInt       **procs,pi,pj,pim,pip,pjm,pjp,PI,PJ;

  PetscObjectGetComm((PetscObject) da, &comm);
  MPI_Comm_rank(comm, &rank);
  DMDAGetInfo(da, 0, 0, 0, 0, &PI,&PJ, 0, 0, 0, &bx, &by,0, 0);

  if (bx == DM_BOUNDARY_PERIODIC) IPeriodic = PETSC_TRUE;
  if (by == DM_BOUNDARY_PERIODIC) JPeriodic = PETSC_TRUE;

  neighbors[0] = rank;
  rank = 0;
  PetscMalloc(sizeof(PetscInt*)*PJ,&procs);
  for (pj=0; pj<PJ; pj++) {
    PetscMalloc(sizeof(PetscInt)*PI,&(procs[pj]));
    for (pi=0; pi<PI; pi++) {
      procs[pj][pi] = rank;
      rank++;
    }
  }

  pi  = neighbors[0] % PI;
  pj  = neighbors[0] / PI;
  pim = pi-1; if (pim<0) pim=PI-1;
  pip = (pi+1)%PI;
  pjm = pj-1; if (pjm<0) pjm=PJ-1;
  pjp = (pj+1)%PJ;

  neighbors[1] = procs[pj] [pim];
  neighbors[2] = procs[pjp][pim];
  neighbors[3] = procs[pjp][pi];
  neighbors[4] = procs[pjp][pip];
  neighbors[5] = procs[pj] [pip];
  neighbors[6] = procs[pjm][pip];
  neighbors[7] = procs[pjm][pi];
  neighbors[8] = procs[pjm][pim];

  if (!IPeriodic) {
    if (pi==0)    neighbors[1]=neighbors[2]=neighbors[8]=neighbors[0];
    if (pi==PI-1) neighbors[4]=neighbors[5]=neighbors[6]=neighbors[0];
  }

  if (!JPeriodic) {
    if (pj==0)    neighbors[6]=neighbors[7]=neighbors[8]=neighbors[0];
    if (pj==PJ-1) neighbors[2]=neighbors[3]=neighbors[4]=neighbors[0];
  }

  for (pj = 0; pj < PJ; pj++) {
    PetscFree(procs[pj]);
  }
  PetscFree(procs);
  return(0);
}
/*
  SUBDOMAIN NEIGHBORHOOD PROCESS MAP:
    2 | 3 | 4
    __|___|__
    1 | 0 | 5
    __|___|__
    8 | 7 | 6
      |   |
*/
PetscInt DMDAGetNeighborRelative(DM da, PassiveReal ir, PassiveReal jr)
{
  DMDALocalInfo info;
  PassiveReal   is,ie,js,je;

  DMDAGetLocalInfo(da, &info);
  is = (PassiveReal) info.xs - 0.5; ie = (PassiveReal) info.xs + info.xm - 0.5;
  js = (PassiveReal) info.ys - 0.5; je = (PassiveReal) info.ys + info.ym - 0.5;

  if (ir >= is && ir <= ie) { /* center column */
    if (jr >= js && jr <= je) return 0;
    else if (jr < js)         return 7;
    else                      return 3;
  } else if (ir < is) { /* left column */
    if (jr >= js && jr <= je) return 1;
    else if (jr < js)         return 8;
    else                      return 2;
  } else { /* right column */
    if (jr >= js && jr <= je) return 5;
    else if (jr < js)         return 6;
    else                      return 4;
  }
}