Actual source code: characteristic.c

petsc-3.13.6 2020-09-29

#include <petsc/private/characteristicimpl.h>
#include <petscdmda.h>
#include <petscviewer.h>

PetscClassId  CHARACTERISTIC_CLASSID;
PetscLogEvent CHARACTERISTIC_SetUp, CHARACTERISTIC_Solve, CHARACTERISTIC_QueueSetup, CHARACTERISTIC_DAUpdate;
PetscLogEvent CHARACTERISTIC_HalfTimeLocal, CHARACTERISTIC_HalfTimeRemote, CHARACTERISTIC_HalfTimeExchange;
PetscLogEvent CHARACTERISTIC_FullTimeLocal, CHARACTERISTIC_FullTimeRemote, CHARACTERISTIC_FullTimeExchange;
/*
   Contains the list of registered characteristic routines
*/
PetscFunctionList CharacteristicList              = NULL;
PetscBool         CharacteristicRegisterAllCalled = PETSC_FALSE;

PetscErrorCode DMDAGetNeighborsRank(DM, PetscMPIInt []);
PetscInt       DMDAGetNeighborRelative(DM, PetscReal, PetscReal);
PetscErrorCode DMDAMapToPeriodicDomain(DM, PetscScalar []);

PetscErrorCode CharacteristicHeapSort(Characteristic, Queue, PetscInt);
PetscErrorCode CharacteristicSiftDown(Characteristic, Queue, PetscInt, PetscInt);

PetscErrorCode CharacteristicView(Characteristic c, PetscViewer viewer)
{
  PetscBool iascii;

  if (!viewer) {
    PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)c), &viewer);
  }

  PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);
  if (!iascii) {
    if (c->ops->view) {
      (*c->ops->view)(c, viewer);
    }
  }
  return(0);
}

PetscErrorCode CharacteristicDestroy(Characteristic *c)
{
  if (!*c) return(0);
  if (--((PetscObject)(*c))->refct > 0) return(0);

  if ((*c)->ops->destroy) {
    (*(*c)->ops->destroy)((*c));
  }
  MPI_Type_free(&(*c)->itemType);
  PetscFree((*c)->queue);
  PetscFree((*c)->queueLocal);
  PetscFree((*c)->queueRemote);
  PetscFree((*c)->neighbors);
  PetscFree((*c)->needCount);
  PetscFree((*c)->localOffsets);
  PetscFree((*c)->fillCount);
  PetscFree((*c)->remoteOffsets);
  PetscFree((*c)->request);
  PetscFree((*c)->status);
  PetscHeaderDestroy(c);
  return(0);
}

PetscErrorCode CharacteristicCreate(MPI_Comm comm, Characteristic *c)
{
  Characteristic newC;

  *c = NULL;
  CharacteristicInitializePackage();

  PetscHeaderCreate(newC, CHARACTERISTIC_CLASSID, "Characteristic", "Characteristic", "Characteristic", comm, CharacteristicDestroy, CharacteristicView);
  *c   = newC;

  newC->structured          = PETSC_TRUE;
  newC->numIds              = 0;
  newC->velocityDA          = NULL;
  newC->velocity            = NULL;
  newC->velocityOld         = NULL;
  newC->numVelocityComp     = 0;
  newC->velocityComp        = NULL;
  newC->velocityInterp      = NULL;
  newC->velocityInterpLocal = NULL;
  newC->velocityCtx         = NULL;
  newC->fieldDA             = NULL;
  newC->field               = NULL;
  newC->numFieldComp        = 0;
  newC->fieldComp           = NULL;
  newC->fieldInterp         = NULL;
  newC->fieldInterpLocal    = NULL;
  newC->fieldCtx            = NULL;
  newC->itemType            = 0;
  newC->queue               = NULL;
  newC->queueSize           = 0;
  newC->queueMax            = 0;
  newC->queueLocal          = NULL;
  newC->queueLocalSize      = 0;
  newC->queueLocalMax       = 0;
  newC->queueRemote         = NULL;
  newC->queueRemoteSize     = 0;
  newC->queueRemoteMax      = 0;
  newC->numNeighbors        = 0;
  newC->neighbors           = NULL;
  newC->needCount           = NULL;
  newC->localOffsets        = NULL;
  newC->fillCount           = NULL;
  newC->remoteOffsets       = NULL;
  newC->request             = NULL;
  newC->status              = NULL;
  return(0);
}
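
/* Editor's sketch (not part of petsc-3.13.6): the typical lifecycle of a
   Characteristic object, from creation through a solve to destruction. The
   DMs, Vecs, dt, and the InterpVelocity2D/InterpField2D callbacks are
   application-side placeholders (example callbacks are sketched further
   down, after the interpolation setters). Guarded by #if 0 since the names
   are assumptions. */
#if 0
static PetscErrorCode SolveOneStep(DM velDA, Vec vel, Vec velOld, DM fieldDA, Vec phi, Vec phiNew, PetscReal dt)
{
  Characteristic c;
  PetscInt       velComp[2] = {0, 1};  /* which velocity dofs to interpolate */
  PetscInt       phiComp[1] = {0};     /* which field dofs to advect */
  PetscErrorCode ierr;

  ierr = CharacteristicCreate(PETSC_COMM_WORLD, &c);CHKERRQ(ierr);
  ierr = CharacteristicSetType(c, CHARACTERISTICDA);CHKERRQ(ierr);
  ierr = CharacteristicSetVelocityInterpolationLocal(c, velDA, vel, velOld, 2, velComp, InterpVelocity2D, NULL);CHKERRQ(ierr);
  ierr = CharacteristicSetFieldInterpolationLocal(c, fieldDA, phi, 1, phiComp, InterpField2D, NULL);CHKERRQ(ierr);
  ierr = CharacteristicSolve(c, dt, phiNew);CHKERRQ(ierr);
  ierr = CharacteristicDestroy(&c);CHKERRQ(ierr);
  return(0);
}
#endif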

/*@C
   CharacteristicSetType - Builds Characteristic for a particular solver.

   Logically Collective on Characteristic

   Input Parameters:
+  c    - the method of characteristics context
-  type - a known method

   Options Database Key:
.  -characteristic_type <method> - Sets the method; use -help for a list
    of available methods

   Notes:
   See "include/petsccharacteristic.h" for available methods

  Normally, it is best to use the CharacteristicSetFromOptions() command and
  then set the Characteristic type from the options database rather than by using
  this routine.  Using the options database provides the user with
  maximum flexibility in evaluating the many available methods.
  The CharacteristicSetType() routine is provided for those situations where it
  is necessary to set the solver independently of the command
  line or options database.  This might be the case, for example, when
  the choice of solver changes during the execution of the
  program, and the user's application code is taking responsibility for
  choosing the appropriate method.  In other words, this routine is
  not for beginners.

  Level: intermediate

.seealso: CharacteristicType

@*/
PetscErrorCode CharacteristicSetType(Characteristic c, CharacteristicType type)
{
  PetscErrorCode (*r)(Characteristic);
  PetscBool      match;

  PetscObjectTypeCompare((PetscObject) c, type, &match);
  if (match) return(0);

  if (c->data) {
    /* destroy the old private Characteristic context */
    (*c->ops->destroy)(c);
    c->ops->destroy = NULL;
    c->data         = NULL;
  }

  PetscFunctionListFind(CharacteristicList, type, &r);
  if (!r) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown Characteristic type given: %s", type);
  c->setupcalled = 0;
  (*r)(c);
  PetscObjectChangeTypeName((PetscObject) c, type);
  return(0);
}
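
/* Editor's sketch (assumption, not in this file): selecting the method
   procedurally, as the Notes above describe. CHARACTERISTICDA is the default
   type CharacteristicSetUp() falls back to below; "my_char" stands for a
   user-registered type (see CharacteristicRegister()). */
#if 0
  if (useCustom) {
    CharacteristicSetType(c, "my_char");
  } else {
    CharacteristicSetType(c, CHARACTERISTICDA);
  }
#endif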

/*@
   CharacteristicSetUp - Sets up the internal data structures for the
   later use of the method of characteristics solver.

   Collective on Characteristic

   Input Parameter:
.  c - context obtained from CharacteristicCreate()

   Level: developer

.seealso: CharacteristicCreate(), CharacteristicSolve(), CharacteristicDestroy()
@*/
PetscErrorCode CharacteristicSetUp(Characteristic c)
{
  if (!((PetscObject)c)->type_name) {
    CharacteristicSetType(c, CHARACTERISTICDA);
  }

  if (c->setupcalled == 2) return(0);

  PetscLogEventBegin(CHARACTERISTIC_SetUp,c,0,0,0);
  if (!c->setupcalled) {
    (*c->ops->setup)(c);
  }
  PetscLogEventEnd(CHARACTERISTIC_SetUp,c,0,0,0);
  c->setupcalled = 2;
  return(0);
}

/*@C
  CharacteristicRegister - Adds a solver to the method of characteristics package.

   Not Collective

   Input Parameters:
+  sname - name of a new user-defined solver
-  function - routine to create method context

  Sample usage:
.vb
    CharacteristicRegister("my_char", MyCharCreate);
.ve

  Then, your Characteristic type can be chosen with the procedural interface via
.vb
    CharacteristicCreate(MPI_Comm comm, Characteristic *mc);
    CharacteristicSetType(mc, "my_char");
.ve
   or at runtime via the option
.vb
    -characteristic_type my_char
.ve

   Notes:
   CharacteristicRegister() may be called multiple times to add several user-defined solvers.

.seealso: CharacteristicRegisterAll(), CharacteristicRegisterDestroy()

  Level: advanced
@*/
PetscErrorCode CharacteristicRegister(const char sname[], PetscErrorCode (*function)(Characteristic))
{
  CharacteristicInitializePackage();
  PetscFunctionListAdd(&CharacteristicList, sname, function);
  return(0);
}
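
/* Editor's sketch of a hypothetical routine_create argument for
   CharacteristicRegister(). The ops->setup and ops->destroy members are the
   hooks this file actually dispatches through (see CharacteristicSetUp() and
   CharacteristicDestroy() above); the trivial bodies are placeholders. */
#if 0
static PetscErrorCode MyCharSetUp(Characteristic c)   { return(0); }
static PetscErrorCode MyCharDestroy(Characteristic c) { return(0); }

PetscErrorCode MyCharCreate(Characteristic c)
{
  c->ops->setup   = MyCharSetUp;
  c->ops->destroy = MyCharDestroy;
  return(0);
}
#endif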

PetscErrorCode CharacteristicSetVelocityInterpolation(Characteristic c, DM da, Vec v, Vec vOld, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(Vec, PetscReal[], PetscInt, PetscInt[], PetscScalar[], void*), void *ctx)
{
  c->velocityDA      = da;
  c->velocity        = v;
  c->velocityOld     = vOld;
  c->numVelocityComp = numComponents;
  c->velocityComp    = components;
  c->velocityInterp  = interp;
  c->velocityCtx     = ctx;
  return(0);
}

PetscErrorCode CharacteristicSetVelocityInterpolationLocal(Characteristic c, DM da, Vec v, Vec vOld, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(void*, PetscReal[], PetscInt, PetscInt[], PetscScalar[], void*), void *ctx)
{
  c->velocityDA          = da;
  c->velocity            = v;
  c->velocityOld         = vOld;
  c->numVelocityComp     = numComponents;
  c->velocityComp        = components;
  c->velocityInterpLocal = interp;
  c->velocityCtx         = ctx;
  return(0);
}
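
/* Editor's sketch of a callback matching the local-interpolation signature
   above. It assumes the velocity DMDA has dof=2, accessed through
   DMDAVecGetArray() as a struct-of-components array; the Field struct and
   bilinear weighting are illustrative, not part of this file. */
#if 0
typedef struct { PetscScalar u, v; } Field;

static PetscErrorCode InterpVelocity2D(void *f, PetscReal ij[], PetscInt numComp,
                                       PetscInt components[], PetscScalar values[], void *ctx)
{
  Field     **vel = (Field **) f;  /* ghosted array from DMDAVecGetArray() */
  PetscInt  i  = (PetscInt) PetscFloorReal(ij[0]);
  PetscInt  j  = (PetscInt) PetscFloorReal(ij[1]);
  PetscReal dx = ij[0] - i, dy = ij[1] - j;  /* fractional offsets in the cell */

  /* bilinear combination of the four surrounding nodes */
  values[0] = (1-dx)*(1-dy)*vel[j][i].u + dx*(1-dy)*vel[j][i+1].u
            + (1-dx)*dy*vel[j+1][i].u   + dx*dy*vel[j+1][i+1].u;
  values[1] = (1-dx)*(1-dy)*vel[j][i].v + dx*(1-dy)*vel[j][i+1].v
            + (1-dx)*dy*vel[j+1][i].v   + dx*dy*vel[j+1][i+1].v;
  return(0);
}
#endif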

PetscErrorCode CharacteristicSetFieldInterpolation(Characteristic c, DM da, Vec v, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(Vec, PetscReal[], PetscInt, PetscInt[], PetscScalar[], void*), void *ctx)
{
#if 0
  if (numComponents > 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP, "Fields with more than 2 components are not supported. Send mail to petsc-maint@mcs.anl.gov.");
#endif
  c->fieldDA      = da;
  c->field        = v;
  c->numFieldComp = numComponents;
  c->fieldComp    = components;
  c->fieldInterp  = interp;
  c->fieldCtx     = ctx;
  return(0);
}

PetscErrorCode CharacteristicSetFieldInterpolationLocal(Characteristic c, DM da, Vec v, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(void*, PetscReal[], PetscInt, PetscInt[], PetscScalar[], void*), void *ctx)
{
#if 0
  if (numComponents > 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP, "Fields with more than 2 components are not supported. Send mail to petsc-maint@mcs.anl.gov.");
#endif
  c->fieldDA          = da;
  c->field            = v;
  c->numFieldComp     = numComponents;
  c->fieldComp        = components;
  c->fieldInterpLocal = interp;
  c->fieldCtx         = ctx;
  return(0);
}
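
/* Editor's sketch of a scalar field callback for the setter above, using the
   same bilinear stencil as the velocity example; the PetscScalar** view
   assumes a dof=1 field DMDA accessed with DMDAVecGetArray(). Illustrative
   only. */
#if 0
static PetscErrorCode InterpField2D(void *f, PetscReal ij[], PetscInt numComp,
                                    PetscInt components[], PetscScalar values[], void *ctx)
{
  PetscScalar **phi = (PetscScalar **) f;  /* ghosted array from DMDAVecGetArray() */
  PetscInt    i  = (PetscInt) PetscFloorReal(ij[0]);
  PetscInt    j  = (PetscInt) PetscFloorReal(ij[1]);
  PetscReal   dx = ij[0] - i, dy = ij[1] - j;

  values[0] = (1-dx)*(1-dy)*phi[j][i] + dx*(1-dy)*phi[j][i+1]
            + (1-dx)*dy*phi[j+1][i]   + dx*dy*phi[j+1][i+1];
  return(0);
}
#endif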

PetscErrorCode CharacteristicSolve(Characteristic c, PetscReal dt, Vec solution)
{
  CharacteristicPointDA2D Qi;
  DM                      da = c->velocityDA;
  Vec                     velocityLocal, velocityLocalOld;
  Vec                     fieldLocal;
  DMDALocalInfo           info;
  PetscScalar             **solArray;
  void                    *velocityArray;
  void                    *velocityArrayOld;
  void                    *fieldArray;
  PetscScalar             *interpIndices;
  PetscScalar             *velocityValues, *velocityValuesOld;
  PetscScalar             *fieldValues;
  PetscMPIInt             rank;
  PetscInt                dim;
  PetscMPIInt             neighbors[9];
  PetscInt                dof;
  PetscInt                gx, gy;
  PetscInt                n, is, ie, js, je, comp;

  c->queueSize = 0;
  MPI_Comm_rank(PetscObjectComm((PetscObject)c), &rank);
  DMDAGetNeighborsRank(da, neighbors);
  CharacteristicSetNeighbors(c, 9, neighbors);
  CharacteristicSetUp(c);
  /* global and local grid info */
  DMDAGetInfo(da, &dim, &gx, &gy, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0);
  DMDAGetLocalInfo(da, &info);
  is   = info.xs;          ie   = info.xs+info.xm;
  js   = info.ys;          je   = info.ys+info.ym;
  /* Allocation */
  PetscMalloc1(dim,                &interpIndices);
  PetscMalloc1(c->numVelocityComp, &velocityValues);
  PetscMalloc1(c->numVelocityComp, &velocityValuesOld);
  PetscMalloc1(c->numFieldComp,    &fieldValues);
  PetscLogEventBegin(CHARACTERISTIC_Solve,0,0,0,0);

  /* -----------------------------------------------------------------------
     PART 1, AT t-dt/2
     -----------------------------------------------------------------------*/
  PetscLogEventBegin(CHARACTERISTIC_QueueSetup,0,0,0,0);
  /* GET POSITION AT HALF TIME IN THE PAST */
  if (c->velocityInterpLocal) {
    DMGetLocalVector(c->velocityDA, &velocityLocal);
    DMGetLocalVector(c->velocityDA, &velocityLocalOld);
    DMGlobalToLocalBegin(c->velocityDA, c->velocity, INSERT_VALUES, velocityLocal);
    DMGlobalToLocalEnd(c->velocityDA, c->velocity, INSERT_VALUES, velocityLocal);
    DMGlobalToLocalBegin(c->velocityDA, c->velocityOld, INSERT_VALUES, velocityLocalOld);
    DMGlobalToLocalEnd(c->velocityDA, c->velocityOld, INSERT_VALUES, velocityLocalOld);
    DMDAVecGetArray(c->velocityDA, velocityLocal,    &velocityArray);
    DMDAVecGetArray(c->velocityDA, velocityLocalOld, &velocityArrayOld);
  }
  PetscInfo(NULL, "Calculating position at t_{n - 1/2}\n");
  for (Qi.j = js; Qi.j < je; Qi.j++) {
    for (Qi.i = is; Qi.i < ie; Qi.i++) {
      interpIndices[0] = Qi.i;
      interpIndices[1] = Qi.j;
      if (c->velocityInterpLocal) {c->velocityInterpLocal(velocityArray, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx);}
      else {c->velocityInterp(c->velocity, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx);}
      Qi.x = Qi.i - velocityValues[0]*dt/2.0;
      Qi.y = Qi.j - velocityValues[1]*dt/2.0;

      /* Determine whether the position at t - dt/2 is local */
      Qi.proc = DMDAGetNeighborRelative(da, Qi.x, Qi.y);

      /* Check for periodic boundaries and move all periodic points back onto the domain */
      DMDAMapCoordsToPeriodicDomain(da,&(Qi.x),&(Qi.y));
      CharacteristicAddPoint(c, &Qi);
    }
  }
  PetscLogEventEnd(CHARACTERISTIC_QueueSetup,0,0,0,0);

  PetscLogEventBegin(CHARACTERISTIC_HalfTimeExchange,0,0,0,0);
  CharacteristicSendCoordinatesBegin(c);
  PetscLogEventEnd(CHARACTERISTIC_HalfTimeExchange,0,0,0,0);

  PetscLogEventBegin(CHARACTERISTIC_HalfTimeLocal,0,0,0,0);
  /* Calculate velocity at t_{n-1/2} (local values) */
  PetscInfo(NULL, "Calculating local velocities at t_{n - 1/2}\n");
  for (n = 0; n < c->queueSize; n++) {
    Qi = c->queue[n];
    if (c->neighbors[Qi.proc] == rank) {
      interpIndices[0] = Qi.x;
      interpIndices[1] = Qi.y;
      if (c->velocityInterpLocal) {
        c->velocityInterpLocal(velocityArray,    interpIndices, c->numVelocityComp, c->velocityComp, velocityValues,    c->velocityCtx);
        c->velocityInterpLocal(velocityArrayOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx);
      } else {
        c->velocityInterp(c->velocity,    interpIndices, c->numVelocityComp, c->velocityComp, velocityValues,    c->velocityCtx);
        c->velocityInterp(c->velocityOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx);
      }
      Qi.x = 0.5*(velocityValues[0] + velocityValuesOld[0]);
      Qi.y = 0.5*(velocityValues[1] + velocityValuesOld[1]);
    }
    c->queue[n] = Qi;
  }
  PetscLogEventEnd(CHARACTERISTIC_HalfTimeLocal,0,0,0,0);

  PetscLogEventBegin(CHARACTERISTIC_HalfTimeExchange,0,0,0,0);
  CharacteristicSendCoordinatesEnd(c);
  PetscLogEventEnd(CHARACTERISTIC_HalfTimeExchange,0,0,0,0);


  /* Calculate velocity at t_{n-1/2} (fill remote requests) */
  PetscLogEventBegin(CHARACTERISTIC_HalfTimeRemote,0,0,0,0);
  PetscInfo1(NULL, "Calculating %d remote velocities at t_{n - 1/2}\n", c->queueRemoteSize);
  for (n = 0; n < c->queueRemoteSize; n++) {
    Qi = c->queueRemote[n];
    interpIndices[0] = Qi.x;
    interpIndices[1] = Qi.y;
    if (c->velocityInterpLocal) {
      c->velocityInterpLocal(velocityArray,    interpIndices, c->numVelocityComp, c->velocityComp, velocityValues,    c->velocityCtx);
      c->velocityInterpLocal(velocityArrayOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx);
    } else {
      c->velocityInterp(c->velocity,    interpIndices, c->numVelocityComp, c->velocityComp, velocityValues,    c->velocityCtx);
      c->velocityInterp(c->velocityOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx);
    }
    Qi.x = 0.5*(velocityValues[0] + velocityValuesOld[0]);
    Qi.y = 0.5*(velocityValues[1] + velocityValuesOld[1]);
    c->queueRemote[n] = Qi;
  }
  PetscLogEventEnd(CHARACTERISTIC_HalfTimeRemote,0,0,0,0);
  PetscLogEventBegin(CHARACTERISTIC_HalfTimeExchange,0,0,0,0);
  CharacteristicGetValuesBegin(c);
  CharacteristicGetValuesEnd(c);
  if (c->velocityInterpLocal) {
    DMDAVecRestoreArray(c->velocityDA, velocityLocal,    &velocityArray);
    DMDAVecRestoreArray(c->velocityDA, velocityLocalOld, &velocityArrayOld);
    DMRestoreLocalVector(c->velocityDA, &velocityLocal);
    DMRestoreLocalVector(c->velocityDA, &velocityLocalOld);
  }
  PetscLogEventEnd(CHARACTERISTIC_HalfTimeExchange,0,0,0,0);

  /* -----------------------------------------------------------------------
     PART 2, AT t-dt
     -----------------------------------------------------------------------*/

  /* GET POSITION AT t_n (local values) */
  PetscLogEventBegin(CHARACTERISTIC_FullTimeLocal,0,0,0,0);
  PetscInfo(NULL, "Calculating position at t_{n}\n");
  for (n = 0; n < c->queueSize; n++) {
    Qi   = c->queue[n];
    Qi.x = Qi.i - Qi.x*dt;
    Qi.y = Qi.j - Qi.y*dt;

    /* Determine whether the position at t-dt is local */
    Qi.proc = DMDAGetNeighborRelative(da, Qi.x, Qi.y);

    /* Check for periodic boundaries and move all periodic points back onto the domain */
    DMDAMapCoordsToPeriodicDomain(da,&(Qi.x),&(Qi.y));

    c->queue[n] = Qi;
  }
  PetscLogEventEnd(CHARACTERISTIC_FullTimeLocal,0,0,0,0);

  PetscLogEventBegin(CHARACTERISTIC_FullTimeExchange,0,0,0,0);
  CharacteristicSendCoordinatesBegin(c);
  PetscLogEventEnd(CHARACTERISTIC_FullTimeExchange,0,0,0,0);

  /* GET VALUE AT FULL TIME IN THE PAST (LOCAL REQUESTS) */
  PetscLogEventBegin(CHARACTERISTIC_FullTimeLocal,0,0,0,0);
  if (c->fieldInterpLocal) {
    DMGetLocalVector(c->fieldDA, &fieldLocal);
    DMGlobalToLocalBegin(c->fieldDA, c->field, INSERT_VALUES, fieldLocal);
    DMGlobalToLocalEnd(c->fieldDA, c->field, INSERT_VALUES, fieldLocal);
    DMDAVecGetArray(c->fieldDA, fieldLocal, &fieldArray);
  }
  PetscInfo(NULL, "Calculating local field at t_{n}\n");
  for (n = 0; n < c->queueSize; n++) {
    if (c->neighbors[c->queue[n].proc] == rank) {
      interpIndices[0] = c->queue[n].x;
      interpIndices[1] = c->queue[n].y;
      if (c->fieldInterpLocal) {c->fieldInterpLocal(fieldArray, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx);}
      else {c->fieldInterp(c->field, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx);}
      for (comp = 0; comp < c->numFieldComp; comp++) c->queue[n].field[comp] = fieldValues[comp];
    }
  }
  PetscLogEventEnd(CHARACTERISTIC_FullTimeLocal,0,0,0,0);

  PetscLogEventBegin(CHARACTERISTIC_FullTimeExchange,0,0,0,0);
  CharacteristicSendCoordinatesEnd(c);
  PetscLogEventEnd(CHARACTERISTIC_FullTimeExchange,0,0,0,0);

  /* GET VALUE AT FULL TIME IN THE PAST (REMOTE REQUESTS) */
  PetscLogEventBegin(CHARACTERISTIC_FullTimeRemote,0,0,0,0);
  PetscInfo1(NULL, "Calculating %d remote field points at t_{n}\n", c->queueRemoteSize);
  for (n = 0; n < c->queueRemoteSize; n++) {
    interpIndices[0] = c->queueRemote[n].x;
    interpIndices[1] = c->queueRemote[n].y;

    /* for debugging purposes */
    if (1) { /* hacked bounds test...let's do better */
      PetscScalar im = interpIndices[0]; PetscScalar jm = interpIndices[1];

      if ((im < (PetscScalar) is - 1.) || (im > (PetscScalar) ie) || (jm < (PetscScalar) js - 1.) || (jm > (PetscScalar) je)) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_LIB, "Nonlocal point: (%g,%g)", im, jm);
    }

    if (c->fieldInterpLocal) {c->fieldInterpLocal(fieldArray, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx);}
    else {c->fieldInterp(c->field, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx);}
    for (comp = 0; comp < c->numFieldComp; comp++) c->queueRemote[n].field[comp] = fieldValues[comp];
  }
  PetscLogEventEnd(CHARACTERISTIC_FullTimeRemote,0,0,0,0);

  PetscLogEventBegin(CHARACTERISTIC_FullTimeExchange,0,0,0,0);
  CharacteristicGetValuesBegin(c);
  CharacteristicGetValuesEnd(c);
  if (c->fieldInterpLocal) {
    DMDAVecRestoreArray(c->fieldDA, fieldLocal, &fieldArray);
    DMRestoreLocalVector(c->fieldDA, &fieldLocal);
  }
  PetscLogEventEnd(CHARACTERISTIC_FullTimeExchange,0,0,0,0);

  /* Return field of characteristics at t_{n-1} */
  PetscLogEventBegin(CHARACTERISTIC_DAUpdate,0,0,0,0);
  DMDAGetInfo(c->fieldDA,0,0,0,0,0,0,0,&dof,0,0,0,0,0);
  DMDAVecGetArray(c->fieldDA, solution, &solArray);
  for (n = 0; n < c->queueSize; n++) {
    Qi = c->queue[n];
    for (comp = 0; comp < c->numFieldComp; comp++) solArray[Qi.j][Qi.i*dof+c->fieldComp[comp]] = Qi.field[comp];
  }
  DMDAVecRestoreArray(c->fieldDA, solution, &solArray);
  PetscLogEventEnd(CHARACTERISTIC_DAUpdate,0,0,0,0);
  PetscLogEventEnd(CHARACTERISTIC_Solve,0,0,0,0);

  /* Cleanup */
  PetscFree(interpIndices);
  PetscFree(velocityValues);
  PetscFree(velocityValuesOld);
  PetscFree(fieldValues);
  return(0);
}
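
/* Editor's sketch of per-step usage (assumption, fragment): each solve writes
   the advected field into the solution vector, after which the caller rotates
   the velocity history so the t_{n-1/2} average above stays consistent. All
   names are application-side placeholders. */
#if 0
  for (step = 0; step < nsteps; step++) {
    CharacteristicSolve(c, dt, phiNew);
    VecCopy(vel, velOld);   /* current velocity becomes "old" for the next step */
    VecCopy(phiNew, phi);   /* advected field becomes the field to interpolate next */
  }
#endif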

PetscErrorCode CharacteristicSetNeighbors(Characteristic c, PetscInt numNeighbors, PetscMPIInt neighbors[])
{
  c->numNeighbors = numNeighbors;
  PetscFree(c->neighbors);
  PetscMalloc1(numNeighbors, &c->neighbors);
  PetscArraycpy(c->neighbors, neighbors, numNeighbors);
  return(0);
}

PetscErrorCode CharacteristicAddPoint(Characteristic c, CharacteristicPointDA2D *point)
{
  if (c->queueSize >= c->queueMax) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE, "Exceeded maximum queue size %d", c->queueMax);
  c->queue[c->queueSize++] = *point;
  return(0);
}

PetscErrorCode CharacteristicSendCoordinatesBegin(Characteristic c)
{
  PetscMPIInt rank, tag = 121;
  PetscInt    i, n;

  MPI_Comm_rank(PetscObjectComm((PetscObject)c), &rank);
  CharacteristicHeapSort(c, c->queue, c->queueSize);
  PetscArrayzero(c->needCount, c->numNeighbors);
  for (i = 0; i < c->queueSize; i++) c->needCount[c->queue[i].proc]++;
  c->fillCount[0] = 0;
  for (n = 1; n < c->numNeighbors; n++) {
    MPI_Irecv(&(c->fillCount[n]), 1, MPIU_INT, c->neighbors[n], tag, PetscObjectComm((PetscObject)c), &(c->request[n-1]));
  }
  for (n = 1; n < c->numNeighbors; n++) {
    MPI_Send(&(c->needCount[n]), 1, MPIU_INT, c->neighbors[n], tag, PetscObjectComm((PetscObject)c));
  }
  MPI_Waitall(c->numNeighbors-1, c->request, c->status);
  /* Initialize the remote queue */
  c->queueLocalMax  = c->localOffsets[0]  = 0;
  c->queueRemoteMax = c->remoteOffsets[0] = 0;
  for (n = 1; n < c->numNeighbors; n++) {
    c->remoteOffsets[n] = c->queueRemoteMax;
    c->queueRemoteMax  += c->fillCount[n];
    c->localOffsets[n]  = c->queueLocalMax;
    c->queueLocalMax   += c->needCount[n];
  }
  /* HACK BEGIN */
  for (n = 1; n < c->numNeighbors; n++) c->localOffsets[n] += c->needCount[0];
  c->needCount[0] = 0;
  /* HACK END */
  if (c->queueRemoteMax) {
    PetscMalloc1(c->queueRemoteMax, &c->queueRemote);
  } else c->queueRemote = NULL;
  c->queueRemoteSize = c->queueRemoteMax;

  /* Send and receive requests for values, giving the coordinates for interpolation */
  for (n = 1; n < c->numNeighbors; n++) {
    PetscInfo2(NULL, "Receiving %d requests for values from proc %d\n", c->fillCount[n], c->neighbors[n]);
    MPI_Irecv(&(c->queueRemote[c->remoteOffsets[n]]), c->fillCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c), &(c->request[n-1]));
  }
  for (n = 1; n < c->numNeighbors; n++) {
    PetscInfo2(NULL, "Sending %d requests for values to proc %d\n", c->needCount[n], c->neighbors[n]);
    MPI_Send(&(c->queue[c->localOffsets[n]]), c->needCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c));
  }
  return(0);
}

PetscErrorCode CharacteristicSendCoordinatesEnd(Characteristic c)
{
#if 0
  PetscMPIInt rank;
  PetscInt    n;
#endif

  MPI_Waitall(c->numNeighbors-1, c->request, c->status);
#if 0
  MPI_Comm_rank(PetscObjectComm((PetscObject)c), &rank);
  for (n = 0; n < c->queueRemoteSize; n++) {
    if (c->neighbors[c->queueRemote[n].proc] == rank) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB, "This is messed up, n = %d proc = %d", n, c->queueRemote[n].proc);
  }
#endif
  return(0);
}

PetscErrorCode CharacteristicGetValuesBegin(Characteristic c)
{
  PetscMPIInt tag = 121;
  PetscInt    n;

  /* SEND AND RECEIVE FILLED REQUESTS (velocities at t_{n-1/2} or field values at the foot of the characteristic) */
  for (n = 1; n < c->numNeighbors; n++) {
    MPI_Irecv(&(c->queue[c->localOffsets[n]]), c->needCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c), &(c->request[n-1]));
  }
  for (n = 1; n < c->numNeighbors; n++) {
    MPI_Send(&(c->queueRemote[c->remoteOffsets[n]]), c->fillCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c));
  }
  return(0);
}

PetscErrorCode CharacteristicGetValuesEnd(Characteristic c)
{
  MPI_Waitall(c->numNeighbors-1, c->request, c->status);
  /* Free queue of requests from other procs */
  PetscFree(c->queueRemote);
  return(0);
}

/*---------------------------------------------------------------------*/
/*
  Based on code from http://linux.wku.edu/~lamonml/algor/sort/heap.html
*/
PetscErrorCode CharacteristicHeapSort(Characteristic c, Queue queue, PetscInt size)
/*---------------------------------------------------------------------*/
{
  CharacteristicPointDA2D temp;
  PetscInt                n;

  if (0) { /* Check the order of the queue before sorting */
    PetscInfo(NULL, "Before heap sort\n");
    for (n = 0; n < size; n++) {
      PetscInfo2(NULL, "%d %d\n", n, queue[n].proc);
    }
  }

  /* SORTING PHASE */
  for (n = (size / 2)-1; n >= 0; n--) {
    CharacteristicSiftDown(c, queue, n, size-1); /* Rich had size-1 here, Matt had size */
  }
  for (n = size-1; n >= 1; n--) {
    temp     = queue[0];
    queue[0] = queue[n];
    queue[n] = temp;
    CharacteristicSiftDown(c, queue, 0, n-1);
  }
  if (0) { /* Check the order of the queue after sorting */
    PetscInfo(NULL, "After heap sort\n");
    for (n = 0; n < size; n++) {
      PetscInfo2(NULL, "%d %d\n", n, queue[n].proc);
    }
  }
  return(0);
}

/*---------------------------------------------------------------------*/
/*
  Based on code from http://linux.wku.edu/~lamonml/algor/sort/heap.html
*/
PetscErrorCode CharacteristicSiftDown(Characteristic c, Queue queue, PetscInt root, PetscInt bottom)
/*---------------------------------------------------------------------*/
{
  PetscBool               done = PETSC_FALSE;
  PetscInt                maxChild;
  CharacteristicPointDA2D temp;

  while ((root*2 <= bottom) && (!done)) {
    if (root*2 == bottom) maxChild = root * 2;
    else if (queue[root*2].proc > queue[root*2+1].proc) maxChild = root * 2;
    else maxChild = root * 2 + 1;

    if (queue[root].proc < queue[maxChild].proc) {
      temp            = queue[root];
      queue[root]     = queue[maxChild];
      queue[maxChild] = temp;
      root            = maxChild;
    } else done = PETSC_TRUE;
  }
  return(0);
}

/* [center, left, top-left, top, top-right, right, bottom-right, bottom, bottom-left] */
PetscErrorCode DMDAGetNeighborsRank(DM da, PetscMPIInt neighbors[])
{
  DMBoundaryType bx, by;
  PetscBool      IPeriodic = PETSC_FALSE, JPeriodic = PETSC_FALSE;
  MPI_Comm       comm;
  PetscMPIInt    rank;
  PetscInt       **procs, pi, pj, pim, pip, pjm, pjp, PI, PJ;

  PetscObjectGetComm((PetscObject) da, &comm);
  MPI_Comm_rank(comm, &rank);
  DMDAGetInfo(da, 0, 0, 0, 0, &PI, &PJ, 0, 0, 0, &bx, &by, 0, 0);

  if (bx == DM_BOUNDARY_PERIODIC) IPeriodic = PETSC_TRUE;
  if (by == DM_BOUNDARY_PERIODIC) JPeriodic = PETSC_TRUE;

  neighbors[0] = rank;
  rank = 0;
  PetscMalloc1(PJ,&procs);
  for (pj = 0; pj < PJ; pj++) {
    PetscMalloc1(PI,&(procs[pj]));
    for (pi = 0; pi < PI; pi++) {
      procs[pj][pi] = rank;
      rank++;
    }
  }

  pi  = neighbors[0] % PI;
  pj  = neighbors[0] / PI;
  pim = pi-1;  if (pim < 0) pim = PI-1;
  pip = (pi+1)%PI;
  pjm = pj-1;  if (pjm < 0) pjm = PJ-1;
  pjp = (pj+1)%PJ;

  neighbors[1] = procs[pj] [pim];
  neighbors[2] = procs[pjp][pim];
  neighbors[3] = procs[pjp][pi];
  neighbors[4] = procs[pjp][pip];
  neighbors[5] = procs[pj] [pip];
  neighbors[6] = procs[pjm][pip];
  neighbors[7] = procs[pjm][pi];
  neighbors[8] = procs[pjm][pim];

  if (!IPeriodic) {
    if (pi==0)    neighbors[1] = neighbors[2] = neighbors[8] = neighbors[0];
    if (pi==PI-1) neighbors[4] = neighbors[5] = neighbors[6] = neighbors[0];
  }

  if (!JPeriodic) {
    if (pj==0)    neighbors[6] = neighbors[7] = neighbors[8] = neighbors[0];
    if (pj==PJ-1) neighbors[2] = neighbors[3] = neighbors[4] = neighbors[0];
  }

  for (pj = 0; pj < PJ; pj++) {
    PetscFree(procs[pj]);
  }
  PetscFree(procs);
  return(0);
}

/*
  SUBDOMAIN NEIGHBORHOOD PROCESS MAP:
    2 | 3 | 4
    __|___|__
    1 | 0 | 5
    __|___|__
    8 | 7 | 6
      |   |
*/
PetscInt DMDAGetNeighborRelative(DM da, PetscReal ir, PetscReal jr)
{
  DMDALocalInfo info;
  PetscReal     is, ie, js, je;

  DMDAGetLocalInfo(da, &info);
  is = (PetscReal) info.xs - 0.5; ie = (PetscReal) info.xs + info.xm - 0.5;
  js = (PetscReal) info.ys - 0.5; je = (PetscReal) info.ys + info.ym - 0.5;

  if (ir >= is && ir <= ie) {      /* center column */
    if (jr >= js && jr <= je) return 0;
    else if (jr < js)         return 7;
    else                      return 3;
  } else if (ir < is) {            /* left column */
    if (jr >= js && jr <= je) return 1;
    else if (jr < js)         return 8;
    else                      return 2;
  } else {                         /* right column */
    if (jr >= js && jr <= je) return 5;
    else if (jr < js)         return 6;
    else                      return 4;
  }
}