Actual source code: characteristic.c

petsc-3.4.5 2014-06-29
  2: #include <petsc-private/characteristicimpl.h> /*I "petsccharacteristic.h" I*/
  3: #include <petscdmda.h>
  4: #include <petscviewer.h>

  6: PetscClassId  CHARACTERISTIC_CLASSID;
  7: PetscLogEvent CHARACTERISTIC_SetUp, CHARACTERISTIC_Solve, CHARACTERISTIC_QueueSetup, CHARACTERISTIC_DAUpdate;
  8: PetscLogEvent CHARACTERISTIC_HalfTimeLocal, CHARACTERISTIC_HalfTimeRemote, CHARACTERISTIC_HalfTimeExchange;
  9: PetscLogEvent CHARACTERISTIC_FullTimeLocal, CHARACTERISTIC_FullTimeRemote, CHARACTERISTIC_FullTimeExchange;
 10: /*
 11:    Contains the list of registered characteristic routines
 12: */
 13: PetscFunctionList CharacteristicList              = NULL;
 14: PetscBool         CharacteristicRegisterAllCalled = PETSC_FALSE;

 16: PetscErrorCode DMDAGetNeighborsRank(DM, PetscMPIInt []);
 17: PetscInt       DMDAGetNeighborRelative(DM, PassiveReal, PassiveReal);
 18: PetscErrorCode DMDAMapToPeriodicDomain(DM, PetscScalar []);

 20: PetscErrorCode CharacteristicHeapSort(Characteristic, Queue, PetscInt);
 21: PetscErrorCode CharacteristicSiftDown(Characteristic, Queue, PetscInt, PetscInt);

 25: PetscErrorCode CharacteristicView(Characteristic c, PetscViewer viewer)
 26: {
 27:   PetscBool      iascii;

 32:   if (!viewer) {
 33:     PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)c),&viewer);
 34:   }

 38:   PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);
 39:   if (!iascii) {
 40:     if (c->ops->view) {
 41:       (*c->ops->view)(c, viewer);
 42:     }
 43:   }
 44:   return(0);
 45: }

 49: PetscErrorCode CharacteristicDestroy(Characteristic *c)
 50: {

 54:   if (!*c) return(0);
 56:   if (--((PetscObject)(*c))->refct > 0) return(0);

 58:   if ((*c)->ops->destroy) {
 59:     (*(*c)->ops->destroy)((*c));
 60:   }
 61:   MPI_Type_free(&(*c)->itemType);
 62:   PetscFree((*c)->queue);
 63:   PetscFree((*c)->queueLocal);
 64:   PetscFree((*c)->queueRemote);
 65:   PetscFree((*c)->neighbors);
 66:   PetscFree((*c)->needCount);
 67:   PetscFree((*c)->localOffsets);
 68:   PetscFree((*c)->fillCount);
 69:   PetscFree((*c)->remoteOffsets);
 70:   PetscFree((*c)->request);
 71:   PetscFree((*c)->status);
 72:   PetscHeaderDestroy(c);
 73:   return(0);
 74: }

 78: PetscErrorCode CharacteristicCreate(MPI_Comm comm, Characteristic *c)
 79: {
 80:   Characteristic newC;

 85:   *c = NULL;
 86: #if !defined(PETSC_USE_DYNAMIC_LIBRARIES)
 87:   CharacteristicInitializePackage();
 88: #endif

 90:   PetscHeaderCreate(newC, _p_Characteristic, struct _CharacteristicOps, CHARACTERISTIC_CLASSID, "Characteristic", "Characteristic", "SemiLagrange", comm, CharacteristicDestroy, CharacteristicView);
 91:   PetscLogObjectCreate(newC);
 92:   *c   = newC;

 94:   newC->structured          = PETSC_TRUE;
 95:   newC->numIds              = 0;
 96:   newC->velocityDA          = NULL;
 97:   newC->velocity            = NULL;
 98:   newC->velocityOld         = NULL;
 99:   newC->numVelocityComp     = 0;
100:   newC->velocityComp        = NULL;
101:   newC->velocityInterp      = NULL;
102:   newC->velocityInterpLocal = NULL;
103:   newC->velocityCtx         = NULL;
104:   newC->fieldDA             = NULL;
105:   newC->field               = NULL;
106:   newC->numFieldComp        = 0;
107:   newC->fieldComp           = NULL;
108:   newC->fieldInterp         = NULL;
109:   newC->fieldInterpLocal    = NULL;
110:   newC->fieldCtx            = NULL;
111:   newC->itemType            = 0;
112:   newC->queue               = NULL;
113:   newC->queueSize           = 0;
114:   newC->queueMax            = 0;
115:   newC->queueLocal          = NULL;
116:   newC->queueLocalSize      = 0;
117:   newC->queueLocalMax       = 0;
118:   newC->queueRemote         = NULL;
119:   newC->queueRemoteSize     = 0;
120:   newC->queueRemoteMax      = 0;
121:   newC->numNeighbors        = 0;
122:   newC->neighbors           = NULL;
123:   newC->needCount           = NULL;
124:   newC->localOffsets        = NULL;
125:   newC->fillCount           = NULL;
126:   newC->remoteOffsets       = NULL;
127:   newC->request             = NULL;
128:   newC->status              = NULL;
129:   return(0);
130: }
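
/*
   Usage sketch (an editorial example, not part of the PETSc sources): the
   lifecycle of a Characteristic object follows the usual PETSc solver
   pattern.  Error checking is elided for brevity.
*/
#if 0
static PetscErrorCode ExampleCharacteristicLifecycle(MPI_Comm comm)
{
  Characteristic c;

  CharacteristicCreate(comm, &c);
  CharacteristicSetType(c, CHARACTERISTICDA); /* also the default chosen by CharacteristicSetUp() */
  /* ... attach velocity and field interpolation callbacks (see below) ... */
  CharacteristicSetUp(c);
  /* ... call CharacteristicSolve(c, dt, solution) once per time step ... */
  CharacteristicDestroy(&c);
  return 0;
}
#endif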

134: /*@C
135:    CharacteristicSetType - Builds Characteristic for a particular solver.

137:    Logically Collective on Characteristic

139:    Input Parameters:
140: +  c    - the method of characteristics context
141: -  type - a known method

143:    Options Database Key:
144: .  -characteristic_type <method> - Sets the method; use -help for a list
145:     of available methods

147:    Notes:
148:    See "include/petsccharacteristic.h" for available methods

150:   Normally, it is best to use the CharacteristicSetFromOptions() command and
151:   then set the Characteristic type from the options database rather than by using
152:   this routine.  Using the options database provides the user with
153:   maximum flexibility in evaluating the many different available methods.
154:   The CharacteristicSetType() routine is provided for those situations where it
155:   is necessary to set the method independently of the command
156:   line or options database.  This might be the case, for example, when
157:   the choice of method changes during the execution of the
158:   program, and the user's application is taking responsibility for
159:   choosing the appropriate method.  In other words, this routine is
160:   not for beginners.

162:   Level: intermediate

164: .keywords: Characteristic, set, method

166: .seealso: CharacteristicType

168: @*/
169: PetscErrorCode CharacteristicSetType(Characteristic c, CharacteristicType type)
170: {
171:   PetscErrorCode (*r)(Characteristic);
172:   PetscBool      match;


178:   PetscObjectTypeCompare((PetscObject) c, type, &match);
179:   if (match) return(0);

181:   if (c->data) {
182:     /* destroy the old private Characteristic context */
183:     (*c->ops->destroy)(c);
184:     c->ops->destroy = NULL;
185:     c->data         = 0;
186:   }

188:   PetscFunctionListFind(CharacteristicList,type,&r);
189:   if (!r) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown Characteristic type given: %s", type);
190:   c->setupcalled = 0;
191:   (*r)(c);
192:   PetscObjectChangeTypeName((PetscObject) c, type);
193:   return(0);
194: }
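
/*
   A sketch of the scenario described in the notes above (not from the PETSc
   sources): an application that switches methods during execution rather than
   reading -characteristic_type from the options database.  "my_char" is a
   hypothetical type assumed to have been added with CharacteristicRegister().
*/
#if 0
static PetscErrorCode ExampleSwitchMethod(Characteristic c)
{
  CharacteristicSetType(c, CHARACTERISTICDA);
  /* ... later, when the application decides another method is required ... */
  CharacteristicSetType(c, "my_char");
  return 0;
}
#endif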

198: /*@
199:    CharacteristicSetUp - Sets up the internal data structures for the
200:    later use of the method of characteristics solver.

202:    Collective on Characteristic

204:    Input Parameter:
205: .  c     - the method of characteristics context obtained from CharacteristicCreate()

207:    Level: developer

209: .keywords: Characteristic, setup

211: .seealso: CharacteristicCreate(), CharacteristicSolve(), CharacteristicDestroy()
212: @*/
213: PetscErrorCode CharacteristicSetUp(Characteristic c)
214: {


220:   if (!((PetscObject)c)->type_name) {
221:     CharacteristicSetType(c, CHARACTERISTICDA);
222:   }

224:   if (c->setupcalled == 2) return(0);

226:   PetscLogEventBegin(CHARACTERISTIC_SetUp,c,0,0,0);
227:   if (!c->setupcalled) {
228:     (*c->ops->setup)(c);
229:   }
230:   PetscLogEventEnd(CHARACTERISTIC_SetUp,c,0,0,0);
231:   c->setupcalled = 2;
232:   return(0);
233: }

237: /*@C
238:   CharacteristicRegister -  Adds a solver to the method of characteristics package.

240:    Not Collective

242:    Input Parameters:
243: +  name_solver - name of a new user-defined solver
244: -  routine_create - routine to create method context

246:   Sample usage:
247: .vb
248:     CharacteristicRegister("my_char", MyCharCreate);
249: .ve

251:   Then, your Characteristic type can be chosen with the procedural interface via
252: .vb
253:     CharacteristicCreate(comm, &c);
254:     CharacteristicSetType(c, "my_char");
255: .ve
256:    or at runtime via the option
257: .vb
258:     -characteristic_type my_char
259: .ve

261:    Notes:
262:    CharacteristicRegister() may be called multiple times to add several user-defined solvers.

264: .keywords: Characteristic, register

266: .seealso: CharacteristicRegisterAll(), CharacteristicRegisterDestroy()

268:   Level: advanced
269: @*/
270: PetscErrorCode CharacteristicRegister(const char sname[],PetscErrorCode (*function)(Characteristic))
271: {

275:   PetscFunctionListAdd(&CharacteristicList,sname,function);
276:   return(0);
277: }
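
/*
   A sketch of a create routine suitable for CharacteristicRegister() (not from
   the PETSc sources): it fills in the ops table used elsewhere in this file.
   MyCharSetUp() and MyCharDestroy() are hypothetical user-supplied functions.
*/
#if 0
static PetscErrorCode MyCharSetUp(Characteristic);   /* hypothetical */
static PetscErrorCode MyCharDestroy(Characteristic); /* hypothetical */

static PetscErrorCode MyCharCreate(Characteristic c)
{
  c->ops->setup   = MyCharSetUp;
  c->ops->destroy = MyCharDestroy;
  c->ops->view    = NULL;
  return 0;
}

/* registered once at startup with: CharacteristicRegister("my_char", MyCharCreate); */
#endif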

281: PetscErrorCode CharacteristicSetVelocityInterpolation(Characteristic c, DM da, Vec v, Vec vOld, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(Vec, PetscReal[], PetscInt, PetscInt[], PetscScalar[], void*), void *ctx)
282: {
284:   c->velocityDA      = da;
285:   c->velocity        = v;
286:   c->velocityOld     = vOld;
287:   c->numVelocityComp = numComponents;
288:   c->velocityComp    = components;
289:   c->velocityInterp  = interp;
290:   c->velocityCtx     = ctx;
291:   return(0);
292: }

296: PetscErrorCode CharacteristicSetVelocityInterpolationLocal(Characteristic c, DM da, Vec v, Vec vOld, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(void*, PetscReal [], PetscInt, PetscInt[], PetscScalar[], void*), void *ctx)
297: {
299:   c->velocityDA          = da;
300:   c->velocity            = v;
301:   c->velocityOld         = vOld;
302:   c->numVelocityComp     = numComponents;
303:   c->velocityComp        = components;
304:   c->velocityInterpLocal = interp;
305:   c->velocityCtx         = ctx;
306:   return(0);
307: }
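
/*
   A sketch of a local velocity interpolation callback matching the signature
   above (not from the PETSc sources).  It assumes the velocity DMDA stores a
   2-component struct per node and that the ghosted array obtained from
   DMDAVecGetArray() is passed in; it hard-codes both components, ignores the
   component list, and omits the bounds and periodicity handling a real
   implementation needs.
*/
#if 0
typedef struct { PetscScalar u, v; } VelField;

static PetscErrorCode InterpVelocity2D(void *f, PetscReal ij[], PetscInt numComp,
                                       PetscInt components[], PetscScalar velocity[], void *ctx)
{
  VelField  **field = (VelField**)f;
  PetscInt  i  = (PetscInt)floor(ij[0]), j = (PetscInt)floor(ij[1]);
  PetscReal di = ij[0] - i, dj = ij[1] - j;

  /* bilinear interpolation on the cell [i,i+1] x [j,j+1] */
  velocity[0] = (1.-di)*(1.-dj)*field[j][i].u + di*(1.-dj)*field[j][i+1].u
              + (1.-di)*dj*field[j+1][i].u    + di*dj*field[j+1][i+1].u;
  velocity[1] = (1.-di)*(1.-dj)*field[j][i].v + di*(1.-dj)*field[j][i+1].v
              + (1.-di)*dj*field[j+1][i].v    + di*dj*field[j+1][i+1].v;
  return 0;
}
#endif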

311: PetscErrorCode CharacteristicSetFieldInterpolation(Characteristic c, DM da, Vec v, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(Vec, PetscReal[], PetscInt, PetscInt[], PetscScalar[], void*), void *ctx)
312: {
314: #if 0
315:   if (numComponents > 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP, "Fields with more than 2 components are not supported. Send mail to petsc-maint@mcs.anl.gov.");
316: #endif
317:   c->fieldDA      = da;
318:   c->field        = v;
319:   c->numFieldComp = numComponents;
320:   c->fieldComp    = components;
321:   c->fieldInterp  = interp;
322:   c->fieldCtx     = ctx;
323:   return(0);
324: }

328: PetscErrorCode CharacteristicSetFieldInterpolationLocal(Characteristic c, DM da, Vec v, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(void*, PetscReal[], PetscInt, PetscInt[], PetscScalar [], void*), void *ctx)
329: {
331: #if 0
332:   if (numComponents > 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP, "Fields with more than 2 components are not supported. Send mail to petsc-maint@mcs.anl.gov.");
333: #endif
334:   c->fieldDA          = da;
335:   c->field            = v;
336:   c->numFieldComp     = numComponents;
337:   c->fieldComp        = components;
338:   c->fieldInterpLocal = interp;
339:   c->fieldCtx         = ctx;
340:   return(0);
341: }
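
/*
   A matching sketch of a scalar field callback (not from the PETSc sources),
   using the same bilinear stencil as the velocity sketch above on a
   single-component DMDA array.
*/
#if 0
static PetscErrorCode InterpField2D(void *f, PetscReal ij[], PetscInt numComp,
                                    PetscInt components[], PetscScalar field[], void *ctx)
{
  PetscScalar **phi = (PetscScalar**)f;
  PetscInt    i  = (PetscInt)floor(ij[0]), j = (PetscInt)floor(ij[1]);
  PetscReal   di = ij[0] - i, dj = ij[1] - j;

  field[0] = (1.-di)*(1.-dj)*phi[j][i] + di*(1.-dj)*phi[j][i+1]
           + (1.-di)*dj*phi[j+1][i]    + di*dj*phi[j+1][i+1];
  return 0;
}
#endif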

345: PetscErrorCode CharacteristicSolve(Characteristic c, PetscReal dt, Vec solution)
346: {
347:   CharacteristicPointDA2D Qi;
348:   DM                      da = c->velocityDA;
349:   Vec                     velocityLocal, velocityLocalOld;
350:   Vec                     fieldLocal;
351:   DMDALocalInfo           info;
352:   PetscScalar             **solArray;
353:   void                    *velocityArray;
354:   void                    *velocityArrayOld;
355:   void                    *fieldArray;
356:   PassiveScalar           *interpIndices;
357:   PassiveScalar           *velocityValues, *velocityValuesOld;
358:   PassiveScalar           *fieldValues;
359:   PetscMPIInt             rank;
360:   PetscInt                dim;
361:   PetscMPIInt             neighbors[9];
362:   PetscInt                dof;
363:   PetscInt                gx, gy;
364:   PetscInt                n, is, ie, js, je, comp;

368:   c->queueSize = 0;
369:   MPI_Comm_rank(PetscObjectComm((PetscObject)c), &rank);
370:   DMDAGetNeighborsRank(da, neighbors);
371:   CharacteristicSetNeighbors(c, 9, neighbors);
372:   CharacteristicSetUp(c);
373:   /* global and local grid info */
374:   DMDAGetInfo(da, &dim, &gx, &gy, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0);
375:   DMDAGetLocalInfo(da, &info);
376:   is   = info.xs;          ie   = info.xs+info.xm;
377:   js   = info.ys;          je   = info.ys+info.ym;
378:   /* Allocation */
379:   PetscMalloc(dim*sizeof(PetscScalar),                &interpIndices);
380:   PetscMalloc(c->numVelocityComp*sizeof(PetscScalar), &velocityValues);
381:   PetscMalloc(c->numVelocityComp*sizeof(PetscScalar), &velocityValuesOld);
382:   PetscMalloc(c->numFieldComp*sizeof(PetscScalar),    &fieldValues);
383:   PetscLogEventBegin(CHARACTERISTIC_Solve,0,0,0,0);

385:   /* -----------------------------------------------------------------------
386:      PART 1, AT t-dt/2
387:      -----------------------------------------------------------------------*/
388:   PetscLogEventBegin(CHARACTERISTIC_QueueSetup,0,0,0,0);
389:   /* GET POSITION AT HALF TIME IN THE PAST */
390:   if (c->velocityInterpLocal) {
391:     DMGetLocalVector(c->velocityDA, &velocityLocal);
392:     DMGetLocalVector(c->velocityDA, &velocityLocalOld);
393:     DMGlobalToLocalBegin(c->velocityDA, c->velocity, INSERT_VALUES, velocityLocal);
394:     DMGlobalToLocalEnd(c->velocityDA, c->velocity, INSERT_VALUES, velocityLocal);
395:     DMGlobalToLocalBegin(c->velocityDA, c->velocityOld, INSERT_VALUES, velocityLocalOld);
396:     DMGlobalToLocalEnd(c->velocityDA, c->velocityOld, INSERT_VALUES, velocityLocalOld);
397:     DMDAVecGetArray(c->velocityDA, velocityLocal,    &velocityArray);
398:     DMDAVecGetArray(c->velocityDA, velocityLocalOld, &velocityArrayOld);
399:   }
400:   PetscInfo(NULL, "Calculating position at t_{n - 1/2}\n");
401:   for (Qi.j = js; Qi.j < je; Qi.j++) {
402:     for (Qi.i = is; Qi.i < ie; Qi.i++) {
403:       interpIndices[0] = Qi.i;
404:       interpIndices[1] = Qi.j;
405:       if (c->velocityInterpLocal) c->velocityInterpLocal(velocityArray, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx);
406:       else c->velocityInterp(c->velocity, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx);
407:       Qi.x = Qi.i - velocityValues[0]*dt/2.0;
408:       Qi.y = Qi.j - velocityValues[1]*dt/2.0;

410:       /* Determine whether the position at t - dt/2 is local */
411:       Qi.proc = DMDAGetNeighborRelative(da, Qi.x, Qi.y);

413:       /* Check for Periodic boundaries and move all periodic points back onto the domain */
414:       DMDAMapCoordsToPeriodicDomain(da,&(Qi.x),&(Qi.y));
415:       CharacteristicAddPoint(c, &Qi);
416:     }
417:   }
418:   PetscLogEventEnd(CHARACTERISTIC_QueueSetup,0,0,0,0);

420:   PetscLogEventBegin(CHARACTERISTIC_HalfTimeExchange,0,0,0,0);
421:   CharacteristicSendCoordinatesBegin(c);
422:   PetscLogEventEnd(CHARACTERISTIC_HalfTimeExchange,0,0,0,0);

424:   PetscLogEventBegin(CHARACTERISTIC_HalfTimeLocal,0,0,0,0);
425:   /* Calculate velocity at t_{n-1/2} (local values) */
426:   PetscInfo(NULL, "Calculating local velocities at t_{n - 1/2}\n");
427:   for (n = 0; n < c->queueSize; n++) {
428:     Qi = c->queue[n];
429:     if (c->neighbors[Qi.proc] == rank) {
430:       interpIndices[0] = Qi.x;
431:       interpIndices[1] = Qi.y;
432:       if (c->velocityInterpLocal) {
433:         c->velocityInterpLocal(velocityArray,    interpIndices, c->numVelocityComp, c->velocityComp, velocityValues,    c->velocityCtx);
434:         c->velocityInterpLocal(velocityArrayOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx);
435:       } else {
436:         c->velocityInterp(c->velocity,    interpIndices, c->numVelocityComp, c->velocityComp, velocityValues,    c->velocityCtx);
437:         c->velocityInterp(c->velocityOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx);
438:       }
439:       Qi.x = 0.5*(velocityValues[0] + velocityValuesOld[0]);
440:       Qi.y = 0.5*(velocityValues[1] + velocityValuesOld[1]);
441:     }
442:     c->queue[n] = Qi;
443:   }
444:   PetscLogEventEnd(CHARACTERISTIC_HalfTimeLocal,0,0,0,0);

446:   PetscLogEventBegin(CHARACTERISTIC_HalfTimeExchange,0,0,0,0);
447:   CharacteristicSendCoordinatesEnd(c);
448:   PetscLogEventEnd(CHARACTERISTIC_HalfTimeExchange,0,0,0,0);


451:   /* Calculate velocity at t_{n-1/2} (fill remote requests) */
452:   PetscLogEventBegin(CHARACTERISTIC_HalfTimeRemote,0,0,0,0);
453:   PetscInfo1(NULL, "Calculating %d remote velocities at t_{n - 1/2}\n", c->queueRemoteSize);
454:   for (n = 0; n < c->queueRemoteSize; n++) {
455:     Qi = c->queueRemote[n];
456:     interpIndices[0] = Qi.x;
457:     interpIndices[1] = Qi.y;
458:     if (c->velocityInterpLocal) {
459:       c->velocityInterpLocal(velocityArray,    interpIndices, c->numVelocityComp, c->velocityComp, velocityValues,    c->velocityCtx);
460:       c->velocityInterpLocal(velocityArrayOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx);
461:     } else {
462:       c->velocityInterp(c->velocity,    interpIndices, c->numVelocityComp, c->velocityComp, velocityValues,    c->velocityCtx);
463:       c->velocityInterp(c->velocityOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx);
464:     }
465:     Qi.x = 0.5*(velocityValues[0] + velocityValuesOld[0]);
466:     Qi.y = 0.5*(velocityValues[1] + velocityValuesOld[1]);
467:     c->queueRemote[n] = Qi;
468:   }
469:   PetscLogEventEnd(CHARACTERISTIC_HalfTimeRemote,0,0,0,0);
470:   PetscLogEventBegin(CHARACTERISTIC_HalfTimeExchange,0,0,0,0);
471:   CharacteristicGetValuesBegin(c);
472:   CharacteristicGetValuesEnd(c);
473:   if (c->velocityInterpLocal) {
474:     DMDAVecRestoreArray(c->velocityDA, velocityLocal,    &velocityArray);
475:     DMDAVecRestoreArray(c->velocityDA, velocityLocalOld, &velocityArrayOld);
476:     DMRestoreLocalVector(c->velocityDA, &velocityLocal);
477:     DMRestoreLocalVector(c->velocityDA, &velocityLocalOld);
478:   }
479:   PetscLogEventEnd(CHARACTERISTIC_HalfTimeExchange,0,0,0,0);

481:   /* -----------------------------------------------------------------------
482:      PART 2, AT t-dt
483:      -----------------------------------------------------------------------*/

485:   /* GET POSITION AT t_n (local values) */
486:   PetscLogEventBegin(CHARACTERISTIC_FullTimeLocal,0,0,0,0);
487:   PetscInfo(NULL, "Calculating position at t_{n}\n");
488:   for (n = 0; n < c->queueSize; n++) {
489:     Qi   = c->queue[n];
490:     Qi.x = Qi.i - Qi.x*dt;
491:     Qi.y = Qi.j - Qi.y*dt;

493:     /* Determine whether the position at t-dt is local */
494:     Qi.proc = DMDAGetNeighborRelative(da, Qi.x, Qi.y);

496:     /* Check for Periodic boundaries and move all periodic points back onto the domain */
497:     DMDAMapCoordsToPeriodicDomain(da,&(Qi.x),&(Qi.y));

499:     c->queue[n] = Qi;
500:   }
501:   PetscLogEventEnd(CHARACTERISTIC_FullTimeLocal,0,0,0,0);

503:   PetscLogEventBegin(CHARACTERISTIC_FullTimeExchange,0,0,0,0);
504:   CharacteristicSendCoordinatesBegin(c);
505:   PetscLogEventEnd(CHARACTERISTIC_FullTimeExchange,0,0,0,0);

507:   /* GET VALUE AT FULL TIME IN THE PAST (LOCAL REQUESTS) */
508:   PetscLogEventBegin(CHARACTERISTIC_FullTimeLocal,0,0,0,0);
509:   if (c->fieldInterpLocal) {
510:     DMGetLocalVector(c->fieldDA, &fieldLocal);
511:     DMGlobalToLocalBegin(c->fieldDA, c->field, INSERT_VALUES, fieldLocal);
512:     DMGlobalToLocalEnd(c->fieldDA, c->field, INSERT_VALUES, fieldLocal);
513:     DMDAVecGetArray(c->fieldDA, fieldLocal, &fieldArray);
514:   }
515:   PetscInfo(NULL, "Calculating local field at t_{n}\n");
516:   for (n = 0; n < c->queueSize; n++) {
517:     if (c->neighbors[c->queue[n].proc] == rank) {
518:       interpIndices[0] = c->queue[n].x;
519:       interpIndices[1] = c->queue[n].y;
520:       if (c->fieldInterpLocal) c->fieldInterpLocal(fieldArray, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx);
521:       else c->fieldInterp(c->field, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx);
522:       for (comp = 0; comp < c->numFieldComp; comp++) c->queue[n].field[comp] = fieldValues[comp];
523:     }
524:   }
525:   PetscLogEventEnd(CHARACTERISTIC_FullTimeLocal,0,0,0,0);

527:   PetscLogEventBegin(CHARACTERISTIC_FullTimeExchange,0,0,0,0);
528:   CharacteristicSendCoordinatesEnd(c);
529:   PetscLogEventEnd(CHARACTERISTIC_FullTimeExchange,0,0,0,0);

531:   /* GET VALUE AT FULL TIME IN THE PAST (REMOTE REQUESTS) */
532:   PetscLogEventBegin(CHARACTERISTIC_FullTimeRemote,0,0,0,0);
533:   PetscInfo1(NULL, "Calculating %d remote field points at t_{n}\n", c->queueRemoteSize);
534:   for (n = 0; n < c->queueRemoteSize; n++) {
535:     interpIndices[0] = c->queueRemote[n].x;
536:     interpIndices[1] = c->queueRemote[n].y;

538:     /* for debugging purposes */
539:     if (1) { /* hacked bounds test...let's do better */
540:       PetscScalar im = interpIndices[0]; PetscScalar jm = interpIndices[1];

542:       if ((im < (PetscScalar) is - 1.) || (im > (PetscScalar) ie) || (jm < (PetscScalar) js - 1.) || (jm > (PetscScalar) je)) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_LIB, "Nonlocal point: (%g,%g)", im, jm);
543:     }

545:     if (c->fieldInterpLocal) c->fieldInterpLocal(fieldArray, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx);
546:     else c->fieldInterp(c->field, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx);
547:     for (comp = 0; comp < c->numFieldComp; comp++) c->queueRemote[n].field[comp] = fieldValues[comp];
548:   }
549:   PetscLogEventEnd(CHARACTERISTIC_FullTimeRemote,0,0,0,0);

551:   PetscLogEventBegin(CHARACTERISTIC_FullTimeExchange,0,0,0,0);
552:   CharacteristicGetValuesBegin(c);
553:   CharacteristicGetValuesEnd(c);
554:   if (c->fieldInterpLocal) {
555:     DMDAVecRestoreArray(c->fieldDA, fieldLocal, &fieldArray);
556:     DMRestoreLocalVector(c->fieldDA, &fieldLocal);
557:   }
558:   PetscLogEventEnd(CHARACTERISTIC_FullTimeExchange,0,0,0,0);

560:   /* Return field of characteristics at t_n-1 */
561:   PetscLogEventBegin(CHARACTERISTIC_DAUpdate,0,0,0,0);
562:   DMDAGetInfo(c->fieldDA,0,0,0,0,0,0,0,&dof,0,0,0,0,0);
563:   DMDAVecGetArray(c->fieldDA, solution, &solArray);
564:   for (n = 0; n < c->queueSize; n++) {
565:     Qi = c->queue[n];
566:     for (comp = 0; comp < c->numFieldComp; comp++) solArray[Qi.j][Qi.i*dof+c->fieldComp[comp]] = Qi.field[comp];
567:   }
568:   DMDAVecRestoreArray(c->fieldDA, solution, &solArray);
569:   PetscLogEventEnd(CHARACTERISTIC_DAUpdate,0,0,0,0);
570:   PetscLogEventEnd(CHARACTERISTIC_Solve,0,0,0,0);

572:   /* Cleanup */
573:   PetscFree(interpIndices);
574:   PetscFree(velocityValues);
575:   PetscFree(velocityValuesOld);
576:   PetscFree(fieldValues);
577:   return(0);
578: }
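
/*
   A sketch of one advection step driven by CharacteristicSolve() (not from
   the PETSc sources).  It assumes the callbacks sketched above, a velocity
   DMDA with components {0,1}, and a single-component field DMDA; vel and
   velOld hold the velocity at the current and previous time levels, and phi
   is overwritten with the advected field.  Error checking is elided.
*/
#if 0
static PetscErrorCode ExampleAdvectStep(Characteristic c, DM velocityDA, DM fieldDA,
                                        Vec vel, Vec velOld, Vec phi, PetscReal dt)
{
  PetscInt velComp[2]   = {0, 1};
  PetscInt fieldComp[1] = {0};

  CharacteristicSetVelocityInterpolationLocal(c, velocityDA, vel, velOld, 2, velComp, InterpVelocity2D, NULL);
  CharacteristicSetFieldInterpolationLocal(c, fieldDA, phi, 1, fieldComp, InterpField2D, NULL);
  CharacteristicSolve(c, dt, phi);
  return 0;
}
#endif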

582: PetscErrorCode CharacteristicSetNeighbors(Characteristic c, PetscInt numNeighbors, PetscMPIInt neighbors[])
583: {

587:   c->numNeighbors = numNeighbors;
588:   PetscFree(c->neighbors);
589:   PetscMalloc(numNeighbors * sizeof(PetscMPIInt), &c->neighbors);
590:   PetscMemcpy(c->neighbors, neighbors, numNeighbors * sizeof(PetscMPIInt));
591:   return(0);
592: }

596: PetscErrorCode CharacteristicAddPoint(Characteristic c, CharacteristicPointDA2D *point)
597: {
599:   if (c->queueSize >= c->queueMax) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE, "Exceeded maximum queue size %d", c->queueMax);
600:   c->queue[c->queueSize++] = *point;
601:   return(0);
602: }

606: PetscErrorCode CharacteristicSendCoordinatesBegin(Characteristic c)
607: {
608:   PetscMPIInt    rank, tag = 121;
609:   PetscInt       i, n;

613:   MPI_Comm_rank(PetscObjectComm((PetscObject)c), &rank);
614:   CharacteristicHeapSort(c, c->queue, c->queueSize);
615:   PetscMemzero(c->needCount, c->numNeighbors * sizeof(PetscInt));
616:   for (i = 0;  i < c->queueSize; i++) c->needCount[c->queue[i].proc]++;
617:   c->fillCount[0] = 0;
618:   for (n = 1; n < c->numNeighbors; n++) {
619:     MPI_Irecv(&(c->fillCount[n]), 1, MPIU_INT, c->neighbors[n], tag, PetscObjectComm((PetscObject)c), &(c->request[n-1]));
620:   }
621:   for (n = 1; n < c->numNeighbors; n++) {
622:     MPI_Send(&(c->needCount[n]), 1, MPIU_INT, c->neighbors[n], tag, PetscObjectComm((PetscObject)c));
623:   }
624:   MPI_Waitall(c->numNeighbors-1, c->request, c->status);
625:   /* Initialize the remote queue */
626:   c->queueLocalMax  = c->localOffsets[0]  = 0;
627:   c->queueRemoteMax = c->remoteOffsets[0] = 0;
628:   for (n = 1; n < c->numNeighbors; n++) {
629:     c->remoteOffsets[n] = c->queueRemoteMax;
630:     c->queueRemoteMax  += c->fillCount[n];
631:     c->localOffsets[n]  = c->queueLocalMax;
632:     c->queueLocalMax   += c->needCount[n];
633:   }
634:   /* HACK BEGIN */
635:   for (n = 1; n < c->numNeighbors; n++) c->localOffsets[n] += c->needCount[0];
636:   c->needCount[0] = 0;
637:   /* HACK END */
638:   if (c->queueRemoteMax) {
639:     PetscMalloc(sizeof(CharacteristicPointDA2D) * c->queueRemoteMax, &c->queueRemote);
640:   } else c->queueRemote = NULL;
641:   c->queueRemoteSize = c->queueRemoteMax;

643:   /* Send and receive requests for values at t_{n-1/2}, giving the coordinates for interpolation */
644:   for (n = 1; n < c->numNeighbors; n++) {
645:     PetscInfo2(NULL, "Receiving %d requests for values from proc %d\n", c->fillCount[n], c->neighbors[n]);
646:     MPI_Irecv(&(c->queueRemote[c->remoteOffsets[n]]), c->fillCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c), &(c->request[n-1]));
647:   }
648:   for (n = 1; n < c->numNeighbors; n++) {
649:     PetscInfo2(NULL, "Sending %d requests for values to proc %d\n", c->needCount[n], c->neighbors[n]);
650:     MPI_Send(&(c->queue[c->localOffsets[n]]), c->needCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c));
651:   }
652:   return(0);
653: }

657: PetscErrorCode CharacteristicSendCoordinatesEnd(Characteristic c)
658: {
659: #if 0
660:   PetscMPIInt rank;
661:   PetscInt    n;
662: #endif

666:   MPI_Waitall(c->numNeighbors-1, c->request, c->status);
667: #if 0
668:   MPI_Comm_rank(PetscObjectComm((PetscObject)c), &rank);
669:   for (n = 0; n < c->queueRemoteSize; n++) {
670:     if (c->neighbors[c->queueRemote[n].proc] == rank) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB, "This is messed up, n = %d proc = %d", n, c->queueRemote[n].proc);
671:   }
672: #endif
673:   return(0);
674: }

678: PetscErrorCode CharacteristicGetValuesBegin(Characteristic c)
679: {
680:   PetscMPIInt    tag = 121;
681:   PetscInt       n;

685:   /* SEND AND RECEIVE FILLED REQUESTS for velocities at t_{n-1/2} */
686:   for (n = 1; n < c->numNeighbors; n++) {
687:     MPI_Irecv(&(c->queue[c->localOffsets[n]]), c->needCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c), &(c->request[n-1]));
688:   }
689:   for (n = 1; n < c->numNeighbors; n++) {
690:     MPI_Send(&(c->queueRemote[c->remoteOffsets[n]]), c->fillCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c));
691:   }
692:   return(0);
693: }

697: PetscErrorCode CharacteristicGetValuesEnd(Characteristic c)
698: {

702:   MPI_Waitall(c->numNeighbors-1, c->request, c->status);
703:   /* Free queue of requests from other procs */
704:   PetscFree(c->queueRemote);
705:   return(0);
706: }

708: /*---------------------------------------------------------------------*/
711: /*
712:   Based on code from http://linux.wku.edu/~lamonml/algor/sort/heap.html
713: */
714: PetscErrorCode CharacteristicHeapSort(Characteristic c, Queue queue, PetscInt size)
715: /*---------------------------------------------------------------------*/
716: {
718:   CharacteristicPointDA2D temp;
719:   PetscInt                n;

722:   if (0) { /* Check the order of the queue before sorting */
723:     PetscInfo(NULL, "Before Heap sort\n");
724:     for (n=0; n<size; n++) {
725:       PetscInfo2(NULL,"%d %d\n",n,queue[n].proc);
726:     }
727:   }

729:   /* SORTING PHASE: build a max-heap on proc, then repeatedly swap the root to the end and sift down */
730:   for (n = (size / 2)-1; n >= 0; n--) {
731:     CharacteristicSiftDown(c, queue, n, size-1); /* Rich had size-1 here, Matt had size */
732:   }
733:   for (n = size-1; n >= 1; n--) {
734:     temp     = queue[0];
735:     queue[0] = queue[n];
736:     queue[n] = temp;
737:     CharacteristicSiftDown(c, queue, 0, n-1);
738:   }
739:   if (0) { /* Check the order of the queue after sorting */
740:     PetscInfo(NULL, "After Heap sort\n");
741:     for (n=0; n<size; n++) {
742:       PetscInfo2(NULL,"%d %d\n",n,queue[n].proc);
743:     }
744:   }
745:   return(0);
746: }

748: /*---------------------------------------------------------------------*/
751: /*
752:   Based on code from http://linux.wku.edu/~lamonml/algor/sort/heap.html
753: */
754: PetscErrorCode CharacteristicSiftDown(Characteristic c, Queue queue, PetscInt root, PetscInt bottom)
755: /*---------------------------------------------------------------------*/
756: {
757:   PetscBool               done = PETSC_FALSE;
758:   PetscInt                maxChild;
759:   CharacteristicPointDA2D temp;

762:   while ((root*2 <= bottom) && (!done)) {
763:     if (root*2 == bottom) maxChild = root * 2;
764:     else if (queue[root*2].proc > queue[root*2+1].proc) maxChild = root * 2;
765:     else maxChild = root * 2 + 1;

767:     if (queue[root].proc < queue[maxChild].proc) {
768:       temp            = queue[root];
769:       queue[root]     = queue[maxChild];
770:       queue[maxChild] = temp;
771:       root            = maxChild;
772:     } else done = PETSC_TRUE;
773:   }
774:   return(0);
775: }

779: /* [center, left, top-left, top, top-right, right, bottom-right, bottom, bottom-left] */
780: PetscErrorCode DMDAGetNeighborsRank(DM da, PetscMPIInt neighbors[])
781: {
782:   DMDABoundaryType bx, by;
783:   PetscBool        IPeriodic = PETSC_FALSE, JPeriodic = PETSC_FALSE;
784:   MPI_Comm         comm;
785:   PetscMPIInt      rank;
786:   PetscInt         **procs,pi,pj,pim,pip,pjm,pjp,PI,PJ;
787:   PetscErrorCode   ierr;

790:   PetscObjectGetComm((PetscObject) da, &comm);
791:   MPI_Comm_rank(comm, &rank);
792:   DMDAGetInfo(da, 0, 0, 0, 0, &PI,&PJ, 0, 0, 0, &bx, &by,0, 0);

794:   if (bx == DMDA_BOUNDARY_PERIODIC) IPeriodic = PETSC_TRUE;
795:   if (by == DMDA_BOUNDARY_PERIODIC) JPeriodic = PETSC_TRUE;

797:   neighbors[0] = rank;
798:   rank = 0; /* reuse rank as a counter while filling the process grid */
799:   PetscMalloc(sizeof(PetscInt*)*PJ,&procs);
800:   for (pj=0; pj<PJ; pj++) {
801:     PetscMalloc(sizeof(PetscInt)*PI,&(procs[pj]));
802:     for (pi=0; pi<PI; pi++) {
803:       procs[pj][pi] = rank;
804:       rank++;
805:     }
806:   }

808:   pi  = neighbors[0] % PI;
809:   pj  = neighbors[0] / PI;
810:   pim = pi-1;  if (pim<0) pim=PI-1;
811:   pip = (pi+1)%PI;
812:   pjm = pj-1;  if (pjm<0) pjm=PJ-1;
813:   pjp = (pj+1)%PJ;

815:   neighbors[1] = procs[pj] [pim];
816:   neighbors[2] = procs[pjp][pim];
817:   neighbors[3] = procs[pjp][pi];
818:   neighbors[4] = procs[pjp][pip];
819:   neighbors[5] = procs[pj] [pip];
820:   neighbors[6] = procs[pjm][pip];
821:   neighbors[7] = procs[pjm][pi];
822:   neighbors[8] = procs[pjm][pim];

824:   if (!IPeriodic) {
825:     if (pi==0)    neighbors[1]=neighbors[2]=neighbors[8]=neighbors[0];
826:     if (pi==PI-1) neighbors[4]=neighbors[5]=neighbors[6]=neighbors[0];
827:   }

829:   if (!JPeriodic) {
830:     if (pj==0)    neighbors[6]=neighbors[7]=neighbors[8]=neighbors[0];
831:     if (pj==PJ-1) neighbors[2]=neighbors[3]=neighbors[4]=neighbors[0];
832:   }

834:   for (pj = 0; pj < PJ; pj++) {
835:     PetscFree(procs[pj]);
836:   }
837:   PetscFree(procs);
838:   return(0);
839: }
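
/*
   Worked example (editorial, derived from the code above): on a 3x3 process
   grid (PI = PJ = 3, ranks filled row-major by the loops above), the center
   rank 4 has pi = pj = 1, so

     neighbors = [4, 3, 6, 7, 8, 5, 2, 1, 0]

   in the [center, left, top-left, top, top-right, right, bottom-right,
   bottom, bottom-left] ordering.  On non-periodic edges the branches above
   replace the out-of-range entries with the rank itself.
*/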

843: /*
844:   SUBDOMAIN NEIGHBORHOOD PROCESS MAP:
845:     2 | 3 | 4
846:     __|___|__
847:     1 | 0 | 5
848:     __|___|__
849:     8 | 7 | 6
850:       |   |
851: */
852: PetscInt DMDAGetNeighborRelative(DM da, PassiveReal ir, PassiveReal jr)
853: {
854:   DMDALocalInfo  info;
855:   PassiveReal    is,ie,js,je;

858:   DMDAGetLocalInfo(da, &info);
859:   is   = (PassiveReal) info.xs - 0.5; ie = (PassiveReal) info.xs + info.xm - 0.5;
860:   js   = (PassiveReal) info.ys - 0.5; je = (PassiveReal) info.ys + info.ym - 0.5;

862:   if (ir >= is && ir <= ie) { /* center column */
863:     if (jr >= js && jr <= je) return 0;
864:     else if (jr < js)         return 7;
865:     else                      return 3;
866:   } else if (ir < is) {     /* left column */
867:     if (jr >= js && jr <= je) return 1;
868:     else if (jr < js)         return 8;
869:     else                      return 2;
870:   } else {                  /* right column */
871:     if (jr >= js && jr <= je) return 5;
872:     else if (jr < js)         return 6;
873:     else                      return 4;
874:   }
875: }
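
/*
   Worked example (editorial, derived from the code above): for a local patch
   with info.xs = info.ys = 0 and info.xm = info.ym = 10, the bounds are
   is = js = -0.5 and ie = je = 9.5.  A departure point (ir, jr) = (-1.2, 3.0)
   lies in the left column, center row, so the routine returns 1; the point
   (3.0, 12.0) lies in the center column, top row, and returns 3 (see the
   process map above).
*/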