Actual source code: dagtona.c


/*
     Tools to help solve the coarse grid problem redundantly.
  Provides two scatter contexts: (1) one that maps from the usual global vector
  to a copy of the entire vector in NATURAL numbering on every process, and (2)
  one that extracts, from the entire vector in natural numbering on each
  process, this process's piece in GLOBAL numbering.
*/

#include <petsc/private/dmdaimpl.h>

/*@
   DMDAGlobalToNaturalAllCreate - Creates a scatter context that maps from the
     global vector to a copy of the entire vector on each process, in natural numbering

   Collective on da

   Input Parameter:
.  da - the distributed array context

   Output Parameter:
.  scatter - the scatter context

   Level: advanced

.seealso: DMDAGlobalToNaturalEnd(), DMLocalToGlobalBegin(), DMDACreate2d(),
          DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMDACreateNaturalVector()
@*/
PetscErrorCode  DMDAGlobalToNaturalAllCreate(DM da,VecScatter *scatter)
{
  PetscInt       N;
  IS             from,to;
  Vec            tmplocal,global;
  AO             ao;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscCall(DMDAGetAO(da,&ao));

  /* create the scatter context */
  /* "global" is a size template with the usual parallel (PETSc-ordered) layout; no storage is attached */
  PetscCall(VecCreateMPIWithArray(PetscObjectComm((PetscObject)da),dd->w,dd->Nlocal,PETSC_DETERMINE,NULL,&global));
  PetscCall(VecGetSize(global,&N));
  /* "to" maps each PETSc-ordered index 0..N-1 to its position in the natural (application) ordering */
  PetscCall(ISCreateStride(PetscObjectComm((PetscObject)da),N,0,1,&to));
  PetscCall(AOPetscToApplicationIS(ao,to));
  PetscCall(ISCreateStride(PetscObjectComm((PetscObject)da),N,0,1,&from));
  /* "tmplocal" is a sequential template large enough to hold the entire vector on each process */
  PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF,dd->w,N,NULL,&tmplocal));
  PetscCall(VecScatterCreate(global,from,tmplocal,to,scatter));
  PetscCall(VecDestroy(&tmplocal));
  PetscCall(VecDestroy(&global));
  PetscCall(ISDestroy(&from));
  PetscCall(ISDestroy(&to));
  return 0;
}
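
/*
   A minimal usage sketch (an addition, not part of the original dagtona.c):
   gather a DMDA global vector into a full-length sequential vector, in
   natural ordering, on every process. The helper name GatherNaturalAll and
   the use of VecCreateSeq() for the destination are illustrative assumptions.
*/
#include <petscdmda.h>

PetscErrorCode GatherNaturalAll(DM da,Vec global,Vec *natural_all)
{
  VecScatter scatter;
  PetscInt   N;

  PetscCall(VecGetSize(global,&N));
  /* every process gets a sequential vector holding all N entries */
  PetscCall(VecCreateSeq(PETSC_COMM_SELF,N,natural_all));
  PetscCall(DMDAGlobalToNaturalAllCreate(da,&scatter));
  PetscCall(VecScatterBegin(scatter,global,*natural_all,INSERT_VALUES,SCATTER_FORWARD));
  PetscCall(VecScatterEnd(scatter,global,*natural_all,INSERT_VALUES,SCATTER_FORWARD));
  PetscCall(VecScatterDestroy(&scatter));
  return 0;
}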

/*@
   DMDANaturalAllToGlobalCreate - Creates a scatter context that maps from a copy
     of the entire vector on each processor to its local part in the global vector.

   Collective on da

   Input Parameter:
.  da - the distributed array context

   Output Parameter:
.  scatter - the scatter context

   Level: advanced

.seealso: DMDAGlobalToNaturalEnd(), DMLocalToGlobalBegin(), DMDACreate2d(),
          DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMDACreateNaturalVector()
@*/
PetscErrorCode  DMDANaturalAllToGlobalCreate(DM da,VecScatter *scatter)
{
  DM_DA          *dd = (DM_DA*)da->data;
  PetscInt       M,m = dd->Nlocal,start;
  IS             from,to;
  Vec            tmplocal,global;
  AO             ao;

  PetscCall(DMDAGetAO(da,&ao));

  /* create the scatter context */
  /* M is the global length: the sum of the local lengths over all processes */
  PetscCallMPI(MPIU_Allreduce(&m,&M,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)da)));
  PetscCall(VecCreateMPIWithArray(PetscObjectComm((PetscObject)da),dd->w,m,PETSC_DETERMINE,NULL,&global));
  PetscCall(VecGetOwnershipRange(global,&start,NULL));
  /* "from" holds the natural-ordering positions of this process's PETSc-ordered entries */
  PetscCall(ISCreateStride(PetscObjectComm((PetscObject)da),m,start,1,&from));
  PetscCall(AOPetscToApplicationIS(ao,from));
  PetscCall(ISCreateStride(PetscObjectComm((PetscObject)da),m,start,1,&to));
  PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF,dd->w,M,NULL,&tmplocal));
  PetscCall(VecScatterCreate(tmplocal,from,global,to,scatter));
  PetscCall(VecDestroy(&tmplocal));
  PetscCall(VecDestroy(&global));
  PetscCall(ISDestroy(&from));
  PetscCall(ISDestroy(&to));
  return 0;
}
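
/*
   A minimal usage sketch (an addition, not part of the original dagtona.c):
   push a full-length copy in natural ordering back into this process's part
   of the global vector. The helper name ScatterNaturalAllBack is an
   illustrative assumption.
*/
#include <petscdmda.h>

PetscErrorCode ScatterNaturalAllBack(DM da,Vec natural_all,Vec global)
{
  VecScatter scatter;

  PetscCall(DMDANaturalAllToGlobalCreate(da,&scatter));
  /* only this process's piece of natural_all is sent into the global vector */
  PetscCall(VecScatterBegin(scatter,natural_all,global,INSERT_VALUES,SCATTER_FORWARD));
  PetscCall(VecScatterEnd(scatter,natural_all,global,INSERT_VALUES,SCATTER_FORWARD));
  PetscCall(VecScatterDestroy(&scatter));
  return 0;
}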