Actual source code: dagtona.c

petsc-3.9.4 2018-09-11

/*
     Tools to help solve the coarse grid problem redundantly.
  Provides two scatter contexts: (1) one that maps from the usual global vector
  to a copy of the entire vector, in NATURAL numbering, on every processor, and
  (2) one that extracts, from the entire vector held in natural numbering on
  each processor, this processor's piece in GLOBAL numbering.
*/

 #include <petsc/private/dmdaimpl.h>

/*@
   DMDAGlobalToNaturalAllCreate - Creates a scatter context that maps from the
     global vector to a copy of the entire vector on each processor, in natural numbering

   Collective on DMDA

   Input Parameter:
.  da - the distributed array context

   Output Parameter:
.  scatter - the scatter context

   Level: advanced

.keywords: distributed array, global to local, begin, coarse problem

.seealso: DMDAGlobalToNaturalEnd(), DMLocalToGlobalBegin(), DMDACreate2d(),
          DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMDACreateNaturalVector()
@*/
PetscErrorCode  DMDAGlobalToNaturalAllCreate(DM da,VecScatter *scatter)
{
  PetscErrorCode ierr;
  PetscInt       N;
  IS             from,to;
  Vec            tmplocal,global;
  AO             ao;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidPointer(scatter,2);
  ierr = DMDAGetAO(da,&ao);CHKERRQ(ierr);

  /* create the scatter context; converting the destination indices to
     application (natural) ordering gives every process the full vector
     in natural numbering */
  ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)da),dd->w,dd->Nlocal,PETSC_DETERMINE,0,&global);CHKERRQ(ierr);
  ierr = VecGetSize(global,&N);CHKERRQ(ierr);
  ierr = ISCreateStride(PetscObjectComm((PetscObject)da),N,0,1,&to);CHKERRQ(ierr);
  ierr = AOPetscToApplicationIS(ao,to);CHKERRQ(ierr);
  ierr = ISCreateStride(PetscObjectComm((PetscObject)da),N,0,1,&from);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,dd->w,N,0,&tmplocal);CHKERRQ(ierr);
  ierr = VecScatterCreate(global,from,tmplocal,to,scatter);CHKERRQ(ierr);
  ierr = VecDestroy(&tmplocal);CHKERRQ(ierr);
  ierr = VecDestroy(&global);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
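
/* The following is a minimal usage sketch, not part of the original PETSc source:
   it shows one way the scatter created by DMDAGlobalToNaturalAllCreate() might be
   driven with VecScatterBegin()/VecScatterEnd(). The DMDA size (32), dof (1),
   function name and variable names are illustrative assumptions. */
static PetscErrorCode ExampleGlobalToNaturalAll_Sketch(void)
{
  DM             da;
  Vec            global,natural_all;
  VecScatter     tonatural;
  PetscInt       N;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* hypothetical 1d distributed array: 32 points, 1 dof, stencil width 1 */
  ierr = DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,32,1,1,NULL,&da);CHKERRQ(ierr);
  ierr = DMSetUp(da);CHKERRQ(ierr);
  ierr = DMCreateGlobalVector(da,&global);CHKERRQ(ierr);
  ierr = VecGetSize(global,&N);CHKERRQ(ierr);

  /* every process receives its own sequential copy of the entire vector,
     stored in natural numbering */
  ierr = VecCreateSeq(PETSC_COMM_SELF,N,&natural_all);CHKERRQ(ierr);
  ierr = DMDAGlobalToNaturalAllCreate(da,&tonatural);CHKERRQ(ierr);
  ierr = VecScatterBegin(tonatural,global,natural_all,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(tonatural,global,natural_all,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  ierr = VecScatterDestroy(&tonatural);CHKERRQ(ierr);
  ierr = VecDestroy(&natural_all);CHKERRQ(ierr);
  ierr = VecDestroy(&global);CHKERRQ(ierr);
  ierr = DMDestroy(&da);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}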

/*@
   DMDANaturalAllToGlobalCreate - Creates a scatter context that maps from a copy
     of the entire vector on each processor to its local part in the global vector.

   Collective on DMDA

   Input Parameter:
.  da - the distributed array context

   Output Parameter:
.  scatter - the scatter context

   Level: advanced

.keywords: distributed array, global to local, begin, coarse problem

.seealso: DMDAGlobalToNaturalEnd(), DMLocalToGlobalBegin(), DMDACreate2d(),
          DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMDACreateNaturalVector()
@*/
PetscErrorCode  DMDANaturalAllToGlobalCreate(DM da,VecScatter *scatter)
{
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;
  PetscInt       M,m = dd->Nlocal,start;
  IS             from,to;
  Vec            tmplocal,global;
  AO             ao;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidPointer(scatter,2);
  ierr = DMDAGetAO(da,&ao);CHKERRQ(ierr);

  /* create the scatter context; converting the source indices to application
     (natural) ordering pulls this process's piece out of the full copy */
  ierr = MPIU_Allreduce(&m,&M,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)da));CHKERRQ(ierr);
  ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)da),dd->w,m,PETSC_DETERMINE,0,&global);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(global,&start,NULL);CHKERRQ(ierr);
  ierr = ISCreateStride(PetscObjectComm((PetscObject)da),m,start,1,&from);CHKERRQ(ierr);
  ierr = AOPetscToApplicationIS(ao,from);CHKERRQ(ierr);
  ierr = ISCreateStride(PetscObjectComm((PetscObject)da),m,start,1,&to);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,dd->w,M,0,&tmplocal);CHKERRQ(ierr);
  ierr = VecScatterCreate(tmplocal,from,global,to,scatter);CHKERRQ(ierr);
  ierr = VecDestroy(&tmplocal);CHKERRQ(ierr);
  ierr = VecDestroy(&global);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
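
/* A minimal usage sketch, not part of the original PETSc source: given a DMDA,
   its global vector, and a per-process sequential copy of the entire vector in
   natural numbering (for example, one filled by a redundant coarse grid solve),
   scatter this process's piece back into the global vector. The function name
   and argument names are illustrative assumptions. */
static PetscErrorCode ExampleNaturalAllToGlobal_Sketch(DM da,Vec natural_all,Vec global)
{
  VecScatter     toglobal;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* natural_all is assumed to be a sequential vector of the full global length on every rank */
  ierr = DMDANaturalAllToGlobalCreate(da,&toglobal);CHKERRQ(ierr);
  ierr = VecScatterBegin(toglobal,natural_all,global,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(toglobal,natural_all,global,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&toglobal);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}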