/* dagtona.c  (petsc-3.6.4, 2016-04-12) */

/*
   Tools to help solve the coarse grid problem redundantly.
   Provides two scatter contexts: (1) one that maps from the usual distributed
   global vector to a copy of the entire vector, in NATURAL numbering, on every
   process, and (2) one that extracts from that entire (naturally numbered)
   vector on each process the piece this process owns in GLOBAL numbering.
*/

#include <petsc/private/dmdaimpl.h> /*I "petscdmda.h" I*/

/*@
   DMDAGlobalToNaturalAllCreate - Creates a scatter context that maps from the
   distributed global vector to a copy of the entire vector on each process,
   in natural numbering.

   Collective on DMDA

   Input Parameter:
.  da - the distributed array context

   Output Parameter:
.  scatter - the scatter context

   Level: advanced

.keywords: distributed array, global to local, begin, coarse problem

.seealso: DMDAGlobalToNaturalEnd(), DMLocalToGlobalBegin(), DMDACreate2d(),
          DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMDACreateNaturalVector()
@*/
PetscErrorCode DMDAGlobalToNaturalAllCreate(DM da,VecScatter *scatter)
{
  PetscErrorCode ierr;
  PetscInt       N;
  IS             from,to;
  Vec            tmplocal,global;
  AO             ao;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  ierr = DMDAGetAO(da,&ao);CHKERRQ(ierr);

  /* create the scatter context */
  ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)da),dd->w,dd->Nlocal,PETSC_DETERMINE,0,&global);CHKERRQ(ierr);
  ierr = VecGetSize(global,&N);CHKERRQ(ierr);
  /* destination indices: the entire vector, permuted from PETSc to natural (application) ordering */
  ierr = ISCreateStride(PetscObjectComm((PetscObject)da),N,0,1,&to);CHKERRQ(ierr);
  ierr = AOPetscToApplicationIS(ao,to);CHKERRQ(ierr);
  /* source indices: every entry of the distributed global vector */
  ierr = ISCreateStride(PetscObjectComm((PetscObject)da),N,0,1,&from);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,dd->w,N,0,&tmplocal);CHKERRQ(ierr);
  ierr = VecScatterCreate(global,from,tmplocal,to,scatter);CHKERRQ(ierr);
  ierr = VecDestroy(&tmplocal);CHKERRQ(ierr);
  ierr = VecDestroy(&global);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
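
/*
   Usage sketch (added for illustration; not part of the original file). It shows
   how the scatter created above is typically driven: every rank ends up with a
   sequential vector holding the whole field in natural ordering. The helper name
   ExampleGatherNaturalAll and its variable names are hypothetical.
*/
static PetscErrorCode ExampleGatherNaturalAll(DM da,Vec g,Vec *natall)
{
  PetscErrorCode ierr;
  VecScatter     tonatural;
  PetscInt       N;

  PetscFunctionBegin;
  ierr = DMDAGlobalToNaturalAllCreate(da,&tonatural);CHKERRQ(ierr);
  ierr = VecGetSize(g,&N);CHKERRQ(ierr);
  /* every rank holds the complete vector, so the destination is a sequential Vec of full size */
  ierr = VecCreateSeq(PETSC_COMM_SELF,N,natall);CHKERRQ(ierr);
  ierr = VecScatterBegin(tonatural,g,*natall,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(tonatural,g,*natall,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&tonatural);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
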
/*@
   DMDANaturalAllToGlobalCreate - Creates a scatter context that maps from a copy
   of the entire vector on each processor to its local part in the global vector.

   Collective on DMDA

   Input Parameter:
.  da - the distributed array context

   Output Parameter:
.  scatter - the scatter context

   Level: advanced

.keywords: distributed array, global to local, begin, coarse problem

.seealso: DMDAGlobalToNaturalEnd(), DMLocalToGlobalBegin(), DMDACreate2d(),
          DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMDACreateNaturalVector()
@*/
PetscErrorCode DMDANaturalAllToGlobalCreate(DM da,VecScatter *scatter)
{
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;
  PetscInt       M,m = dd->Nlocal,start;
  IS             from,to;
  Vec            tmplocal,global;
  AO             ao;

  PetscFunctionBegin;
  ierr = DMDAGetAO(da,&ao);CHKERRQ(ierr);

  /* create the scatter context */
  ierr = MPI_Allreduce(&m,&M,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)da));CHKERRQ(ierr);
  ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)da),dd->w,m,PETSC_DETERMINE,0,&global);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(global,&start,NULL);CHKERRQ(ierr);
  /* source indices: this process's owned range, permuted to natural (application) ordering */
  ierr = ISCreateStride(PetscObjectComm((PetscObject)da),m,start,1,&from);CHKERRQ(ierr);
  ierr = AOPetscToApplicationIS(ao,from);CHKERRQ(ierr);
  /* destination indices: the same owned range of the distributed global vector */
  ierr = ISCreateStride(PetscObjectComm((PetscObject)da),m,start,1,&to);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,dd->w,M,0,&tmplocal);CHKERRQ(ierr);
  ierr = VecScatterCreate(tmplocal,from,global,to,scatter);CHKERRQ(ierr);
  ierr = VecDestroy(&tmplocal);CHKERRQ(ierr);
  ierr = VecDestroy(&global);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
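
/*
   Usage sketch (added for illustration; not part of the original file). It shows
   the reverse direction: a naturally ordered copy of the entire field, available
   on every rank (for example the result of the gather sketched above), is pushed
   back into the owned part of a DMDA global vector. ExampleNaturalAllToGlobal and
   its variable names are hypothetical.
*/
static PetscErrorCode ExampleNaturalAllToGlobal(DM da,Vec natall,Vec g)
{
  PetscErrorCode ierr;
  VecScatter     toglobal;

  PetscFunctionBegin;
  ierr = DMDANaturalAllToGlobalCreate(da,&toglobal);CHKERRQ(ierr);
  /* each rank reads only the entries it owns (in global numbering) from the full natural-order copy */
  ierr = VecScatterBegin(toglobal,natall,g,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(toglobal,natall,g,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&toglobal);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}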