Actual source code: packm.c

petsc-3.5.4 2015-05-23
  2: #include <../src/dm/impls/composite/packimpl.h>       /*I  "petscdmcomposite.h"  I*/

  6: static PetscErrorCode DMCreateMatrix_Composite_Nest(DM dm,Mat *J)
  7: {
  8:   const DM_Composite           *com = (DM_Composite*)dm->data;
  9:   const struct DMCompositeLink *rlink,*clink;
 10:   PetscErrorCode               ierr;
 11:   IS                           *isg;
 12:   Mat                          *submats;
 13:   PetscInt                     i,j,n;

 16:   n = com->nDM;                 /* Total number of entries */

 18:   /* Explicit index sets are not required for MatCreateNest, but getting them here allows MatNest to do consistency
 19:    * checking and allows ISEqual to compare by identity instead of by contents. */
 20:   DMCompositeGetGlobalISs(dm,&isg);

 22:   /* Get submatrices */
 23:   PetscMalloc1(n*n,&submats);
 24:   for (i=0,rlink=com->next; rlink; i++,rlink=rlink->next) {
 25:     for (j=0,clink=com->next; clink; j++,clink=clink->next) {
 26:       Mat sub = NULL;
 27:       if (i == j) {
 28:         DMCreateMatrix(rlink->dm,&sub);
 29:       } else if (com->FormCoupleLocations) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"Cannot manage off-diagonal parts yet");
 30:       submats[i*n+j] = sub;
 31:     }
 32:   }

 34:   MatCreateNest(PetscObjectComm((PetscObject)dm),n,isg,n,isg,submats,J);

 36:   /* Disown references */
 37:   for (i=0; i<n; i++) {ISDestroy(&isg[i]);}
 38:   PetscFree(isg);

 40:   for (i=0; i<n*n; i++) {
 41:     if (submats[i]) {MatDestroy(&submats[i]);}
 42:   }
 43:   PetscFree(submats);
 44:   return(0);
 45: }
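
The routine above is selected when the DM's matrix type is MATNEST: it builds one index set per packed sub-DM, creates each diagonal block with the sub-DM's own DMCreateMatrix(), and hands the blocks to MatCreateNest(). The following minimal sketch shows how a user program reaches this code path; it is not part of packm.c, and the names da0, da1, pack and the grid sizes are illustrative only.

#include <petscdmcomposite.h>
#include <petscdmda.h>

int main(int argc,char **argv)
{
  PetscErrorCode ierr;
  DM             da0,da1,pack;
  Mat            J,J00;

  ierr = PetscInitialize(&argc,&argv,NULL,NULL);CHKERRQ(ierr);
  /* two 1d DMDAs packed into one composite DM */
  ierr = DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,8,1,1,NULL,&da0);CHKERRQ(ierr);
  ierr = DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,12,1,1,NULL,&da1);CHKERRQ(ierr);
  ierr = DMCompositeCreate(PETSC_COMM_WORLD,&pack);CHKERRQ(ierr);
  ierr = DMCompositeAddDM(pack,da0);CHKERRQ(ierr);
  ierr = DMCompositeAddDM(pack,da1);CHKERRQ(ierr);

  /* requesting MATNEST routes DMCreateMatrix() to DMCreateMatrix_Composite_Nest() */
  ierr = DMSetMatType(pack,MATNEST);CHKERRQ(ierr);
  ierr = DMCreateMatrix(pack,&J);CHKERRQ(ierr);

  /* the (0,0) block is the matrix created by da0's own DMCreateMatrix() */
  ierr = MatNestGetSubMat(J,0,0,&J00);CHKERRQ(ierr);
  ierr = MatView(J00,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);

  ierr = MatDestroy(&J);CHKERRQ(ierr);
  ierr = DMDestroy(&pack);CHKERRQ(ierr);
  ierr = DMDestroy(&da0);CHKERRQ(ierr);
  ierr = DMDestroy(&da1);CHKERRQ(ierr);
  ierr = PetscFinalize();
  return 0;
}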

 49: static PetscErrorCode DMCreateMatrix_Composite_AIJ(DM dm,Mat *J)
 50: {
 51:   PetscErrorCode         ierr;
 52:   DM_Composite           *com = (DM_Composite*)dm->data;
 53:   struct DMCompositeLink *next;
 54:   PetscInt               m,*dnz,*onz,i,j,mA;
 55:   Mat                    Atmp;
 56:   PetscMPIInt            rank;
 57:   PetscBool              dense = PETSC_FALSE;

 60:   /* use global vector to determine layout needed for matrix */
 61:   m = com->n;

 63:   MatCreate(PetscObjectComm((PetscObject)dm),J);
 64:   MatSetSizes(*J,m,m,PETSC_DETERMINE,PETSC_DETERMINE);
 65:   MatSetType(*J,dm->mattype);

 67:   /*
 68:      Extremely inefficient but will compute entire Jacobian for testing
 69:   */
 70:   PetscOptionsGetBool(((PetscObject)dm)->prefix,"-dmcomposite_dense_jacobian",&dense,NULL);
 71:   if (dense) {
 72:     PetscInt    rstart,rend,*indices;
 73:     PetscScalar *values;

 75:     mA   = com->N;
 76:     MatMPIAIJSetPreallocation(*J,mA,NULL,mA-m,NULL);
 77:     MatSeqAIJSetPreallocation(*J,mA,NULL);

 79:     MatGetOwnershipRange(*J,&rstart,&rend);
 80:     PetscMalloc2(mA,&values,mA,&indices);
 81:     PetscMemzero(values,mA*sizeof(PetscScalar));
 82:     for (i=0; i<mA; i++) indices[i] = i;
 83:     for (i=rstart; i<rend; i++) {
 84:       MatSetValues(*J,1,&i,mA,indices,values,INSERT_VALUES);
 85:     }
 86:     PetscFree2(values,indices);
 87:     MatAssemblyBegin(*J,MAT_FINAL_ASSEMBLY);
 88:     MatAssemblyEnd(*J,MAT_FINAL_ASSEMBLY);
 89:     return(0);
 90:   }

 92:   MPI_Comm_rank(PetscObjectComm((PetscObject)dm),&rank);
 93:   MatPreallocateInitialize(PetscObjectComm((PetscObject)dm),m,m,dnz,onz);
 94:   /* loop over packed objects, handling one at a time */
 95:   next = com->next;
 96:   while (next) {
 97:     PetscInt       nc,rstart,*ccols,maxnc;
 98:     const PetscInt *cols,*rstarts;
 99:     PetscMPIInt    proc;

101:     DMCreateMatrix(next->dm,&Atmp);
102:     MatGetOwnershipRange(Atmp,&rstart,NULL);
103:     MatGetOwnershipRanges(Atmp,&rstarts);
104:     MatGetLocalSize(Atmp,&mA,NULL);

106:     maxnc = 0;
107:     for (i=0; i<mA; i++) {
108:       MatGetRow(Atmp,rstart+i,&nc,NULL,NULL);
109:       MatRestoreRow(Atmp,rstart+i,NULL,NULL,NULL);
110:       maxnc = PetscMax(nc,maxnc);
111:     }
112:     PetscMalloc1(maxnc,&ccols);
113:     for (i=0; i<mA; i++) {
114:       MatGetRow(Atmp,rstart+i,&nc,&cols,NULL);
115:       /* remap the columns taking into account how much they are shifted on each process */
116:       for (j=0; j<nc; j++) {
117:         proc = 0;
118:         while (cols[j] >= rstarts[proc+1]) proc++;
119:         ccols[j] = cols[j] + next->grstarts[proc] - rstarts[proc];
120:       }
121:       MatPreallocateSet(com->rstart+next->rstart+i,nc,ccols,dnz,onz);
122:       MatRestoreRow(Atmp,rstart+i,&nc,&cols,NULL);
123:     }
124:     PetscFree(ccols);
125:     MatDestroy(&Atmp);
126:     next = next->next;
127:   }
128:   if (com->FormCoupleLocations) {
129:     (*com->FormCoupleLocations)(dm,NULL,dnz,onz,__rstart,__nrows,__start,__end);
130:   }
131:   MatMPIAIJSetPreallocation(*J,0,dnz,0,onz);
132:   MatSeqAIJSetPreallocation(*J,0,dnz);
133:   MatPreallocateFinalize(dnz,onz);

135:   if (dm->prealloc_only) return(0);

137:   next = com->next;
138:   while (next) {
139:     PetscInt          nc,rstart,row,maxnc,*ccols;
140:     const PetscInt    *cols,*rstarts;
141:     const PetscScalar *values;
142:     PetscMPIInt       proc;

144:     DMCreateMatrix(next->dm,&Atmp);
145:     MatGetOwnershipRange(Atmp,&rstart,NULL);
146:     MatGetOwnershipRanges(Atmp,&rstarts);
147:     MatGetLocalSize(Atmp,&mA,NULL);
148:     maxnc = 0;
149:     for (i=0; i<mA; i++) {
150:       MatGetRow(Atmp,rstart+i,&nc,NULL,NULL);
151:       maxnc = PetscMax(nc,maxnc);
152:       MatRestoreRow(Atmp,rstart+i,&nc,NULL,NULL);
153:     }
154:     PetscMalloc1(maxnc,&ccols);
155:     for (i=0; i<mA; i++) {
156:       MatGetRow(Atmp,rstart+i,&nc,(const PetscInt**)&cols,&values);
157:       for (j=0; j<nc; j++) {
158:         proc = 0;
159:         while (cols[j] >= rstarts[proc+1]) proc++;
160:         ccols[j] = cols[j] + next->grstarts[proc] - rstarts[proc];
161:       }
162:       row  = com->rstart+next->rstart+i;
163:       MatSetValues(*J,1,&row,nc,ccols,values,INSERT_VALUES);
164:       MatRestoreRow(Atmp,rstart+i,&nc,(const PetscInt**)&cols,&values);
165:     }
166:     PetscFree(ccols);
167:     MatDestroy(&Atmp);
168:     next = next->next;
169:   }
170:   if (com->FormCoupleLocations) {
171:     PetscInt __rstart;
172:     MatGetOwnershipRange(*J,&__rstart,NULL);
173:     (*com->FormCoupleLocations)(dm,*J,NULL,NULL,__rstart,0,0,0);
174:   }
175:   MatAssemblyBegin(*J,MAT_FINAL_ASSEMBLY);
176:   MatAssemblyEnd(*J,MAT_FINAL_ASSEMBLY);
177:   return(0);
178: }
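
The AIJ routine above makes two passes over the packed sub-DMs: the first queries each sub-DM's own matrix row by row to build the dnz/onz preallocation counts (remapping columns into the composite numbering through next->grstarts), and the second copies the numerical values with MatSetValues(). The -dmcomposite_dense_jacobian option, read with the DM's options prefix, bypasses the analysis and preallocates fully dense rows, which is intended only for testing. A hedged sketch of driving this path follows; the helper name and the pack argument are illustrative, not part of PETSc.

#include <petscdmcomposite.h>

/* illustrative helper, assuming 'pack' is an already populated DMComposite */
static PetscErrorCode CreateCompositeAIJ(DM pack)
{
  PetscErrorCode ierr;
  Mat            J;
  PetscInt       rstart,rend;

  PetscFunctionBegin;
  ierr = DMSetMatType(pack,MATAIJ);CHKERRQ(ierr);              /* the default; shown explicitly */
  ierr = DMCreateMatrix(pack,&J);CHKERRQ(ierr);                /* runs DMCreateMatrix_Composite_AIJ() */
  ierr = MatGetOwnershipRange(J,&rstart,&rend);CHKERRQ(ierr);  /* layout matches the packed global vector */
  ierr = PetscPrintf(PETSC_COMM_WORLD,"rank 0 owns rows %D to %D\n",rstart,rend);CHKERRQ(ierr);
  ierr = MatDestroy(&J);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

Running the calling program with -dmcomposite_dense_jacobian (prefixed by the DM's options prefix, if one is set) switches to the dense preallocation branch near the top of the routine.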

182: PetscErrorCode DMCreateMatrix_Composite(DM dm,Mat *J)
183: {
184:   PetscErrorCode         ierr;
185:   PetscBool              usenest;
186:   ISLocalToGlobalMapping ltogmap;

189:   DMSetUp(dm);
190:   PetscStrcmp(dm->mattype,MATNEST,&usenest);
191:   if (usenest) {
192:     DMCreateMatrix_Composite_Nest(dm,J);
193:   } else {
194:     DMCreateMatrix_Composite_AIJ(dm,J);
195:   }

197:   DMGetLocalToGlobalMapping(dm,&ltogmap);
198:   MatSetLocalToGlobalMapping(*J,ltogmap,ltogmap);
199:   return(0);
200: }
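
Whichever branch runs, DMCreateMatrix_Composite() finishes by attaching the composite local-to-global mapping, so the returned matrix can be assembled with MatSetValuesLocal() using packed local indices. Below is a hedged fragment for the AIJ case (MATNEST does not generally support setting individual values this way); the helper name is illustrative and J is assumed to come from DMCreateMatrix() on a DMComposite.

#include <petscmat.h>

/* illustrative helper: add 1.0 on the diagonal entry addressed by the first
   packed local index, relying on the mapping set by DMCreateMatrix_Composite() */
static PetscErrorCode AddFirstLocalDiagonal(Mat J)
{
  PetscErrorCode ierr;
  PetscInt       lrow = 0;      /* local index in the packed (composite) layout */
  PetscScalar    one  = 1.0;

  PetscFunctionBegin;
  ierr = MatSetValuesLocal(J,1,&lrow,1,&lrow,&one,ADD_VALUES);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}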