Actual source code: mpiadj.c

petsc-3.11.4 2019-09-28

  2: /*
  3:     Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
  4: */
  5:  #include <../src/mat/impls/adj/mpi/mpiadj.h>
  6:  #include <petscsf.h>

  2: /*
  3:  * This routine provides the common implementation used by both MatCreateSubMatrix (parallel sub-matrix) and MatCreateSubMatrices (sequential sub-matrices)
  4:  */
 11: static PetscErrorCode MatCreateSubMatrix_MPIAdj_data(Mat adj,IS irows, IS icols, PetscInt **sadj_xadj,PetscInt **sadj_adjncy,PetscInt **sadj_values)
 12: {
 13:   PetscInt           nlrows_is,icols_n,i,j,nroots,nleaves,owner,rlocalindex,*ncols_send,*ncols_recv;
 14:   PetscInt           nlrows_mat,*adjncy_recv,Ncols_recv,Ncols_send,*xadj_recv,*values_recv;
 15:   PetscInt          *ncols_recv_offsets,loc,rnclos,*sadjncy,*sxadj,*svalues;
 16:   const PetscInt    *irows_indices,*icols_indices,*xadj, *adjncy;
 17:   Mat_MPIAdj        *a = (Mat_MPIAdj*)adj->data;
 18:   PetscLayout        rmap;
 19:   MPI_Comm           comm;
 20:   PetscSF            sf;
 21:   PetscSFNode       *iremote;
 22:   PetscBool          done;
 23:   PetscErrorCode     ierr;

 26:   PetscObjectGetComm((PetscObject)adj,&comm);
 27:   MatGetLayouts(adj,&rmap,NULL);
 28:   ISGetLocalSize(irows,&nlrows_is);
 29:   ISGetIndices(irows,&irows_indices);
 30:   PetscCalloc1(nlrows_is,&iremote);
 31:   /* construct the star-forest (SF) graph: each selected row becomes a leaf pointing at its owning process */
 32:   nleaves = nlrows_is;
 33:   for (i=0; i<nlrows_is; i++){
 34:     owner = -1;
 35:     rlocalindex = -1;
 36:     PetscLayoutFindOwnerIndex(rmap,irows_indices[i],&owner,&rlocalindex);
 37:     iremote[i].rank  = owner;
 38:     iremote[i].index = rlocalindex;
 39:   }
 40:   MatGetRowIJ(adj,0,PETSC_FALSE,PETSC_FALSE,&nlrows_mat,&xadj,&adjncy,&done);
 41:   PetscCalloc4(nlrows_mat,&ncols_send,nlrows_is,&xadj_recv,nlrows_is+1,&ncols_recv_offsets,nlrows_is,&ncols_recv);
 42:   nroots = nlrows_mat;
 43:   for (i=0; i<nlrows_mat; i++){
 44:     ncols_send[i] = xadj[i+1]-xadj[i];
 45:   }
 46:   PetscSFCreate(comm,&sf);
 47:   PetscSFSetGraph(sf,nroots,nleaves,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);
 48:   PetscSFSetType(sf,PETSCSFBASIC);
 49:   PetscSFSetFromOptions(sf);
 50:   PetscSFBcastBegin(sf,MPIU_INT,ncols_send,ncols_recv);
 51:   PetscSFBcastEnd(sf,MPIU_INT,ncols_send,ncols_recv);
 52:   PetscSFBcastBegin(sf,MPIU_INT,xadj,xadj_recv);
 53:   PetscSFBcastEnd(sf,MPIU_INT,xadj,xadj_recv);
 54:   PetscSFDestroy(&sf);
 55:   Ncols_recv =0;
 56:   for (i=0; i<nlrows_is; i++){
 57:     Ncols_recv             += ncols_recv[i];
 58:     ncols_recv_offsets[i+1] = ncols_recv[i]+ncols_recv_offsets[i];
 59:   }
 60:   Ncols_send = 0;
 61:   for(i=0; i<nlrows_mat; i++){
 62:     Ncols_send += ncols_send[i];
 63:   }
 64:   PetscCalloc1(Ncols_recv,&iremote);
 65:   PetscCalloc1(Ncols_recv,&adjncy_recv);
 66:   nleaves = Ncols_recv;
 67:   Ncols_recv = 0;
 68:   for (i=0; i<nlrows_is; i++){
 69:     PetscLayoutFindOwner(rmap,irows_indices[i],&owner);
 70:     for (j=0; j<ncols_recv[i]; j++){
 71:       iremote[Ncols_recv].rank    = owner;
 72:       iremote[Ncols_recv++].index = xadj_recv[i]+j;
 73:     }
 74:   }
 75:   ISRestoreIndices(irows,&irows_indices);
 76:   /* allocate a receive buffer for the edge weights, if they are in use */
 77:   if (a->useedgeweights){PetscCalloc1(Ncols_recv,&values_recv);}
 78:   nroots = Ncols_send;
 79:   PetscSFCreate(comm,&sf);
 80:   PetscSFSetGraph(sf,nroots,nleaves,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);
 81:   PetscSFSetType(sf,PETSCSFBASIC);
 82:   PetscSFSetFromOptions(sf);
 83:   PetscSFBcastBegin(sf,MPIU_INT,adjncy,adjncy_recv);
 84:   PetscSFBcastEnd(sf,MPIU_INT,adjncy,adjncy_recv);
 85:   if (a->useedgeweights){
 86:     PetscSFBcastBegin(sf,MPIU_INT,a->values,values_recv);
 87:     PetscSFBcastEnd(sf,MPIU_INT,a->values,values_recv);
 88:   }
 89:   PetscSFDestroy(&sf);
 90:   MatRestoreRowIJ(adj,0,PETSC_FALSE,PETSC_FALSE,&nlrows_mat,&xadj,&adjncy,&done);
 91:   ISGetLocalSize(icols,&icols_n);
 92:   ISGetIndices(icols,&icols_indices);
 93:   rnclos = 0;
 94:   for (i=0; i<nlrows_is; i++){
 95:     for (j=ncols_recv_offsets[i]; j<ncols_recv_offsets[i+1]; j++){
 96:       PetscFindInt(adjncy_recv[j], icols_n, icols_indices, &loc);
 97:       if (loc<0){
 98:         adjncy_recv[j] = -1;
 99:         if (a->useedgeweights) values_recv[j] = -1;
100:         ncols_recv[i]--;
101:       } else {
102:         rnclos++;
103:       }
104:     }
105:   }
106:   ISRestoreIndices(icols,&icols_indices);
107:   PetscCalloc1(rnclos,&sadjncy);
108:   if (a->useedgeweights) {PetscCalloc1(rnclos,&svalues);}
109:   PetscCalloc1(nlrows_is+1,&sxadj);
110:   rnclos = 0;
111:   for(i=0; i<nlrows_is; i++){
112:     for(j=ncols_recv_offsets[i]; j<ncols_recv_offsets[i+1]; j++){
113:       if (adjncy_recv[j]<0) continue;
114:       sadjncy[rnclos] = adjncy_recv[j];
115:       if (a->useedgeweights) svalues[rnclos] = values_recv[j];
116:       rnclos++;
117:     }
118:   }
119:   for(i=0; i<nlrows_is; i++){
120:     sxadj[i+1] = sxadj[i]+ncols_recv[i];
121:   }
122:   if (sadj_xadj)  { *sadj_xadj = sxadj;} else    { PetscFree(sxadj);}
123:   if (sadj_adjncy){ *sadj_adjncy = sadjncy;} else { PetscFree(sadjncy);}
124:   if (sadj_values){
125:     if (a->useedgeweights) *sadj_values = svalues; else *sadj_values=0;
126:   } else {
127:     if (a->useedgeweights) {PetscFree(svalues);}
128:   }
129:   PetscFree4(ncols_send,xadj_recv,ncols_recv_offsets,ncols_recv);
130:   PetscFree(adjncy_recv);
131:   if (a->useedgeweights) {PetscFree(values_recv);}
132:   return(0);
133: }

135: static PetscErrorCode MatCreateSubMatrices_MPIAdj_Private(Mat mat,PetscInt n,const IS irow[],const IS icol[],PetscBool subcomm,MatReuse scall,Mat *submat[])
136: {
137:   PetscInt           i,irow_n,icol_n,*sxadj,*sadjncy,*svalues;
138:   PetscInt          *indices,nindx,j,k,loc;
139:   PetscMPIInt        issame;
140:   const PetscInt    *irow_indices,*icol_indices;
141:   MPI_Comm           scomm_row,scomm_col,scomm_mat;
142:   PetscErrorCode     ierr;

145:   nindx = 0;
146:   /*
147:    * Estimate an upper bound on the workspace needed to hold the merged row and column indices
148:    */
149:   for(i=0; i<n; i++){
150:     ISGetLocalSize(irow[i],&irow_n);
151:     ISGetLocalSize(icol[i],&icol_n);
152:     nindx = nindx>(irow_n+icol_n)? nindx:(irow_n+icol_n);
153:   }
154:   PetscCalloc1(nindx,&indices);
155:   /* construct a submat */
156:   for(i=0; i<n; i++){
157:     if (subcomm){
158:       PetscObjectGetComm((PetscObject)irow[i],&scomm_row);
159:       PetscObjectGetComm((PetscObject)icol[i],&scomm_col);
160:       MPI_Comm_compare(scomm_row,scomm_col,&issame);
161:       if (issame != MPI_IDENT) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"row index set must have the same comm as the col index set\n");
162:       MPI_Comm_compare(scomm_row,PETSC_COMM_SELF,&issame);
163:       if (issame == MPI_IDENT) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"cannot use PETSC_COMM_SELF as the comm when extracting a parallel submatrix\n");
164:     } else {
165:       scomm_row = PETSC_COMM_SELF;
166:     }
167:     /* get sub-matrix data */
168:     sxadj = NULL; sadjncy = NULL; svalues = NULL;
169:     MatCreateSubMatrix_MPIAdj_data(mat,irow[i],icol[i],&sxadj,&sadjncy,&svalues);
170:     ISGetLocalSize(irow[i],&irow_n);
171:     ISGetLocalSize(icol[i],&icol_n);
172:     ISGetIndices(irow[i],&irow_indices);
173:     PetscMemcpy(indices,irow_indices,sizeof(PetscInt)*irow_n);
174:     ISRestoreIndices(irow[i],&irow_indices);
175:     ISGetIndices(icol[i],&icol_indices);
176:     PetscMemcpy(indices+irow_n,icol_indices,sizeof(PetscInt)*icol_n);
177:     ISRestoreIndices(icol[i],&icol_indices);
178:     nindx = irow_n+icol_n;
179:     PetscSortRemoveDupsInt(&nindx,indices);
180:     /* renumber columns */
181:     for(j=0; j<irow_n; j++){
182:       for(k=sxadj[j]; k<sxadj[j+1]; k++){
183:         PetscFindInt(sadjncy[k],nindx,indices,&loc);
184: #if defined(PETSC_USE_DEBUG)
185:         if (loc<0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"cannot find column %D",sadjncy[k]);
186: #endif
187:         sadjncy[k] = loc;
188:       }
189:     }
190:     if (scall==MAT_INITIAL_MATRIX){
191:       MatCreateMPIAdj(scomm_row,irow_n,icol_n,sxadj,sadjncy,svalues,submat[i]);
192:     } else {
193:        Mat                sadj = *(submat[i]);
194:        Mat_MPIAdj         *sa  = (Mat_MPIAdj*)((sadj)->data);
195:        PetscObjectGetComm((PetscObject)sadj,&scomm_mat);
196:        MPI_Comm_compare(scomm_row,scomm_mat,&issame);
197:        if (issame != MPI_IDENT) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"submatrix must have the same comm as the index sets\n");
198:        PetscMemcpy(sa->i,sxadj,sizeof(PetscInt)*(irow_n+1));
199:        PetscMemcpy(sa->j,sadjncy,sizeof(PetscInt)*sxadj[irow_n]);
200:        if (svalues){PetscMemcpy(sa->values,svalues,sizeof(PetscInt)*sxadj[irow_n]);}
201:        PetscFree(sxadj);
202:        PetscFree(sadjncy);
203:        if (svalues) {PetscFree(svalues);}
204:     }
205:   }
206:   PetscFree(indices);
207:   return(0);
208: }

210: static PetscErrorCode MatCreateSubMatricesMPI_MPIAdj(Mat mat,PetscInt n, const IS irow[],const IS icol[],MatReuse scall,Mat *submat[])
211: {
212:   PetscErrorCode     ierr;
213:   /* get sub-matrices across a sub-communicator */
215:   MatCreateSubMatrices_MPIAdj_Private(mat,n,irow,icol,PETSC_TRUE,scall,submat);
216:   return(0);
217: }

219: static PetscErrorCode MatCreateSubMatrices_MPIAdj(Mat mat,PetscInt n,const IS irow[],const IS icol[],MatReuse scall,Mat *submat[])
220: {
221:   PetscErrorCode     ierr;

224:   /* get sub-matrices based on PETSC_COMM_SELF */
225:   MatCreateSubMatrices_MPIAdj_Private(mat,n,irow,icol,PETSC_FALSE,scall,submat);
226:   return(0);
227: }

229: static PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
230: {
231:   Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
232:   PetscErrorCode    ierr;
233:   PetscInt          i,j,m = A->rmap->n;
234:   const char        *name;
235:   PetscViewerFormat format;

238:   PetscObjectGetName((PetscObject)A,&name);
239:   PetscViewerGetFormat(viewer,&format);
240:   if (format == PETSC_VIEWER_ASCII_INFO) {
241:     return(0);
242:   } else if (format == PETSC_VIEWER_ASCII_MATLAB) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MATLAB format not supported");
243:   else {
244:     PetscViewerASCIIUseTabs(viewer,PETSC_FALSE);
245:     PetscViewerASCIIPushSynchronized(viewer);
246:     for (i=0; i<m; i++) {
247:       PetscViewerASCIISynchronizedPrintf(viewer,"row %D:",i+A->rmap->rstart);
248:       for (j=a->i[i]; j<a->i[i+1]; j++) {
249:         if (a->values) {
250:           PetscViewerASCIISynchronizedPrintf(viewer," (%D, %D) ",a->j[j], a->values[j]);
251:         } else {
252:           PetscViewerASCIISynchronizedPrintf(viewer," %D ",a->j[j]);
253:         }
254:       }
255:       PetscViewerASCIISynchronizedPrintf(viewer,"\n");
256:     }
257:     PetscViewerASCIIUseTabs(viewer,PETSC_TRUE);
258:     PetscViewerFlush(viewer);
259:     PetscViewerASCIIPopSynchronized(viewer);
260:   }
261:   return(0);
262: }

264: static PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
265: {
267:   PetscBool      iascii;

270:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
271:   if (iascii) {
272:     MatView_MPIAdj_ASCII(A,viewer);
273:   }
274:   return(0);
275: }

277: static PetscErrorCode MatDestroy_MPIAdj(Mat mat)
278: {
279:   Mat_MPIAdj     *a = (Mat_MPIAdj*)mat->data;

283: #if defined(PETSC_USE_LOG)
284:   PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D, NZ=%D",mat->rmap->n,mat->cmap->n,a->nz);
285: #endif
286:   PetscFree(a->diag);
287:   if (a->freeaij) {
288:     if (a->freeaijwithfree) {
289:       if (a->i) free(a->i);
290:       if (a->j) free(a->j);
291:     } else {
292:       PetscFree(a->i);
293:       PetscFree(a->j);
294:       PetscFree(a->values);
295:     }
296:   }
297:   PetscFree(a->rowvalues);
298:   PetscFree(mat->data);
299:   PetscObjectChangeTypeName((PetscObject)mat,0);
300:   PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjSetPreallocation_C",NULL);
301:   PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjCreateNonemptySubcommMat_C",NULL);
302:   return(0);
303: }

305: static PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op,PetscBool flg)
306: {
307:   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;

311:   switch (op) {
312:   case MAT_SYMMETRIC:
313:   case MAT_STRUCTURALLY_SYMMETRIC:
314:   case MAT_HERMITIAN:
315:     a->symmetric = flg;
316:     break;
317:   case MAT_SYMMETRY_ETERNAL:
318:     break;
319:   default:
320:     PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);
321:     break;
322:   }
323:   return(0);
324: }

326: static PetscErrorCode MatGetRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
327: {
328:   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;

332:   row -= A->rmap->rstart;
333:   if (row < 0 || row >= A->rmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");
334:   *nz = a->i[row+1] - a->i[row];
335:   if (v) {
336:     PetscInt j;
337:     if (a->rowvalues_alloc < *nz) {
338:       PetscFree(a->rowvalues);
339:       a->rowvalues_alloc = PetscMax(a->rowvalues_alloc*2, *nz);
340:       PetscMalloc1(a->rowvalues_alloc,&a->rowvalues);
341:     }
342:     for (j=0; j<*nz; j++){
343:       a->rowvalues[j] = a->values ? a->values[a->i[row]+j]:1.0;
344:     }
345:     *v = (*nz) ? a->rowvalues : NULL;
346:   }
347:   if (idx) *idx = (*nz) ? a->j + a->i[row] : NULL;
348:   return(0);
349: }

351: static PetscErrorCode MatRestoreRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
352: {
354:   return(0);
355: }

357: static PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscBool * flg)
358: {
359:   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data,*b = (Mat_MPIAdj*)B->data;
361:   PetscBool      flag;

364:   /* If the matrix dimensions or the numbers of nonzeros differ, the matrices are not equal */
365:   if ((A->rmap->n != B->rmap->n) || (a->nz != b->nz)) {
366:     flag = PETSC_FALSE;
367:   } else {
368:     /* check whether the row offsets a->i are the same */
369:     PetscMemcmp(a->i,b->i,(A->rmap->n+1)*sizeof(PetscInt),&flag);

371:     /* if so, check whether the column indices a->j are the same */
372:     if (flag) {PetscMemcmp(a->j,b->j,(a->nz)*sizeof(PetscInt),&flag);}
373:   }

375:   MPIU_Allreduce(&flag,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));
376:   return(0);
377: }

379: static PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool  *done)
380: {
381:   PetscInt   i;
382:   Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
383:   PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;

386:   *m    = A->rmap->n;
387:   *ia   = a->i;
388:   *ja   = a->j;
389:   *done = PETSC_TRUE;
390:   if (oshift) {
391:     for (i=0; i<(*ia)[*m]; i++) {
392:       (*ja)[i]++;
393:     }
394:     for (i=0; i<=(*m); i++) (*ia)[i]++;
395:   }
396:   return(0);
397: }

399: static PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool  *done)
400: {
401:   PetscInt   i;
402:   Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
403:   PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;

406:   if (ia && a->i != *ia) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"ia passed back is not one obtained with MatGetRowIJ()");
407:   if (ja && a->j != *ja) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"ja passed back is not one obtained with MatGetRowIJ()");
408:   if (oshift) {
409:     if (!ia) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"If oshift is used you must pass in the inia[] argument");
410:     if (!ja) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"If oshift is used you must pass in the inja[] argument");
411:     for (i=0; i<=(*m); i++) (*ia)[i]--;
412:     for (i=0; i<(*ia)[*m]; i++) {
413:       (*ja)[i]--;
414:     }
415:   }
416:   return(0);
417: }

419: PetscErrorCode  MatConvertFrom_MPIAdj(Mat A,MatType type,MatReuse reuse,Mat *newmat)
420: {
421:   Mat               B;
422:   PetscErrorCode    ierr;
423:   PetscInt          i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
424:   const PetscInt    *rj;
425:   const PetscScalar *ra;
426:   MPI_Comm          comm;

429:   MatGetSize(A,NULL,&N);
430:   MatGetLocalSize(A,&m,NULL);
431:   MatGetOwnershipRange(A,&rstart,NULL);

433:   /* count the number of nonzeros per row */
434:   for (i=0; i<m; i++) {
435:     MatGetRow(A,i+rstart,&len,&rj,NULL);
436:     for (j=0; j<len; j++) {
437:       if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
438:     }
439:     nzeros += len;
440:     MatRestoreRow(A,i+rstart,&len,&rj,NULL);
441:   }

443:   /* malloc space for nonzeros */
444:   PetscMalloc1(nzeros+1,&a);
445:   PetscMalloc1(N+1,&ia);
446:   PetscMalloc1(nzeros+1,&ja);

448:   nzeros = 0;
449:   ia[0]  = 0;
450:   for (i=0; i<m; i++) {
451:     MatGetRow(A,i+rstart,&len,&rj,&ra);
452:     cnt  = 0;
453:     for (j=0; j<len; j++) {
454:       if (rj[j] != i+rstart) { /* if not diagonal */
455:         a[nzeros+cnt]    = (PetscInt) PetscAbsScalar(ra[j]);
456:         ja[nzeros+cnt++] = rj[j];
457:       }
458:     }
459:     MatRestoreRow(A,i+rstart,&len,&rj,&ra);
460:     nzeros += cnt;
461:     ia[i+1] = nzeros;
462:   }

464:   PetscObjectGetComm((PetscObject)A,&comm);
465:   MatCreate(comm,&B);
466:   MatSetSizes(B,m,PETSC_DETERMINE,PETSC_DETERMINE,N);
467:   MatSetType(B,type);
468:   MatMPIAdjSetPreallocation(B,ia,ja,a);

470:   if (reuse == MAT_INPLACE_MATRIX) {
471:     MatHeaderReplace(A,&B);
472:   } else {
473:     *newmat = B;
474:   }
475:   return(0);
476: }

478: /* -------------------------------------------------------------------*/
479: static struct _MatOps MatOps_Values = {0,
480:                                        MatGetRow_MPIAdj,
481:                                        MatRestoreRow_MPIAdj,
482:                                        0,
483:                                 /* 4*/ 0,
484:                                        0,
485:                                        0,
486:                                        0,
487:                                        0,
488:                                        0,
489:                                 /*10*/ 0,
490:                                        0,
491:                                        0,
492:                                        0,
493:                                        0,
494:                                 /*15*/ 0,
495:                                        MatEqual_MPIAdj,
496:                                        0,
497:                                        0,
498:                                        0,
499:                                 /*20*/ 0,
500:                                        0,
501:                                        MatSetOption_MPIAdj,
502:                                        0,
503:                                 /*24*/ 0,
504:                                        0,
505:                                        0,
506:                                        0,
507:                                        0,
508:                                 /*29*/ 0,
509:                                        0,
510:                                        0,
511:                                        0,
512:                                        0,
513:                                 /*34*/ 0,
514:                                        0,
515:                                        0,
516:                                        0,
517:                                        0,
518:                                 /*39*/ 0,
519:                                        MatCreateSubMatrices_MPIAdj,
520:                                        0,
521:                                        0,
522:                                        0,
523:                                 /*44*/ 0,
524:                                        0,
525:                                        MatShift_Basic,
526:                                        0,
527:                                        0,
528:                                 /*49*/ 0,
529:                                        MatGetRowIJ_MPIAdj,
530:                                        MatRestoreRowIJ_MPIAdj,
531:                                        0,
532:                                        0,
533:                                 /*54*/ 0,
534:                                        0,
535:                                        0,
536:                                        0,
537:                                        0,
538:                                 /*59*/ 0,
539:                                        MatDestroy_MPIAdj,
540:                                        MatView_MPIAdj,
541:                                        MatConvertFrom_MPIAdj,
542:                                        0,
543:                                 /*64*/ 0,
544:                                        0,
545:                                        0,
546:                                        0,
547:                                        0,
548:                                 /*69*/ 0,
549:                                        0,
550:                                        0,
551:                                        0,
552:                                        0,
553:                                 /*74*/ 0,
554:                                        0,
555:                                        0,
556:                                        0,
557:                                        0,
558:                                 /*79*/ 0,
559:                                        0,
560:                                        0,
561:                                        0,
562:                                        0,
563:                                 /*84*/ 0,
564:                                        0,
565:                                        0,
566:                                        0,
567:                                        0,
568:                                 /*89*/ 0,
569:                                        0,
570:                                        0,
571:                                        0,
572:                                        0,
573:                                 /*94*/ 0,
574:                                        0,
575:                                        0,
576:                                        0,
577:                                        0,
578:                                 /*99*/ 0,
579:                                        0,
580:                                        0,
581:                                        0,
582:                                        0,
583:                                /*104*/ 0,
584:                                        0,
585:                                        0,
586:                                        0,
587:                                        0,
588:                                /*109*/ 0,
589:                                        0,
590:                                        0,
591:                                        0,
592:                                        0,
593:                                /*114*/ 0,
594:                                        0,
595:                                        0,
596:                                        0,
597:                                        0,
598:                                /*119*/ 0,
599:                                        0,
600:                                        0,
601:                                        0,
602:                                        0,
603:                                /*124*/ 0,
604:                                        0,
605:                                        0,
606:                                        0,
607:                                        MatCreateSubMatricesMPI_MPIAdj,
608:                                /*129*/ 0,
609:                                        0,
610:                                        0,
611:                                        0,
612:                                        0,
613:                                /*134*/ 0,
614:                                        0,
615:                                        0,
616:                                        0,
617:                                        0,
618:                                /*139*/ 0,
619:                                        0,
620:                                        0
621: };

623: static PetscErrorCode  MatMPIAdjSetPreallocation_MPIAdj(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
624: {
625:   Mat_MPIAdj     *b = (Mat_MPIAdj*)B->data;
626:   PetscBool       useedgeweights;
628: #if defined(PETSC_USE_DEBUG)
629:   PetscInt ii;
630: #endif

633:   PetscLayoutSetUp(B->rmap);
634:   PetscLayoutSetUp(B->cmap);
635:   if (values) useedgeweights = PETSC_TRUE; else useedgeweights = PETSC_FALSE;
636:   /* Make sure everybody knows whether they are using edge weights or not */
637:   MPIU_Allreduce((int*)&useedgeweights,(int*)&b->useedgeweights,1,MPI_INT,MPI_MAX,PetscObjectComm((PetscObject)B));

639: #if defined(PETSC_USE_DEBUG)
640:   if (i[0] != 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"First i[] index must be zero, instead it is %D\n",i[0]);
641:   for (ii=1; ii<B->rmap->n; ii++) {
642:     if (i[ii] < 0 || i[ii] < i[ii-1]) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i[%D]=%D index is out of range: i[%D]=%D",ii,i[ii],ii-1,i[ii-1]);
643:   }
644:   for (ii=0; ii<i[B->rmap->n]; ii++) {
645:     if (j[ii] < 0 || j[ii] >= B->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column index at position %D is out of range: %D\n",ii,j[ii]);
646:   }
647: #endif
648:   B->preallocated = PETSC_TRUE;

650:   b->j      = j;
651:   b->i      = i;
652:   b->values = values;

654:   b->nz        = i[B->rmap->n];
655:   b->diag      = 0;
656:   b->symmetric = PETSC_FALSE;
657:   b->freeaij   = PETSC_TRUE;

659:   MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);
660:   MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);
661:   return(0);
662: }

664: static PetscErrorCode MatMPIAdjCreateNonemptySubcommMat_MPIAdj(Mat A,Mat *B)
665: {
666:   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
668:   const PetscInt *ranges;
669:   MPI_Comm       acomm,bcomm;
670:   MPI_Group      agroup,bgroup;
671:   PetscMPIInt    i,rank,size,nranks,*ranks;

674:   *B    = NULL;
675:   PetscObjectGetComm((PetscObject)A,&acomm);
676:   MPI_Comm_size(acomm,&size);
677:   MPI_Comm_rank(acomm,&rank);
678:   MatGetOwnershipRanges(A,&ranges);
679:   for (i=0,nranks=0; i<size; i++) {
680:     if (ranges[i+1] - ranges[i] > 0) nranks++;
681:   }
682:   if (nranks == size) {         /* All ranks have a positive number of rows, so we do not need to create a subcomm; */
683:     PetscObjectReference((PetscObject)A);
684:     *B   = A;
685:     return(0);
686:   }

688:   PetscMalloc1(nranks,&ranks);
689:   for (i=0,nranks=0; i<size; i++) {
690:     if (ranges[i+1] - ranges[i] > 0) ranks[nranks++] = i;
691:   }
692:   MPI_Comm_group(acomm,&agroup);
693:   MPI_Group_incl(agroup,nranks,ranks,&bgroup);
694:   PetscFree(ranks);
695:   MPI_Comm_create(acomm,bgroup,&bcomm);
696:   MPI_Group_free(&agroup);
697:   MPI_Group_free(&bgroup);
698:   if (bcomm != MPI_COMM_NULL) {
699:     PetscInt   m,N;
700:     Mat_MPIAdj *b;
701:     MatGetLocalSize(A,&m,NULL);
702:     MatGetSize(A,NULL,&N);
703:     MatCreateMPIAdj(bcomm,m,N,a->i,a->j,a->values,B);
704:     b          = (Mat_MPIAdj*)(*B)->data;
705:     b->freeaij = PETSC_FALSE;
706:     MPI_Comm_free(&bcomm);
707:   }
708:   return(0);
709: }

711: PetscErrorCode  MatMPIAdjToSeq_MPIAdj(Mat A,Mat *B)
712: {
714:   PetscInt       M,N,*II,*J,NZ,nz,m,nzstart,i;
715:   PetscInt       *Values = NULL;
716:   Mat_MPIAdj     *adj = (Mat_MPIAdj*)A->data;
717:   PetscMPIInt    mnz,mm,*allnz,*allm,size,*dispnz,*dispm;

720:   MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);
721:   MatGetSize(A,&M,&N);
722:   MatGetLocalSize(A,&m,NULL);
723:   nz   = adj->nz;
724:   if (adj->i[m] != nz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"nz %D not correct i[m] %D",nz,adj->i[m]);
725:   MPI_Allreduce(&nz,&NZ,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)A));

727:   PetscMPIIntCast(nz,&mnz);
728:   PetscCalloc2(size,&allnz,size,&dispnz);
729:   MPI_Allgather(&mnz,1,MPI_INT,allnz,1,MPI_INT,PetscObjectComm((PetscObject)A));
730:   dispnz[0] = 0; for (i=1; i<size; i++) dispnz[i] = dispnz[i-1]+ allnz[i-1];
731:   if (adj->values) {
732:     PetscMalloc1(NZ,&Values);
733:     MPI_Allgatherv(adj->values,mnz,MPIU_INT,Values,allnz,dispnz,MPIU_INT,PetscObjectComm((PetscObject)A));
734:   }
735:   PetscMalloc1(NZ,&J);
736:   MPI_Allgatherv(adj->j,mnz,MPIU_INT,J,allnz,dispnz,MPIU_INT,PetscObjectComm((PetscObject)A));
737:   PetscFree2(allnz,dispnz);
738:   MPI_Scan(&nz,&nzstart,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)A));
739:   nzstart -= nz;
740:   /* shift the i[] values so they will be correct after being received */
741:   for (i=0; i<m; i++) adj->i[i] += nzstart;
742:   PetscMalloc1(M+1,&II);
743:   PetscMPIIntCast(m,&mm);
744:   PetscMalloc2(size,&allm,size,&dispm);
745:   MPI_Allgather(&mm,1,MPI_INT,allm,1,MPI_INT,PetscObjectComm((PetscObject)A));
746:   dispm[0] = 0; for (i=1; i<size; i++) dispm[i] = dispm[i-1]+ allm[i-1];
747:   MPI_Allgatherv(adj->i,mm,MPIU_INT,II,allm,dispm,MPIU_INT,PetscObjectComm((PetscObject)A));
748:   PetscFree2(allm,dispm);
749:   II[M] = NZ;
750:   /* shift the i[] values back */
751:   for (i=0; i<m; i++) adj->i[i] -= nzstart;
752:   MatCreateMPIAdj(PETSC_COMM_SELF,M,N,II,J,Values,B);
753:   return(0);
754: }

756: /*@
757:    MatMPIAdjCreateNonemptySubcommMat - create the same MPIAdj matrix on a subcommunicator containing only processes owning a positive number of rows

759:    Collective

761:    Input Arguments:
762: .  A - original MPIAdj matrix

764:    Output Arguments:
765: .  B - matrix on subcommunicator, NULL on ranks that owned zero rows of A

767:    Level: developer

769:    Note:
770:    This function is mostly useful for internal use by mesh partitioning packages that require that every process owns at least one row.

772:    The matrix B should be destroyed with MatDestroy(). The arrays are not copied, so B should be destroyed before A is destroyed.

774: .seealso: MatCreateMPIAdj()
775: @*/
776: PetscErrorCode MatMPIAdjCreateNonemptySubcommMat(Mat A,Mat *B)
777: {

782:   PetscUseMethod(A,"MatMPIAdjCreateNonemptySubcommMat_C",(Mat,Mat*),(A,B));
783:   return(0);
784: }
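
/*
   A minimal usage sketch (illustrative only; A is assumed to be an existing MATMPIADJ
   matrix on a communicator where some ranks may own zero rows). Since B shares the
   i/j/values arrays of A, B must be destroyed before A, as noted above:

     Mat            B;
     PetscErrorCode ierr;

     ierr = MatMPIAdjCreateNonemptySubcommMat(A,&B);CHKERRQ(ierr);
     if (B) {
       ierr = MatView(B,PETSC_VIEWER_STDOUT_(PetscObjectComm((PetscObject)B)));CHKERRQ(ierr);
     }
     ierr = MatDestroy(&B);CHKERRQ(ierr);
     ierr = MatDestroy(&A);CHKERRQ(ierr);
*/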

786: /*MC
787:    MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
788:    intended for use in constructing orderings and partitionings.

790:   Level: beginner

792: .seealso: MatCreateMPIAdj
793: M*/
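
/*
   A minimal sketch (illustrative only) of the typical MATMPIADJ workflow with a mesh
   partitioner; mlocal, Nglobal, xadj, and adjncy are placeholders for a CSR-style
   adjacency description that the caller allocates with PetscMalloc1():

     Mat             adj;
     MatPartitioning part;
     IS              is;
     PetscErrorCode  ierr;

     ierr = MatCreateMPIAdj(PETSC_COMM_WORLD,mlocal,Nglobal,xadj,adjncy,NULL,&adj);CHKERRQ(ierr);
     ierr = MatPartitioningCreate(PETSC_COMM_WORLD,&part);CHKERRQ(ierr);
     ierr = MatPartitioningSetAdjacency(part,adj);CHKERRQ(ierr);
     ierr = MatPartitioningSetFromOptions(part);CHKERRQ(ierr);
     ierr = MatPartitioningApply(part,&is);CHKERRQ(ierr);
     ierr = MatPartitioningDestroy(&part);CHKERRQ(ierr);
     ierr = ISDestroy(&is);CHKERRQ(ierr);
     ierr = MatDestroy(&adj);CHKERRQ(ierr);
*/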

795: PETSC_EXTERN PetscErrorCode MatCreate_MPIAdj(Mat B)
796: {
797:   Mat_MPIAdj     *b;

801:   PetscNewLog(B,&b);
802:   B->data      = (void*)b;
803:   PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));
804:   B->assembled = PETSC_FALSE;

806:   PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C",MatMPIAdjSetPreallocation_MPIAdj);
807:   PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjCreateNonemptySubcommMat_C",MatMPIAdjCreateNonemptySubcommMat_MPIAdj);
808:   PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjToSeq_C",MatMPIAdjToSeq_MPIAdj);
809:   PetscObjectChangeTypeName((PetscObject)B,MATMPIADJ);
810:   return(0);
811: }

813: /*@C
814:    MatMPIAdjToSeq - Converts a parallel MPIAdj matrix to a complete MPIAdj matrix on each process (needed by sequential preconditioners)

816:    Logically Collective on MPI_Comm

818:    Input Parameter:
819: .  A - the matrix

821:    Output Parameter:
822: .  B - the same matrix on all processes

824:    Level: intermediate

826: .seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
827: @*/
828: PetscErrorCode  MatMPIAdjToSeq(Mat A,Mat *B)
829: {

833:   PetscUseMethod(A,"MatMPIAdjToSeq_C",(Mat,Mat*),(A,B));
834:   return(0);
835: }
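
/*
   Usage sketch (illustrative only; A is assumed to be a distributed MATMPIADJ matrix):
   gather the whole adjacency graph onto every process as matrix B, e.g. for a sequential
   ordering or preconditioning package. B receives freshly allocated copies of the arrays,
   so A and B can be destroyed independently:

     Mat            B;
     PetscErrorCode ierr;

     ierr = MatMPIAdjToSeq(A,&B);CHKERRQ(ierr);
     ierr = MatDestroy(&B);CHKERRQ(ierr);
*/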

837: /*@C
838:    MatMPIAdjSetPreallocation - Sets the arrays used for storing the adjacency structure (row offsets, column indices, and optional edge weights)

840:    Logically Collective on MPI_Comm

842:    Input Parameters:
843: +  A - the matrix
844: .  i - the indices into j for the start of each row
845: .  j - the column indices for each row (sorted for each row).
846:        The indices in i and j start with zero (NOT with one).
847: -  values - [optional] edge weights

849:    Level: intermediate

851: .seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
852: @*/
853: PetscErrorCode  MatMPIAdjSetPreallocation(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
854: {

858:   PetscTryMethod(B,"MatMPIAdjSetPreallocation_C",(Mat,PetscInt*,PetscInt*,PetscInt*),(B,i,j,values));
859:   return(0);
860: }
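
/*
   A small illustrative sketch (names and sizes are placeholders) of building the arrays on
   a single process (PETSC_COMM_SELF for simplicity): two rows, where row 0 is adjacent to
   columns 1 and 2, and row 1 to columns 0 and 2. The arrays must come from PetscMalloc1()
   because the matrix takes ownership of them:

     Mat            B;
     PetscInt       *ii,*jj;
     PetscErrorCode ierr;

     ierr  = PetscMalloc1(3,&ii);CHKERRQ(ierr);
     ii[0] = 0; ii[1] = 2; ii[2] = 4;
     ierr  = PetscMalloc1(4,&jj);CHKERRQ(ierr);
     jj[0] = 1; jj[1] = 2; jj[2] = 0; jj[3] = 2;
     ierr  = MatCreate(PETSC_COMM_SELF,&B);CHKERRQ(ierr);
     ierr  = MatSetSizes(B,2,PETSC_DETERMINE,PETSC_DETERMINE,3);CHKERRQ(ierr);
     ierr  = MatSetType(B,MATMPIADJ);CHKERRQ(ierr);
     ierr  = MatMPIAdjSetPreallocation(B,ii,jj,NULL);CHKERRQ(ierr);
     ierr  = MatDestroy(&B);CHKERRQ(ierr);
*/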

862: /*@C
863:    MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
864:    The matrix does not have numerical values associated with it, but is
865:    intended for ordering (to reduce bandwidth etc) and partitioning.

867:    Collective on MPI_Comm

869:    Input Parameters:
870: +  comm - MPI communicator
871: .  m - number of local rows
872: .  N - number of global columns
873: .  i - the indices into j for the start of each row
874: .  j - the column indices for each row (sorted for each row).
875:        The indices in i and j start with zero (NOT with one).
876: -  values -[optional] edge weights

878:    Output Parameter:
879: .  A - the matrix

881:    Level: intermediate

883:    Notes:
884:    This matrix object does not support most matrix operations, including
885:    MatSetValues().
886:    You must NOT free the i, j, and values arrays yourself. PETSc will free them
887:    when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
888:    call from Fortran you need not create the arrays with PetscMalloc().
889:    The arrays should not include the matrix diagonal entries.

891:    If you already have a matrix, you can create its adjacency matrix by a call
892:    to MatConvert, specifying a type of MATMPIADJ.

894:    Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC

896: .seealso: MatCreate(), MatConvert(), MatGetOrdering()
897: @*/
898: PetscErrorCode  MatCreateMPIAdj(MPI_Comm comm,PetscInt m,PetscInt N,PetscInt *i,PetscInt *j,PetscInt *values,Mat *A)
899: {

903:   MatCreate(comm,A);
904:   MatSetSizes(*A,m,PETSC_DETERMINE,PETSC_DETERMINE,N);
905:   MatSetType(*A,MATMPIADJ);
906:   MatMPIAdjSetPreallocation(*A,i,j,values);
907:   return(0);
908: }
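
/*
   Illustrative sketch (A is assumed to be an already assembled matrix, for example of type
   MATMPIAIJ): rather than building the i/j arrays by hand, the adjacency structure can be
   obtained with MatConvert(), as mentioned in the Notes above:

     Mat            adj;
     PetscErrorCode ierr;

     ierr = MatConvert(A,MATMPIADJ,MAT_INITIAL_MATRIX,&adj);CHKERRQ(ierr);
     ierr = MatDestroy(&adj);CHKERRQ(ierr);
*/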