Actual source code: mpiadj.c

petsc-3.7.7 2017-09-25
  2: /*
  3:     Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
  4: */
  5: #include <../src/mat/impls/adj/mpi/mpiadj.h>    /*I "petscmat.h" I*/
  6: #include <petscsf.h>

 10: PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
 11: {
 12:   Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
 13:   PetscErrorCode    ierr;
 14:   PetscInt          i,j,m = A->rmap->n;
 15:   const char        *name;
 16:   PetscViewerFormat format;

 19:   PetscObjectGetName((PetscObject)A,&name);
 20:   PetscViewerGetFormat(viewer,&format);
 21:   if (format == PETSC_VIEWER_ASCII_INFO) {
 22:     return(0);
 23:   } else if (format == PETSC_VIEWER_ASCII_MATLAB) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MATLAB format not supported");
 24:   else {
 25:     PetscViewerASCIIUseTabs(viewer,PETSC_FALSE);
 26:     PetscViewerASCIIPushSynchronized(viewer);
 27:     for (i=0; i<m; i++) {
 28:       PetscViewerASCIISynchronizedPrintf(viewer,"row %D:",i+A->rmap->rstart);
 29:       for (j=a->i[i]; j<a->i[i+1]; j++) {
 30:         PetscViewerASCIISynchronizedPrintf(viewer," %D ",a->j[j]);
 31:       }
 32:       PetscViewerASCIISynchronizedPrintf(viewer,"\n");
 33:     }
 34:     PetscViewerASCIIUseTabs(viewer,PETSC_TRUE);
 35:     PetscViewerFlush(viewer);
 36:     PetscViewerASCIIPopSynchronized(viewer);
 37:   }
 38:   return(0);
 39: }

 43: PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
 44: {
 46:   PetscBool      iascii;

 49:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
 50:   if (iascii) {
 51:     MatView_MPIAdj_ASCII(A,viewer);
 52:   }
 53:   return(0);
 54: }
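/*
   Editorial example (not part of the original mpiadj.c): MatView_MPIAdj() above only
   supports ASCII viewers, printing the adjacency structure row by row.  A minimal
   sketch, assuming "adj" is an assembled MATMPIADJ matrix living on PETSC_COMM_WORLD:

     MatView(adj,PETSC_VIEWER_STDOUT_WORLD);

   The PETSC_VIEWER_ASCII_INFO format is handled above by returning without output,
   and the MATLAB format is rejected with an error.
*/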

 58: PetscErrorCode MatDestroy_MPIAdj(Mat mat)
 59: {
 60:   Mat_MPIAdj     *a = (Mat_MPIAdj*)mat->data;

 64: #if defined(PETSC_USE_LOG)
 65:   PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D, NZ=%D",mat->rmap->n,mat->cmap->n,a->nz);
 66: #endif
 67:   PetscFree(a->diag);
 68:   if (a->freeaij) {
 69:     if (a->freeaijwithfree) {
 70:       if (a->i) free(a->i);
 71:       if (a->j) free(a->j);
 72:     } else {
 73:       PetscFree(a->i);
 74:       PetscFree(a->j);
 75:       PetscFree(a->values);
 76:     }
 77:   }
 78:   PetscFree(a->rowvalues);
 79:   PetscFree(mat->data);
 80:   PetscObjectChangeTypeName((PetscObject)mat,0);
 81:   PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjSetPreallocation_C",NULL);
 82:   PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjCreateNonemptySubcommMat_C",NULL);
 83:   return(0);
 84: }

 88: PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op,PetscBool flg)
 89: {
 90:   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;

 94:   switch (op) {
 95:   case MAT_SYMMETRIC:
 96:   case MAT_STRUCTURALLY_SYMMETRIC:
 97:   case MAT_HERMITIAN:
 98:     a->symmetric = flg;
 99:     break;
100:   case MAT_SYMMETRY_ETERNAL:
101:     break;
102:   default:
103:     PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);
104:     break;
105:   }
106:   return(0);
107: }
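/*
   Editorial example (not part of the original mpiadj.c): the only options this type
   honours are the symmetry flags handled above; everything else is ignored with an
   informational message.  A minimal sketch, assuming "adj" is a MATMPIADJ matrix
   whose graph is known to be structurally symmetric:

     MatSetOption(adj,MAT_STRUCTURALLY_SYMMETRIC,PETSC_TRUE);
*/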


110: /*
111:      Adds diagonal pointers to sparse matrix structure.
112: */

116: PetscErrorCode MatMarkDiagonal_MPIAdj(Mat A)
117: {
118:   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
120:   PetscInt       i,j,m = A->rmap->n;

123:   PetscMalloc1(m,&a->diag);
124:   PetscLogObjectMemory((PetscObject)A,m*sizeof(PetscInt));
125:   for (i=0; i<A->rmap->n; i++) {
126:     for (j=a->i[i]; j<a->i[i+1]; j++) {
127:       if (a->j[j] == i) {
128:         a->diag[i] = j;
129:         break;
130:       }
131:     }
132:   }
133:   return(0);
134: }

138: PetscErrorCode MatGetRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
139: {
140:   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;

144:   row -= A->rmap->rstart;
145:   if (row < 0 || row >= A->rmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");
146:   *nz = a->i[row+1] - a->i[row];
147:   if (v) {
148:     PetscInt j;
149:     if (a->rowvalues_alloc < *nz) {
150:       PetscFree(a->rowvalues);
151:       a->rowvalues_alloc = PetscMax(a->rowvalues_alloc*2, *nz);
152:       PetscMalloc1(a->rowvalues_alloc,&a->rowvalues);
153:     }
154:     for (j=0; j<*nz; j++){
155:       a->rowvalues[j] = a->values ? a->values[a->i[row]+j]:1.0;
156:     }
157:     *v = (*nz) ? a->rowvalues : NULL;
158:   }
159:   if (idx) *idx = (*nz) ? a->j + a->i[row] : NULL;
160:   return(0);
161: }

165: PetscErrorCode MatRestoreRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
166: {
168:   return(0);
169: }
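/*
   Editorial example (not part of the original mpiadj.c): traversing the locally owned
   rows through MatGetRow()/MatRestoreRow(), which dispatch to the two routines above.
   A sketch, assuming "adj" is an assembled MATMPIADJ matrix on PETSC_COMM_WORLD; when
   no values array was supplied at creation, the returned weights are all 1.0.

     PetscInt          row,rstart,rend,ncols,k;
     const PetscInt    *cols;
     const PetscScalar *wts;

     MatGetOwnershipRange(adj,&rstart,&rend);
     for (row=rstart; row<rend; row++) {
       MatGetRow(adj,row,&ncols,&cols,&wts);
       for (k=0; k<ncols; k++) {
         PetscSynchronizedPrintf(PETSC_COMM_WORLD,"edge %D -> %D weight %g\n",row,cols[k],(double)PetscRealPart(wts[k]));
       }
       MatRestoreRow(adj,row,&ncols,&cols,&wts);
     }
     PetscSynchronizedFlush(PETSC_COMM_WORLD,PETSC_STDOUT);
*/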

173: PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscBool * flg)
174: {
175:   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data,*b = (Mat_MPIAdj*)B->data;
177:   PetscBool      flag;

180:   /* the matrices differ if the local dimensions or numbers of nonzeros do not match */
181:   if ((A->rmap->n != B->rmap->n) || (a->nz != b->nz)) {
182:     flag = PETSC_FALSE;
183:   } else {
185:     /* otherwise compare the row offsets a->i ... */
186:     PetscMemcmp(a->i,b->i,(A->rmap->n+1)*sizeof(PetscInt),&flag);
188:     /* ... and, if those match, the column indices a->j */
189:     if (flag) {PetscMemcmp(a->j,b->j,(a->nz)*sizeof(PetscInt),&flag);}
190:   }

191:   MPIU_Allreduce(&flag,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));
192:   return(0);
193: }

197: PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool  *done)
198: {
199:   PetscInt   i;
200:   Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
201:   PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;

204:   *m    = A->rmap->n;
205:   *ia   = a->i;
206:   *ja   = a->j;
207:   *done = PETSC_TRUE;
208:   if (oshift) {
209:     for (i=0; i<(*ia)[*m]; i++) {
210:       (*ja)[i]++;
211:     }
212:     for (i=0; i<=(*m); i++) (*ia)[i]++;
213:   }
214:   return(0);
215: }

219: PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool  *done)
220: {
221:   PetscInt   i;
222:   Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
223:   PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;

226:   if (ia && a->i != *ia) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"ia passed back is not one obtained with MatGetRowIJ()");
227:   if (ja && a->j != *ja) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"ja passed back is not one obtained with MatGetRowIJ()");
228:   if (oshift) {
229:     if (!ia) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"If oshift is set you must pass in the inia[] argument");
230:     if (!ja) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"If oshift is set you must pass in the inja[] argument");
231:     for (i=0; i<=(*m); i++) (*ia)[i]--;
232:     for (i=0; i<(*ia)[*m]; i++) {
233:       (*ja)[i]--;
234:     }
235:   }
236:   return(0);
237: }
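/*
   Editorial example (not part of the original mpiadj.c): MatGetRowIJ()/MatRestoreRowIJ()
   expose the CSR arrays of the locally owned rows directly, e.g. to hand them to an
   external package that expects 1-based indices (oshift = 1).  A sketch, assuming
   "adj" is an assembled MATMPIADJ matrix:

     PetscInt       nlocal;
     const PetscInt *ia,*ja;
     PetscBool      done;

     MatGetRowIJ(adj,1,PETSC_FALSE,PETSC_FALSE,&nlocal,&ia,&ja,&done);
     if (done) {
       pass ia (length nlocal+1) and ja to the external package here; both are 1-based
     }
     MatRestoreRowIJ(adj,1,PETSC_FALSE,PETSC_FALSE,&nlocal,&ia,&ja,&done);

   The arrays are the matrix's own storage, shifted in place, so they must not be
   modified and must be restored with the same oshift.
*/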

241: PetscErrorCode  MatConvertFrom_MPIAdj(Mat A,MatType type,MatReuse reuse,Mat *newmat)
242: {
243:   Mat               B;
244:   PetscErrorCode    ierr;
245:   PetscInt          i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
246:   const PetscInt    *rj;
247:   const PetscScalar *ra;
248:   MPI_Comm          comm;

251:   MatGetSize(A,NULL,&N);
252:   MatGetLocalSize(A,&m,NULL);
253:   MatGetOwnershipRange(A,&rstart,NULL);

255:   /* count the number of nonzeros per row */
256:   for (i=0; i<m; i++) {
257:     MatGetRow(A,i+rstart,&len,&rj,NULL);
258:     for (j=0; j<len; j++) {
259:       if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
260:     }
261:     nzeros += len;
262:     MatRestoreRow(A,i+rstart,&len,&rj,NULL);
263:   }

265:   /* malloc space for nonzeros */
266:   PetscMalloc1(nzeros+1,&a);
267:   PetscMalloc1(N+1,&ia);
268:   PetscMalloc1(nzeros+1,&ja);

270:   nzeros = 0;
271:   ia[0]  = 0;
272:   for (i=0; i<m; i++) {
273:     MatGetRow(A,i+rstart,&len,&rj,&ra);
274:     cnt  = 0;
275:     for (j=0; j<len; j++) {
276:       if (rj[j] != i+rstart) { /* if not diagonal */
277:         a[nzeros+cnt]    = (PetscInt) PetscAbsScalar(ra[j]);
278:         ja[nzeros+cnt++] = rj[j];
279:       }
280:     }
281:     MatRestoreRow(A,i+rstart,&len,&rj,&ra);
282:     nzeros += cnt;
283:     ia[i+1] = nzeros;
284:   }

286:   PetscObjectGetComm((PetscObject)A,&comm);
287:   MatCreate(comm,&B);
288:   MatSetSizes(B,m,PETSC_DETERMINE,PETSC_DETERMINE,N);
289:   MatSetType(B,type);
290:   MatMPIAdjSetPreallocation(B,ia,ja,a);

292:   if (reuse == MAT_INPLACE_MATRIX) {
293:     MatHeaderReplace(A,&B);
294:   } else {
295:     *newmat = B;
296:   }
297:   return(0);
298: }
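/*
   Editorial example (not part of the original mpiadj.c): the usual way to reach
   MatConvertFrom_MPIAdj() is MatConvert(), which builds the adjacency graph of an
   existing matrix (dropping the diagonal, as above).  A sketch, assuming "A" is an
   assembled square matrix of some other type, e.g. MPIAIJ:

     Mat adj;

     MatConvert(A,MATMPIADJ,MAT_INITIAL_MATRIX,&adj);
     ... use adj for ordering or partitioning ...
     MatDestroy(&adj);
*/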

300: /* -------------------------------------------------------------------*/
301: static struct _MatOps MatOps_Values = {0,
302:                                        MatGetRow_MPIAdj,
303:                                        MatRestoreRow_MPIAdj,
304:                                        0,
305:                                 /* 4*/ 0,
306:                                        0,
307:                                        0,
308:                                        0,
309:                                        0,
310:                                        0,
311:                                 /*10*/ 0,
312:                                        0,
313:                                        0,
314:                                        0,
315:                                        0,
316:                                 /*15*/ 0,
317:                                        MatEqual_MPIAdj,
318:                                        0,
319:                                        0,
320:                                        0,
321:                                 /*20*/ 0,
322:                                        0,
323:                                        MatSetOption_MPIAdj,
324:                                        0,
325:                                 /*24*/ 0,
326:                                        0,
327:                                        0,
328:                                        0,
329:                                        0,
330:                                 /*29*/ 0,
331:                                        0,
332:                                        0,
333:                                        0,
334:                                        0,
335:                                 /*34*/ 0,
336:                                        0,
337:                                        0,
338:                                        0,
339:                                        0,
340:                                 /*39*/ 0,
341:                                        MatGetSubMatrices_MPIAdj,
342:                                        0,
343:                                        0,
344:                                        0,
345:                                 /*44*/ 0,
346:                                        0,
347:                                        MatShift_Basic,
348:                                        0,
349:                                        0,
350:                                 /*49*/ 0,
351:                                        MatGetRowIJ_MPIAdj,
352:                                        MatRestoreRowIJ_MPIAdj,
353:                                        0,
354:                                        0,
355:                                 /*54*/ 0,
356:                                        0,
357:                                        0,
358:                                        0,
359:                                        0,
360:                                 /*59*/ 0,
361:                                        MatDestroy_MPIAdj,
362:                                        MatView_MPIAdj,
363:                                        MatConvertFrom_MPIAdj,
364:                                        0,
365:                                 /*64*/ 0,
366:                                        0,
367:                                        0,
368:                                        0,
369:                                        0,
370:                                 /*69*/ 0,
371:                                        0,
372:                                        0,
373:                                        0,
374:                                        0,
375:                                 /*74*/ 0,
376:                                        0,
377:                                        0,
378:                                        0,
379:                                        0,
380:                                 /*79*/ 0,
381:                                        0,
382:                                        0,
383:                                        0,
384:                                        0,
385:                                 /*84*/ 0,
386:                                        0,
387:                                        0,
388:                                        0,
389:                                        0,
390:                                 /*89*/ 0,
391:                                        0,
392:                                        0,
393:                                        0,
394:                                        0,
395:                                 /*94*/ 0,
396:                                        0,
397:                                        0,
398:                                        0,
399:                                        0,
400:                                 /*99*/ 0,
401:                                        0,
402:                                        0,
403:                                        0,
404:                                        0,
405:                                /*104*/ 0,
406:                                        0,
407:                                        0,
408:                                        0,
409:                                        0,
410:                                /*109*/ 0,
411:                                        0,
412:                                        0,
413:                                        0,
414:                                        0,
415:                                /*114*/ 0,
416:                                        0,
417:                                        0,
418:                                        0,
419:                                        0,
420:                                /*119*/ 0,
421:                                        0,
422:                                        0,
423:                                        0,
424:                                        0,
425:                                /*124*/ 0,
426:                                        0,
427:                                        0,
428:                                        0,
429:                                        MatGetSubMatricesMPI_MPIAdj,
430:                                /*129*/ 0,
431:                                        0,
432:                                        0,
433:                                        0,
434:                                        0,
435:                                /*134*/ 0,
436:                                        0,
437:                                        0,
438:                                        0,
439:                                        0,
440:                                /*139*/ 0,
441:                                        0,
442:                                        0
443: };

447: static PetscErrorCode  MatMPIAdjSetPreallocation_MPIAdj(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
448: {
449:   Mat_MPIAdj     *b = (Mat_MPIAdj*)B->data;
451: #if defined(PETSC_USE_DEBUG)
452:   PetscInt ii;
453: #endif

456:   PetscLayoutSetUp(B->rmap);
457:   PetscLayoutSetUp(B->cmap);

459: #if defined(PETSC_USE_DEBUG)
460:   if (i[0] != 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"First i[] index must be zero, instead it is %D",i[0]);
461:   for (ii=1; ii<=B->rmap->n; ii++) {
462:     if (i[ii] < 0 || i[ii] < i[ii-1]) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i[%D]=%D index is out of range: i[%D]=%D",ii,i[ii],ii-1,i[ii-1]);
463:   }
464:   for (ii=0; ii<i[B->rmap->n]; ii++) {
465:     if (j[ii] < 0 || j[ii] >= B->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column index j[%D]=%D is out of range",ii,j[ii]);
466:   }
467: #endif
468:   B->preallocated = PETSC_TRUE;

470:   b->j      = j;
471:   b->i      = i;
472:   b->values = values;

474:   b->nz        = i[B->rmap->n];
475:   b->diag      = 0;
476:   b->symmetric = PETSC_FALSE;
477:   b->freeaij   = PETSC_TRUE;

479:   MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);
480:   MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);
481:   return(0);
482: }

484: static PetscErrorCode MatGetSubMatrix_MPIAdj_data(Mat,IS,IS, PetscInt **,PetscInt **,PetscInt **);
485: static PetscErrorCode MatGetSubMatrices_MPIAdj_Private(Mat,PetscInt,const IS[],const IS[],PetscBool,MatReuse,Mat **);

489: PetscErrorCode MatGetSubMatricesMPI_MPIAdj(Mat mat,PetscInt n, const IS irow[],const IS icol[],MatReuse scall,Mat *submat[])
490: {
491:   PetscErrorCode     ierr;
492:   /* get sub-matrices across a subcommunicator */
494:   MatGetSubMatrices_MPIAdj_Private(mat,n,irow,icol,PETSC_TRUE,scall,submat);
495:   return(0);
496: }


501: PetscErrorCode MatGetSubMatrices_MPIAdj(Mat mat,PetscInt n,const IS irow[],const IS icol[],MatReuse scall,Mat *submat[])
502: {
503:   PetscErrorCode     ierr;

506:   /* get sub-matrices on PETSC_COMM_SELF */
507:   MatGetSubMatrices_MPIAdj_Private(mat,n,irow,icol,PETSC_FALSE,scall,submat);
508:   return(0);
509: }
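/*
   Editorial example (not part of the original mpiadj.c): extracting a sequential
   sub-adjacency matrix per process through MatGetSubMatrices(), which lands in
   MatGetSubMatrices_MPIAdj() above.  A sketch; "adj" is an assembled MATMPIADJ
   matrix, and "first" and "nloc" (the first requested global row and the number
   of requested rows) are placeholders chosen by the caller:

     IS  rows;
     Mat *subs;

     ISCreateStride(PETSC_COMM_SELF,nloc,first,1,&rows);
     MatGetSubMatrices(adj,1,&rows,&rows,MAT_INITIAL_MATRIX,&subs);
     ... subs[0] is a MATMPIADJ matrix on PETSC_COMM_SELF ...
     MatDestroyMatrices(1,&subs);
     ISDestroy(&rows);
*/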

513: static PetscErrorCode MatGetSubMatrices_MPIAdj_Private(Mat mat,PetscInt n,const IS irow[],const IS icol[],PetscBool subcomm,MatReuse scall,Mat *submat[])
514: {
515:   PetscInt           i,irow_n,icol_n,*sxadj,*sadjncy,*svalues;
516:   PetscInt          *indices,nindx,j,k,loc;
517:   PetscMPIInt        issame;
518:   const PetscInt    *irow_indices,*icol_indices;
519:   MPI_Comm           scomm_row,scomm_col,scomm_mat;
520:   PetscErrorCode     ierr;

523:   nindx = 0;
524:   /*
525:    * Estimate an upper bound on the scratch space needed for the merged index list
526:    */
527:   for (i=0; i<n; i++) {
528:     ISGetLocalSize(irow[i],&irow_n);
529:     ISGetLocalSize(icol[i],&icol_n);
530:     nindx = PetscMax(nindx,irow_n+icol_n);
531:   }
532:   PetscCalloc1(nindx,&indices);
533:   /* construct each submatrix */
534:   for (i=0; i<n; i++) {
535:     /* communicators */
536:     if (subcomm) {
537:       PetscObjectGetComm((PetscObject)irow[i],&scomm_row);
538:       PetscObjectGetComm((PetscObject)icol[i],&scomm_col);
539:       MPI_Comm_compare(scomm_row,scomm_col,&issame);
540:       if (issame != MPI_IDENT) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Row index set must have the same comm as the column index set");
541:       MPI_Comm_compare(scomm_row,PETSC_COMM_SELF,&issame);
542:       if (issame == MPI_IDENT) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Cannot use PETSC_COMM_SELF as the comm when extracting a parallel submatrix");
543:     } else {
544:       scomm_row = PETSC_COMM_SELF;
545:     }
546:     /* get sub-matrix data */
547:     sxadj = 0; sadjncy = 0; svalues = 0;
548:     MatGetSubMatrix_MPIAdj_data(mat,irow[i],icol[i],&sxadj,&sadjncy,&svalues);
549:     ISGetLocalSize(irow[i],&irow_n);
550:     ISGetLocalSize(icol[i],&icol_n);
551:     ISGetIndices(irow[i],&irow_indices);
552:     PetscMemcpy(indices,irow_indices,sizeof(PetscInt)*irow_n);
553:     ISRestoreIndices(irow[i],&irow_indices);
554:     ISGetIndices(icol[i],&icol_indices);
555:     PetscMemcpy(indices+irow_n,icol_indices,sizeof(PetscInt)*icol_n);
556:     ISRestoreIndices(icol[i],&icol_indices);
557:     nindx = irow_n+icol_n;
558:     PetscSortRemoveDupsInt(&nindx,indices);
559:     /* renumber columns into the local [0,nindx) numbering */
560:     for (j=0; j<irow_n; j++) {
561:       for (k=sxadj[j]; k<sxadj[j+1]; k++) {
562:         PetscFindInt(sadjncy[k],nindx,indices,&loc);
563: #if defined(PETSC_USE_DEBUG)
564:         if (loc < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Cannot find column %D",sadjncy[k]);
565: #endif
566:         sadjncy[k] = loc;
567:       }
568:     }
569:     if (scall == MAT_INITIAL_MATRIX) {
570:       MatCreateMPIAdj(scomm_row,irow_n,icol_n,sxadj,sadjncy,svalues,submat[i]);
571:     } else {
572:       Mat        sadj = *(submat[i]);
573:       Mat_MPIAdj *sa   = (Mat_MPIAdj*)((sadj)->data);
574:       PetscObjectGetComm((PetscObject)sadj,&scomm_mat);
575:       MPI_Comm_compare(scomm_row,scomm_mat,&issame);
576:       if (issame != MPI_IDENT) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Submatrix must have the same comm as the row index set");
577:       PetscMemcpy(sa->i,sxadj,sizeof(PetscInt)*(irow_n+1));
578:       PetscMemcpy(sa->j,sadjncy,sizeof(PetscInt)*sxadj[irow_n]);
579:       if (svalues) {PetscMemcpy(sa->values,svalues,sizeof(PetscInt)*sxadj[irow_n]);}
580:       PetscFree(sxadj);
581:       PetscFree(sadjncy);
582:       if (svalues) {PetscFree(svalues);}
583:     }
584:   }
585:   PetscFree(indices);
586:   return(0);
587: }


592: /*
593:  * This routine serves both MatGetSubMatrix (parallel sub-matrix) and MatGetSubMatrices (sequential sub-matrices)
594:  */
595: static PetscErrorCode MatGetSubMatrix_MPIAdj_data(Mat adj,IS irows, IS icols, PetscInt **sadj_xadj,PetscInt **sadj_adjncy,PetscInt **sadj_values)
596: {
597:   PetscInt                 nlrows_is,icols_n,i,j,nroots,nleaves,owner,rlocalindex,*ncols_send,*ncols_recv;
598:   PetscInt           nlrows_mat,*adjncy_recv,Ncols_recv,Ncols_send,*xadj_recv,*values_recv;
599:   PetscInt          *ncols_recv_offsets,loc,rnclos,*sadjncy,*sxadj,*svalues,isvalue;
600:   const PetscInt    *irows_indices,*icols_indices,*xadj, *adjncy;
601:   Mat_MPIAdj        *a = (Mat_MPIAdj*)adj->data;
602:   PetscLayout        rmap;
603:   MPI_Comm           comm;
604:   PetscSF            sf;
605:   PetscSFNode       *iremote;
606:   PetscBool          done;
607:   PetscErrorCode     ierr;

610:   /* communicator */
611:   PetscObjectGetComm((PetscObject)adj,&comm);
612:   /* Layouts */
613:   MatGetLayouts(adj,&rmap,PETSC_NULL);
614:   /* get rows information */
615:   ISGetLocalSize(irows,&nlrows_is);
616:   ISGetIndices(irows,&irows_indices);
617:   PetscCalloc1(nlrows_is,&iremote);
618:   /* construct sf graph*/
619:   nleaves = nlrows_is;
620:   for (i=0; i<nlrows_is; i++) {
621:     owner       = -1;
622:     rlocalindex = -1;
623:     PetscLayoutFindOwnerIndex(rmap,irows_indices[i],&owner,&rlocalindex);
624:     iremote[i].rank  = owner;
625:     iremote[i].index = rlocalindex;
626:   }
627:   MatGetRowIJ(adj,0,PETSC_FALSE,PETSC_FALSE,&nlrows_mat,&xadj,&adjncy,&done);
628:   PetscCalloc4(nlrows_mat,&ncols_send,nlrows_is,&xadj_recv,nlrows_is+1,&ncols_recv_offsets,nlrows_is,&ncols_recv);
629:   nroots = nlrows_mat;
630:   for (i=0; i<nlrows_mat; i++) {
631:     ncols_send[i] = xadj[i+1]-xadj[i];
632:   }
633:   PetscSFCreate(comm,&sf);
634:   PetscSFSetGraph(sf,nroots,nleaves,PETSC_NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);
635:   PetscSFSetType(sf,PETSCSFBASIC);
636:   PetscSFSetFromOptions(sf);
637:   PetscSFBcastBegin(sf,MPIU_INT,ncols_send,ncols_recv);
638:   PetscSFBcastEnd(sf,MPIU_INT,ncols_send,ncols_recv);
639:   PetscSFBcastBegin(sf,MPIU_INT,xadj,xadj_recv);
640:   PetscSFBcastEnd(sf,MPIU_INT,xadj,xadj_recv);
641:   PetscSFDestroy(&sf);
642:   Ncols_recv = 0;
643:   for (i=0; i<nlrows_is; i++) {
644:     Ncols_recv              += ncols_recv[i];
645:     ncols_recv_offsets[i+1]  = ncols_recv[i]+ncols_recv_offsets[i];
646:   }
647:   Ncols_send = 0;
648:   for (i=0; i<nlrows_mat; i++) {
649:     Ncols_send += ncols_send[i];
650:   }
651:   PetscCalloc1(Ncols_recv,&iremote);
652:   PetscCalloc1(Ncols_recv,&adjncy_recv);
653:   nleaves = Ncols_recv;
654:   Ncols_recv = 0;
655:   for (i=0; i<nlrows_is; i++) {
656:     PetscLayoutFindOwner(rmap,irows_indices[i],&owner);
657:     for (j=0; j<ncols_recv[i]; j++) {
658:       iremote[Ncols_recv].rank    = owner;
659:       iremote[Ncols_recv++].index = xadj_recv[i]+j;
660:     }
661:   }
662:   ISRestoreIndices(irows,&irows_indices);
663:   /* do we need to deal with edge weights? */
664:   if (a->values) {isvalue = 1;} else {isvalue = 0;}
665:   /* this would involve a global communication */
666:   /* MPIU_Allreduce(&isvalue,&isvalue,1,MPIU_INT,MPI_SUM,comm); */
667:   if (isvalue) {PetscCalloc1(Ncols_recv,&values_recv);}
668:   nroots = Ncols_send;
669:   PetscSFCreate(comm,&sf);
670:   PetscSFSetGraph(sf,nroots,nleaves,PETSC_NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);
671:   PetscSFSetType(sf,PETSCSFBASIC);
672:   PetscSFSetFromOptions(sf);
673:   PetscSFBcastBegin(sf,MPIU_INT,adjncy,adjncy_recv);
674:   PetscSFBcastEnd(sf,MPIU_INT,adjncy,adjncy_recv);
675:   if (isvalue) {
676:     PetscSFBcastBegin(sf,MPIU_INT,a->values,values_recv);
677:     PetscSFBcastEnd(sf,MPIU_INT,a->values,values_recv);
678:   }
679:   PetscSFDestroy(&sf);
680:   MatRestoreRowIJ(adj,0,PETSC_FALSE,PETSC_FALSE,&nlrows_mat,&xadj,&adjncy,&done);
681:   ISGetLocalSize(icols,&icols_n);
682:   ISGetIndices(icols,&icols_indices);
683:   rnclos = 0;
684:   for (i=0; i<nlrows_is; i++) {
685:     for (j=ncols_recv_offsets[i]; j<ncols_recv_offsets[i+1]; j++) {
686:       PetscFindInt(adjncy_recv[j],icols_n,icols_indices,&loc);
687:       if (loc < 0) {
688:         adjncy_recv[j] = -1;
689:         if (isvalue) values_recv[j] = -1;
690:         ncols_recv[i]--;
691:       } else {
692:         rnclos++;
693:       }
694:     }
695:   }
696:   ISRestoreIndices(icols,&icols_indices);
697:   PetscCalloc1(rnclos,&sadjncy);
698:   if (isvalue) {PetscCalloc1(rnclos,&svalues);}
699:   PetscCalloc1(nlrows_is+1,&sxadj);
700:   rnclos = 0;
701:   for (i=0; i<nlrows_is; i++) {
702:     for (j=ncols_recv_offsets[i]; j<ncols_recv_offsets[i+1]; j++) {
703:       if (adjncy_recv[j] < 0) continue;
704:       sadjncy[rnclos] = adjncy_recv[j];
705:       if (isvalue) svalues[rnclos] = values_recv[j];
706:       rnclos++;
707:     }
708:   }
709:   for (i=0; i<nlrows_is; i++) {
710:     sxadj[i+1] = sxadj[i]+ncols_recv[i];
711:   }
712:   if (sadj_xadj)   {*sadj_xadj   = sxadj;}   else {PetscFree(sxadj);}
713:   if (sadj_adjncy) {*sadj_adjncy = sadjncy;} else {PetscFree(sadjncy);}
714:   if (sadj_values) {
715:     if (isvalue) *sadj_values = svalues; else *sadj_values = 0;
716:   } else {
717:     if (isvalue) {PetscFree(svalues);}
718:   }
719:   PetscFree4(ncols_send,xadj_recv,ncols_recv_offsets,ncols_recv);
720:   PetscFree(adjncy_recv);
721:   if (isvalue) {PetscFree(values_recv);}
722:   return(0);
723: }


728: static PetscErrorCode MatMPIAdjCreateNonemptySubcommMat_MPIAdj(Mat A,Mat *B)
729: {
730:   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
732:   const PetscInt *ranges;
733:   MPI_Comm       acomm,bcomm;
734:   MPI_Group      agroup,bgroup;
735:   PetscMPIInt    i,rank,size,nranks,*ranks;

738:   *B    = NULL;
739:   PetscObjectGetComm((PetscObject)A,&acomm);
740:   MPI_Comm_size(acomm,&size);
741:   MPI_Comm_rank(acomm,&rank);
742:   MatGetOwnershipRanges(A,&ranges);
743:   for (i=0,nranks=0; i<size; i++) {
744:     if (ranges[i+1] - ranges[i] > 0) nranks++;
745:   }
746:   if (nranks == size) {         /* All ranks have a positive number of rows, so we do not need to create a subcomm; */
747:     PetscObjectReference((PetscObject)A);
748:     *B   = A;
749:     return(0);
750:   }

752:   PetscMalloc1(nranks,&ranks);
753:   for (i=0,nranks=0; i<size; i++) {
754:     if (ranges[i+1] - ranges[i] > 0) ranks[nranks++] = i;
755:   }
756:   MPI_Comm_group(acomm,&agroup);
757:   MPI_Group_incl(agroup,nranks,ranks,&bgroup);
758:   PetscFree(ranks);
759:   MPI_Comm_create(acomm,bgroup,&bcomm);
760:   MPI_Group_free(&agroup);
761:   MPI_Group_free(&bgroup);
762:   if (bcomm != MPI_COMM_NULL) {
763:     PetscInt   m,N;
764:     Mat_MPIAdj *b;
765:     MatGetLocalSize(A,&m,NULL);
766:     MatGetSize(A,NULL,&N);
767:     MatCreateMPIAdj(bcomm,m,N,a->i,a->j,a->values,B);
768:     b          = (Mat_MPIAdj*)(*B)->data;
769:     b->freeaij = PETSC_FALSE;
770:     MPI_Comm_free(&bcomm);
771:   }
772:   return(0);
773: }

777: /*@
778:    MatMPIAdjCreateNonemptySubcommMat - create the same MPIAdj matrix on a subcommunicator containing only processes owning a positive number of rows

780:    Collective

782:    Input Arguments:
783: .  A - original MPIAdj matrix

785:    Output Arguments:
786: .  B - matrix on subcommunicator, NULL on ranks that owned zero rows of A

788:    Level: developer

790:    Note:
791:    This function is mostly useful for internal use by mesh partitioning packages that require that every process owns at least one row.

793:    The matrix B should be destroyed with MatDestroy(). The arrays are not copied, so B should be destroyed before A is destroyed.

795: .seealso: MatCreateMPIAdj()
796: @*/
797: PetscErrorCode MatMPIAdjCreateNonemptySubcommMat(Mat A,Mat *B)
798: {

803:   PetscUseMethod(A,"MatMPIAdjCreateNonemptySubcommMat_C",(Mat,Mat*),(A,B));
804:   return(0);
805: }
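/*
   Editorial example (not part of the original mpiadj.c): a partitioner that cannot
   handle ranks owning zero rows can be driven through the subcommunicator matrix
   produced above.  A sketch, assuming "adj" is a MATMPIADJ matrix on which some
   ranks may own no rows:

     Mat adjsub;

     MatMPIAdjCreateNonemptySubcommMat(adj,&adjsub);
     if (adjsub) {
       ... partition adjsub on its subcommunicator ...
       MatDestroy(&adjsub);
     }

   Since the CSR arrays are shared rather than copied, adjsub must be destroyed
   before adj.
*/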

807: /*MC
808:    MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
809:    intended for use in constructing orderings and partitionings.

811:   Level: beginner

813: .seealso: MatCreateMPIAdj
814: M*/

818: PETSC_EXTERN PetscErrorCode MatCreate_MPIAdj(Mat B)
819: {
820:   Mat_MPIAdj     *b;

824:   PetscNewLog(B,&b);
825:   B->data      = (void*)b;
826:   PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));
827:   B->assembled = PETSC_FALSE;

829:   PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C",MatMPIAdjSetPreallocation_MPIAdj);
830:   PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjCreateNonemptySubcommMat_C",MatMPIAdjCreateNonemptySubcommMat_MPIAdj);
831:   PetscObjectChangeTypeName((PetscObject)B,MATMPIADJ);
832:   return(0);
833: }
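/*
   Editorial example (not part of the original mpiadj.c): the typical consumer of a
   MATMPIADJ matrix is a MatPartitioning object.  A sketch, assuming "adj" is an
   assembled MATMPIADJ adjacency matrix:

     MatPartitioning part;
     IS              is;

     MatPartitioningCreate(PetscObjectComm((PetscObject)adj),&part);
     MatPartitioningSetAdjacency(part,adj);
     MatPartitioningSetFromOptions(part);
     MatPartitioningApply(part,&is);
     ... is gives the target partition of each locally owned row ...
     ISDestroy(&is);
     MatPartitioningDestroy(&part);
*/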

837: /*@C
838:    MatMPIAdjSetPreallocation - Sets the arrays used for storing the matrix structure and optional edge weights

840:    Logically Collective on MPI_Comm

842:    Input Parameters:
843: +  A - the matrix
844: .  i - the indices into j for the start of each row
845: .  j - the column indices for each row (sorted for each row).
846:        The indices in i and j start with zero (NOT with one).
847: -  values - [optional] edge weights

849:    Level: intermediate

851: .seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
852: @*/
853: PetscErrorCode  MatMPIAdjSetPreallocation(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
854: {

858:   PetscTryMethod(B,"MatMPIAdjSetPreallocation_C",(Mat,PetscInt*,PetscInt*,PetscInt*),(B,i,j,values));
859:   return(0);
860: }
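/*
   Editorial example (not part of the original mpiadj.c): the two-step creation path
   that ends up in MatMPIAdjSetPreallocation_MPIAdj() above.  A sketch; "comm", "m",
   "N", "ii", "jj" and "weights" are placeholders for the communicator, local row
   count, global column count, and PetscMalloc()'d CSR arrays supplied by the caller:

     Mat adj;

     MatCreate(comm,&adj);
     MatSetSizes(adj,m,PETSC_DETERMINE,PETSC_DETERMINE,N);
     MatSetType(adj,MATMPIADJ);
     MatMPIAdjSetPreallocation(adj,ii,jj,weights);

   This is what MatCreateMPIAdj() below does in a single call; the matrix takes
   ownership of ii, jj and weights and frees them in MatDestroy_MPIAdj().
*/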

864: /*@C
865:    MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
866:    The matrix does not have numerical values associated with it, but is
867:    intended for ordering (to reduce bandwidth etc) and partitioning.

869:    Collective on MPI_Comm

871:    Input Parameters:
872: +  comm - MPI communicator
873: .  m - number of local rows
874: .  N - number of global columns
875: .  i - the indices into j for the start of each row
876: .  j - the column indices for each row (sorted for each row).
877:        The indices in i and j start with zero (NOT with one).
878: -  values -[optional] edge weights

880:    Output Parameter:
881: .  A - the matrix

883:    Level: intermediate

885:    Notes: This matrix object does not support most matrix operations, including
886:    MatSetValues().
887:    You must NOT free the i, j, and values arrays yourself. PETSc will free them
888:    when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
889:    call from Fortran you need not create the arrays with PetscMalloc().
890:    The i and j arrays should not include the matrix diagonal entries.

892:    If you already have a matrix, you can create its adjacency matrix by a call
893:    to MatConvert, specifying a type of MATMPIADJ.

895:    Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC

897: .seealso: MatCreate(), MatConvert(), MatGetOrdering()
898: @*/
899: PetscErrorCode  MatCreateMPIAdj(MPI_Comm comm,PetscInt m,PetscInt N,PetscInt *i,PetscInt *j,PetscInt *values,Mat *A)
900: {

904:   MatCreate(comm,A);
905:   MatSetSizes(*A,m,PETSC_DETERMINE,PETSC_DETERMINE,N);
906:   MatSetType(*A,MATMPIADJ);
907:   MatMPIAdjSetPreallocation(*A,i,j,values);
908:   return(0);
909: }
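/*
   Editorial example (not part of the original mpiadj.c): building the adjacency
   matrix of a 1-D chain graph with one vertex (row) per process.  A sketch; the
   arrays are allocated with PetscMalloc1() because the matrix takes ownership of
   them and frees them when it is destroyed, and no diagonal entries are stored:

     PetscInt    *ii,*jj;
     PetscMPIInt rank,size;
     Mat         adj;

     MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
     MPI_Comm_size(PETSC_COMM_WORLD,&size);
     PetscMalloc1(2,&ii);
     PetscMalloc1(2,&jj);
     ii[0] = 0;
     ii[1] = (size == 1) ? 0 : ((rank == 0 || rank == size-1) ? 1 : 2);
     if (rank > 0)      jj[0]       = rank-1;
     if (rank < size-1) jj[ii[1]-1] = rank+1;
     MatCreateMPIAdj(PETSC_COMM_WORLD,1,size,ii,jj,NULL,&adj);
     ... use adj, e.g. with a MatPartitioning object ...
     MatDestroy(&adj);
*/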