Actual source code: mpiadj.c
petsc-3.4.5 2014-06-29
/*
    Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
*/
#include <../src/mat/impls/adj/mpi/mpiadj.h> /*I "petscmat.h" I*/
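
/*
   The MPIADJ format stores each process's local rows in compressed-row (CSR) style:
   a->i[k] is the offset into a->j of the first column index of local row k, and
   a->values (if provided) holds one integer weight per entry of a->j. As a minimal
   illustrative sketch (not part of the original source), the 3-vertex path graph
   0-1-2 stored on a single process would use

       PetscInt i[] = {0,1,3,4};     row 0 has 1 neighbor, row 1 has 2, row 2 has 1
       PetscInt j[] = {1,0,2,1};     neighbors of rows 0, 1, 2 (no diagonal entries)

   Arrays handed to MatMPIAdjSetPreallocation() become owned by the matrix and are
   freed when it is destroyed (see MatDestroy_MPIAdj() below).
*/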
PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
{
  Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode    ierr;
  PetscInt          i,j,m = A->rmap->n;
  const char        *name;
  PetscViewerFormat format;

  PetscObjectGetName((PetscObject)A,&name);
  PetscViewerGetFormat(viewer,&format);
  if (format == PETSC_VIEWER_ASCII_INFO) {
    return(0);
  } else if (format == PETSC_VIEWER_ASCII_MATLAB) {
    SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MATLAB format not supported");
  } else {
    PetscViewerASCIIUseTabs(viewer,PETSC_FALSE);
    PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);
    for (i=0; i<m; i++) {
      PetscViewerASCIISynchronizedPrintf(viewer,"row %D:",i+A->rmap->rstart);
      for (j=a->i[i]; j<a->i[i+1]; j++) {
        PetscViewerASCIISynchronizedPrintf(viewer," %D ",a->j[j]);
      }
      PetscViewerASCIISynchronizedPrintf(viewer,"\n");
    }
    PetscViewerASCIIUseTabs(viewer,PETSC_TRUE);
    PetscViewerFlush(viewer);
    PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);
  }
  return(0);
}
PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
{
  PetscBool iascii;

  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
  if (iascii) {
    MatView_MPIAdj_ASCII(A,viewer);
  }
  return(0);
}
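
/*
   Example (a sketch, not part of the original source): viewing the adjacency list.
   Only the ASCII viewer is handled above; each locally owned row is printed as
   "row <global row>: <neighbors>". "adj" is an assumed MATMPIADJ matrix.

     MatView(adj,PETSC_VIEWER_STDOUT_WORLD);
*/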
PetscErrorCode MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)mat->data;

#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D, NZ=%D",mat->rmap->n,mat->cmap->n,a->nz);
#endif
  PetscFree(a->diag);
  if (a->freeaij) {
    if (a->freeaijwithfree) {
      if (a->i) free(a->i);
      if (a->j) free(a->j);
    } else {
      PetscFree(a->i);
      PetscFree(a->j);
      PetscFree(a->values);
    }
  }
  PetscFree(mat->data);
  PetscObjectChangeTypeName((PetscObject)mat,0);
  PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjSetPreallocation_C",NULL);
  PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjCreateNonemptySubcommMat_C",NULL);
  return(0);
}
PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op,PetscBool flg)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;

  switch (op) {
  case MAT_SYMMETRIC:
  case MAT_STRUCTURALLY_SYMMETRIC:
  case MAT_HERMITIAN:
    a->symmetric = flg;
    break;
  case MAT_SYMMETRY_ETERNAL:
    break;
  default:
    PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);
    break;
  }
  return(0);
}
/*
    Adds diagonal pointers to sparse matrix structure.
*/
PetscErrorCode MatMarkDiagonal_MPIAdj(Mat A)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
  PetscInt   i,j,m = A->rmap->n;

  PetscMalloc(m*sizeof(PetscInt),&a->diag);
  PetscLogObjectMemory(A,m*sizeof(PetscInt));
  for (i=0; i<A->rmap->n; i++) {
    for (j=a->i[i]; j<a->i[i+1]; j++) {
      if (a->j[j] == i) {
        a->diag[i] = j;
        break;
      }
    }
  }
  return(0);
}
PetscErrorCode MatGetRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
  PetscInt   *itmp;

  row -= A->rmap->rstart;
  if (row < 0 || row >= A->rmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");

  *nz = a->i[row+1] - a->i[row];
  if (v) *v = NULL;
  if (idx) {
    itmp = a->j + a->i[row];
    if (*nz) {
      *idx = itmp;
    } else *idx = 0;
  }
  return(0);
}
PetscErrorCode MatRestoreRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  return(0);
}
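
/*
   Example (a sketch, not part of the original source): looping over the neighbors
   of the locally owned rows through the generic MatGetRow()/MatRestoreRow()
   interface. The values pointer comes back NULL because MPIADJ carries no
   numerical values. "adj" is an assumed MATMPIADJ matrix.

     PetscInt       r,rstart,rend,ncols;
     const PetscInt *cols;
     MatGetOwnershipRange(adj,&rstart,&rend);
     for (r=rstart; r<rend; r++) {
       MatGetRow(adj,r,&ncols,&cols,NULL);
       // cols[0..ncols-1] are the vertices adjacent to vertex r
       MatRestoreRow(adj,r,&ncols,&cols,NULL);
     }
*/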
PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscBool *flg)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data,*b = (Mat_MPIAdj*)B->data;
  PetscBool  flag;

  /* if the local dimensions or numbers of nonzeros differ, the matrices differ */
  if ((A->rmap->n != B->rmap->n) || (a->nz != b->nz)) {
    flag = PETSC_FALSE;
  } else {
    /* compare the row pointers a->i */
    PetscMemcmp(a->i,b->i,(A->rmap->n+1)*sizeof(PetscInt),&flag);
    if (flag) {
      /* compare the column indices a->j */
      PetscMemcmp(a->j,b->j,(a->nz)*sizeof(PetscInt),&flag);
    }
  }
  MPI_Allreduce(&flag,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));
  return(0);
}
PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool *done)
{
  PetscInt   i;
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
  PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;

  *m    = A->rmap->n;
  *ia   = a->i;
  *ja   = a->j;
  *done = PETSC_TRUE;
  if (oshift) {
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]++;
    }
    for (i=0; i<=(*m); i++) (*ia)[i]++;
  }
  return(0);
}
PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool *done)
{
  PetscInt   i;
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
  PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;

  if (ia && a->i != *ia) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"ia passed back is not one obtained with MatGetRowIJ()");
  if (ja && a->j != *ja) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"ja passed back is not one obtained with MatGetRowIJ()");
  if (oshift) {
    for (i=0; i<=(*m); i++) (*ia)[i]--;
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]--;
    }
  }
  return(0);
}
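
/*
   Example (a sketch, not part of the original source): obtaining the adjacency
   structure in 1-based (Fortran-style) indexing, as some partitioning libraries
   expect. Note that the shift is applied in place to the matrix's own arrays, so
   MatRestoreRowIJ() must be called with the same oshift to undo it. "adj" is an
   assumed MATMPIADJ matrix.

     PetscInt       n;
     const PetscInt *ia,*ja;
     PetscBool      done;
     MatGetRowIJ(adj,1,PETSC_FALSE,PETSC_FALSE,&n,&ia,&ja,&done);
     if (done) {
       // ia[0..n] and ja[] now use 1-based offsets; hand them to the partitioner here
       MatRestoreRowIJ(adj,1,PETSC_FALSE,PETSC_FALSE,&n,&ia,&ja,&done);
     }
*/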
PetscErrorCode MatConvertFrom_MPIAdj(Mat A,MatType type,MatReuse reuse,Mat *newmat)
{
  Mat               B;
  PetscErrorCode    ierr;
  PetscInt          i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
  const PetscInt    *rj;
  const PetscScalar *ra;
  MPI_Comm          comm;

  MatGetSize(A,NULL,&N);
  MatGetLocalSize(A,&m,NULL);
  MatGetOwnershipRange(A,&rstart,NULL);

  /* count the number of nonzeros per row */
  for (i=0; i<m; i++) {
    MatGetRow(A,i+rstart,&len,&rj,NULL);
    for (j=0; j<len; j++) {
      if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
    }
    nzeros += len;
    MatRestoreRow(A,i+rstart,&len,&rj,NULL);
  }

  /* malloc space for nonzeros */
  PetscMalloc((nzeros+1)*sizeof(PetscInt),&a);
  PetscMalloc((N+1)*sizeof(PetscInt),&ia);
  PetscMalloc((nzeros+1)*sizeof(PetscInt),&ja);

  nzeros = 0;
  ia[0]  = 0;
  for (i=0; i<m; i++) {
    MatGetRow(A,i+rstart,&len,&rj,&ra);
    cnt = 0;
    for (j=0; j<len; j++) {
      if (rj[j] != i+rstart) { /* if not diagonal */
        a[nzeros+cnt]    = (PetscInt) PetscAbsScalar(ra[j]);
        ja[nzeros+cnt++] = rj[j];
      }
    }
    MatRestoreRow(A,i+rstart,&len,&rj,&ra);
    nzeros += cnt;
    ia[i+1] = nzeros;
  }

  PetscObjectGetComm((PetscObject)A,&comm);
  MatCreate(comm,&B);
  MatSetSizes(B,m,PETSC_DETERMINE,PETSC_DETERMINE,N);
  MatSetType(B,type);
  MatMPIAdjSetPreallocation(B,ia,ja,a);

  if (reuse == MAT_REUSE_MATRIX) {
    MatHeaderReplace(A,B);
  } else {
    *newmat = B;
  }
  return(0);
}
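
/*
   Example (a sketch, not part of the original source): building the adjacency
   representation of an existing assembled matrix, which is the usual way the
   conversion routine above is reached. The off-diagonal structure of A is kept,
   and the absolute values of its entries, truncated to PetscInt, become the edge
   weights. "A" is an assumed assembled matrix (for example MATAIJ).

     Mat adj;
     MatConvert(A,MATMPIADJ,MAT_INITIAL_MATRIX,&adj);
     // use adj for ordering or partitioning
     MatDestroy(&adj);
*/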
/* -------------------------------------------------------------------*/
static struct _MatOps MatOps_Values = {0,
                                       MatGetRow_MPIAdj,
                                       MatRestoreRow_MPIAdj,
                                       0,
                                /* 4*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*10*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*15*/ 0,
                                       MatEqual_MPIAdj,
                                       0,
                                       0,
                                       0,
                                /*20*/ 0,
                                       0,
                                       MatSetOption_MPIAdj,
                                       0,
                                /*24*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*29*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*34*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*39*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*44*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*49*/ 0,
                                       MatGetRowIJ_MPIAdj,
                                       MatRestoreRowIJ_MPIAdj,
                                       0,
                                       0,
                                /*54*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*59*/ 0,
                                       MatDestroy_MPIAdj,
                                       MatView_MPIAdj,
                                       MatConvertFrom_MPIAdj,
                                       0,
                                /*64*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*69*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*74*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*79*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*84*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*89*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*94*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*99*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*104*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*109*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*114*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*119*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*124*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*129*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*134*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*139*/ 0,
                                       0
};
static PetscErrorCode MatMPIAdjSetPreallocation_MPIAdj(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{
  Mat_MPIAdj *b = (Mat_MPIAdj*)B->data;
#if defined(PETSC_USE_DEBUG)
  PetscInt ii;
#endif

  PetscLayoutSetUp(B->rmap);
  PetscLayoutSetUp(B->cmap);

#if defined(PETSC_USE_DEBUG)
  if (i[0] != 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"First i[] index must be zero, instead it is %D\n",i[0]);
  for (ii=1; ii<B->rmap->n; ii++) {
    if (i[ii] < 0 || i[ii] < i[ii-1]) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i[%D]=%D index is out of range: i[%D]=%D",ii,i[ii],ii-1,i[ii-1]);
  }
  for (ii=0; ii<i[B->rmap->n]; ii++) {
    if (j[ii] < 0 || j[ii] >= B->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column index %D out of range %D\n",ii,j[ii]);
  }
#endif
  B->preallocated = PETSC_TRUE;

  b->j      = j;
  b->i      = i;
  b->values = values;

  b->nz        = i[B->rmap->n];
  b->diag      = 0;
  b->symmetric = PETSC_FALSE;
  b->freeaij   = PETSC_TRUE;

  MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);
  return(0);
}
static PetscErrorCode MatMPIAdjCreateNonemptySubcommMat_MPIAdj(Mat A,Mat *B)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  const PetscInt *ranges;
  MPI_Comm       acomm,bcomm;
  MPI_Group      agroup,bgroup;
  PetscMPIInt    i,rank,size,nranks,*ranks;

  *B = NULL;
  PetscObjectGetComm((PetscObject)A,&acomm);
  MPI_Comm_size(acomm,&size);
  MPI_Comm_rank(acomm,&rank);
  MatGetOwnershipRanges(A,&ranges);
  for (i=0,nranks=0; i<size; i++) {
    if (ranges[i+1] - ranges[i] > 0) nranks++;
  }
  if (nranks == size) { /* All ranks have a positive number of rows, so we do not need to create a subcomm */
    PetscObjectReference((PetscObject)A);
    *B = A;
    return(0);
  }

  PetscMalloc(nranks*sizeof(PetscMPIInt),&ranks);
  for (i=0,nranks=0; i<size; i++) {
    if (ranges[i+1] - ranges[i] > 0) ranks[nranks++] = i;
  }
  MPI_Comm_group(acomm,&agroup);
  MPI_Group_incl(agroup,nranks,ranks,&bgroup);
  PetscFree(ranks);
  MPI_Comm_create(acomm,bgroup,&bcomm);
  MPI_Group_free(&agroup);
  MPI_Group_free(&bgroup);
  if (bcomm != MPI_COMM_NULL) {
    PetscInt   m,N;
    Mat_MPIAdj *b;
    MatGetLocalSize(A,&m,NULL);
    MatGetSize(A,NULL,&N);
    MatCreateMPIAdj(bcomm,m,N,a->i,a->j,a->values,B);
    b          = (Mat_MPIAdj*)(*B)->data;
    b->freeaij = PETSC_FALSE;
    MPI_Comm_free(&bcomm);
  }
  return(0);
}
/*@
   MatMPIAdjCreateNonemptySubcommMat - create the same MPIAdj matrix on a subcommunicator containing only processes owning a positive number of rows

   Collective

   Input Arguments:
.  A - original MPIAdj matrix

   Output Arguments:
.  B - matrix on subcommunicator, NULL on ranks that owned zero rows of A

   Level: developer

   Note:
   This function is mostly useful for internal use by mesh partitioning packages that require that every process owns at least one row.

   The matrix B should be destroyed with MatDestroy(). The arrays are not copied, so B should be destroyed before A is destroyed.

.seealso: MatCreateMPIAdj()
@*/
PetscErrorCode MatMPIAdjCreateNonemptySubcommMat(Mat A,Mat *B)
{
  PetscUseMethod(A,"MatMPIAdjCreateNonemptySubcommMat_C",(Mat,Mat*),(A,B));
  return(0);
}
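
/*
   Example (a sketch, not part of the original source): shrinking the communicator
   before calling a partitioner that cannot tolerate empty ranks. "adj" is an
   assumed MATMPIADJ matrix that may have zero local rows on some processes.

     Mat adjsub;
     MatMPIAdjCreateNonemptySubcommMat(adj,&adjsub);
     if (adjsub) {
       // only ranks that own rows get here; partition adjsub, then clean up
       MatDestroy(&adjsub);   // destroy before adj, since the i/j arrays are shared
     }
*/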
/*MC
   MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
   intended for use constructing orderings and partitionings.

   Level: beginner

.seealso: MatCreateMPIAdj
M*/
PETSC_EXTERN PetscErrorCode MatCreate_MPIAdj(Mat B)
{
  Mat_MPIAdj *b;

  PetscNewLog(B,Mat_MPIAdj,&b);
  B->data      = (void*)b;
  PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));
  B->assembled = PETSC_FALSE;

  PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C",MatMPIAdjSetPreallocation_MPIAdj);
  PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjCreateNonemptySubcommMat_C",MatMPIAdjCreateNonemptySubcommMat_MPIAdj);
  PetscObjectChangeTypeName((PetscObject)B,MATMPIADJ);
  return(0);
}
/*@C
   MatMPIAdjSetPreallocation - Sets the arrays used to store the matrix structure

   Logically Collective on MPI_Comm

   Input Parameters:
+  B - the matrix
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Level: intermediate

.seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
@*/
PetscErrorCode MatMPIAdjSetPreallocation(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{
  PetscTryMethod(B,"MatMPIAdjSetPreallocation_C",(Mat,PetscInt*,PetscInt*,PetscInt*),(B,i,j,values));
  return(0);
}
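
/*
   Example (a sketch, not part of the original source; a single MPI process is
   assumed for brevity): assembling an MPIADJ matrix through the generic
   MatCreate()/MatSetType() path rather than MatCreateMPIAdj(). The arrays must be
   obtained with PetscMalloc() because the matrix takes ownership of them and frees
   them in MatDestroy_MPIAdj().

     Mat      adj;
     PetscInt *ii,*jj;
     PetscMalloc(3*sizeof(PetscInt),&ii);    // 2 local rows -> 3 row offsets
     PetscMalloc(2*sizeof(PetscInt),&jj);
     ii[0] = 0; ii[1] = 1; ii[2] = 2;        // one neighbor per row
     jj[0] = 1; jj[1] = 0;                   // the single edge 0-1, no diagonal entries
     MatCreate(PETSC_COMM_WORLD,&adj);
     MatSetSizes(adj,2,PETSC_DETERMINE,PETSC_DETERMINE,2);
     MatSetType(adj,MATMPIADJ);
     MatMPIAdjSetPreallocation(adj,ii,jj,NULL);
*/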
/*@C
   MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
   The matrix does not have numerical values associated with it, but is
   intended for ordering (to reduce bandwidth etc) and partitioning.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of local rows
.  N - number of global columns
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Output Parameter:
.  A - the matrix

   Level: intermediate

   Notes: This matrix object does not support most matrix operations, including
   MatSetValues().
   You must NOT free the i, j, and values arrays yourself. PETSc will free them
   when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
   call from Fortran you need not create the arrays with PetscMalloc().
   The adjacency list should not include the matrix diagonal entries.

   If you already have a matrix, you can create its adjacency matrix by a call
   to MatConvert, specifying a type of MATMPIADJ.

   Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC

.seealso: MatCreate(), MatConvert(), MatGetOrdering()
@*/
PetscErrorCode MatCreateMPIAdj(MPI_Comm comm,PetscInt m,PetscInt N,PetscInt *i,PetscInt *j,PetscInt *values,Mat *A)
{
  MatCreate(comm,A);
  MatSetSizes(*A,m,PETSC_DETERMINE,PETSC_DETERMINE,N);
  MatSetType(*A,MATMPIADJ);
  MatMPIAdjSetPreallocation(*A,i,j,values);
  return(0);
}
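
/*
   Example (a sketch, not part of the original source; a single MPI process is
   assumed for brevity): the 4-vertex cycle 0-1-2-3-0 created with MatCreateMPIAdj()
   and handed to a partitioner, which is the typical use of this matrix type.

     Mat             adj;
     MatPartitioning part;
     IS              is;
     PetscInt        *ii,*jj;
     PetscMalloc(5*sizeof(PetscInt),&ii);
     PetscMalloc(8*sizeof(PetscInt),&jj);
     ii[0] = 0; ii[1] = 2; ii[2] = 4; ii[3] = 6; ii[4] = 8;   // two neighbors per vertex
     jj[0] = 1; jj[1] = 3;  jj[2] = 0; jj[3] = 2;             // sorted neighbors of vertices 0 and 1
     jj[4] = 1; jj[5] = 3;  jj[6] = 0; jj[7] = 2;             // sorted neighbors of vertices 2 and 3
     MatCreateMPIAdj(PETSC_COMM_WORLD,4,4,ii,jj,NULL,&adj);
     MatPartitioningCreate(PETSC_COMM_WORLD,&part);
     MatPartitioningSetAdjacency(part,adj);
     MatPartitioningSetFromOptions(part);
     MatPartitioningApply(part,&is);
     ISDestroy(&is);
     MatPartitioningDestroy(&part);
     MatDestroy(&adj);   // also frees ii and jj
*/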