Actual source code: mpiptap.c

petsc-3.14.6 2021-03-30

  2: /*
  3:   Defines projective product routines where A is an MPIAIJ matrix
  4:           C = P^T * A * P
  5: */

  7: #include <../src/mat/impls/aij/seq/aij.h>
  8: #include <../src/mat/utils/freespace.h>
  9: #include <../src/mat/impls/aij/mpi/mpiaij.h>
 10: #include <petscbt.h>
 11: #include <petsctime.h>
 12: #include <petsc/private/hashmapiv.h>
 13: #include <petsc/private/hashseti.h>
 14: #include <petscsf.h>
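
/*
  Usage sketch (editorial addition, not part of the original file): MatPtAP() is the
  public entry point that eventually dispatches to the symbolic/numeric routines in
  this file. A and P are assumed to be assembled MPIAIJ matrices; fill is the
  caller's estimate of nnz(C)/(nnz(A)+nnz(P)).
*/
PETSC_STATIC_INLINE PetscErrorCode MatPtAPUsageSketch(Mat A,Mat P)
{
  Mat C;

  MatPtAP(A,P,MAT_INITIAL_MATRIX,2.0,&C); /* symbolic pass + first numeric pass */
  MatPtAP(A,P,MAT_REUSE_MATRIX,2.0,&C);   /* values of A changed: numeric pass only */
  MatDestroy(&C);
  return(0);
}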

 16: PetscErrorCode MatView_MPIAIJ_PtAP(Mat A,PetscViewer viewer)
 17: {
 18:   PetscErrorCode    ierr;
 19:   PetscBool         iascii;
 20:   PetscViewerFormat format;
 21:   Mat_APMPI         *ptap;

 24:   MatCheckProduct(A,1);
 25:   ptap = (Mat_APMPI*)A->product->data;
 26:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
 27:   if (iascii) {
 28:     PetscViewerGetFormat(viewer,&format);
 29:     if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
 30:       if (ptap->algType == 0) {
 31:         PetscViewerASCIIPrintf(viewer,"using scalable MatPtAP() implementation\n");
 32:       } else if (ptap->algType == 1) {
 33:         PetscViewerASCIIPrintf(viewer,"using nonscalable MatPtAP() implementation\n");
 34:       } else if (ptap->algType == 2) {
 35:         PetscViewerASCIIPrintf(viewer,"using allatonce MatPtAP() implementation\n");
 36:       } else if (ptap->algType == 3) {
 37:         PetscViewerASCIIPrintf(viewer,"using merged allatonce MatPtAP() implementation\n");
 38:       }
 39:     }
 40:   }
 41:   return(0);
 42: }
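
/*
  Sketch (editorial) of how the viewer callback above is reached: pushing an INFO
  format before MatView() on the product matrix prints which PtAP implementation
  was selected; the command-line equivalent is -mat_view ::ascii_info.
*/
PETSC_STATIC_INLINE PetscErrorCode MatPtAPViewSketch(Mat C)
{
  PetscViewer viewer = PETSC_VIEWER_STDOUT_WORLD;

  PetscViewerPushFormat(viewer,PETSC_VIEWER_ASCII_INFO);
  MatView(C,viewer);
  PetscViewerPopFormat(viewer);
  return(0);
}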

 44: PetscErrorCode MatDestroy_MPIAIJ_PtAP(void *data)
 45: {
 46:   PetscErrorCode      ierr;
 47:   Mat_APMPI           *ptap = (Mat_APMPI*)data;
 48:   Mat_Merge_SeqsToMPI *merge;

 51:   PetscFree2(ptap->startsj_s,ptap->startsj_r);
 52:   PetscFree(ptap->bufa);
 53:   MatDestroy(&ptap->P_loc);
 54:   MatDestroy(&ptap->P_oth);
 55:   MatDestroy(&ptap->A_loc); /* used by MatTransposeMatMult() */
 56:   MatDestroy(&ptap->Rd);
 57:   MatDestroy(&ptap->Ro);
 58:   if (ptap->AP_loc) { /* used by alg_rap */
 59:     Mat_SeqAIJ *ap = (Mat_SeqAIJ*)(ptap->AP_loc)->data;
 60:     PetscFree(ap->i);
 61:     PetscFree2(ap->j,ap->a);
 62:     MatDestroy(&ptap->AP_loc);
 63:   } else { /* used by alg_ptap */
 64:     PetscFree(ptap->api);
 65:     PetscFree(ptap->apj);
 66:   }
 67:   MatDestroy(&ptap->C_loc);
 68:   MatDestroy(&ptap->C_oth);
 69:   if (ptap->apa) {PetscFree(ptap->apa);}

 71:   MatDestroy(&ptap->Pt);

 73:   merge = ptap->merge;
 74:   if (merge) { /* used by alg_ptap */
 75:     PetscFree(merge->id_r);
 76:     PetscFree(merge->len_s);
 77:     PetscFree(merge->len_r);
 78:     PetscFree(merge->bi);
 79:     PetscFree(merge->bj);
 80:     PetscFree(merge->buf_ri[0]);
 81:     PetscFree(merge->buf_ri);
 82:     PetscFree(merge->buf_rj[0]);
 83:     PetscFree(merge->buf_rj);
 84:     PetscFree(merge->coi);
 85:     PetscFree(merge->coj);
 86:     PetscFree(merge->owners_co);
 87:     PetscLayoutDestroy(&merge->rowmap);
 88:     PetscFree(ptap->merge);
 89:   }
 90:   ISLocalToGlobalMappingDestroy(&ptap->ltog);

 92:   PetscSFDestroy(&ptap->sf);
 93:   PetscFree(ptap->c_othi);
 94:   PetscFree(ptap->c_rmti);
 95:   PetscFree(ptap);
 96:   return(0);
 97: }

 99: PetscErrorCode MatPtAPNumeric_MPIAIJ_MPIAIJ_scalable(Mat A,Mat P,Mat C)
100: {
101:   PetscErrorCode    ierr;
102:   Mat_MPIAIJ        *a=(Mat_MPIAIJ*)A->data,*p=(Mat_MPIAIJ*)P->data;
103:   Mat_SeqAIJ        *ad=(Mat_SeqAIJ*)(a->A)->data,*ao=(Mat_SeqAIJ*)(a->B)->data;
104:   Mat_SeqAIJ        *ap,*p_loc,*p_oth=NULL,*c_seq;
105:   Mat_APMPI         *ptap;
106:   Mat               AP_loc,C_loc,C_oth;
107:   PetscInt          i,rstart,rend,cm,ncols,row,*api,*apj,am = A->rmap->n,apnz,nout;
108:   PetscScalar       *apa;
109:   const PetscInt    *cols;
110:   const PetscScalar *vals;

113:   MatCheckProduct(C,3);
114:   ptap = (Mat_APMPI*)C->product->data;
115:   if (!ptap) SETERRQ(PetscObjectComm((PetscObject)C),PETSC_ERR_ARG_WRONGSTATE,"PtAP cannot be computed. Missing data");
116:   if (!ptap->AP_loc) SETERRQ(PetscObjectComm((PetscObject)C),PETSC_ERR_ARG_WRONGSTATE,"PtAP cannot be reused. Do not call MatProductClear()");

118:   MatZeroEntries(C);

120:   /* 1) get R = Pd^T,Ro = Po^T */
121:   if (ptap->reuse == MAT_REUSE_MATRIX) {
122:     MatTranspose(p->A,MAT_REUSE_MATRIX,&ptap->Rd);
123:     MatTranspose(p->B,MAT_REUSE_MATRIX,&ptap->Ro);
124:   }

126:   /* 2) get AP_loc */
127:   AP_loc = ptap->AP_loc;
128:   ap = (Mat_SeqAIJ*)AP_loc->data;

130:   /* 2-1) get P_oth = ptap->P_oth  and P_loc = ptap->P_loc */
131:   /*-----------------------------------------------------*/
132:   if (ptap->reuse == MAT_REUSE_MATRIX) {
 133:     /* P_oth and P_loc are obtained in MatPtAPSymbolic() when reuse == MAT_INITIAL_MATRIX */
134:     MatGetBrowsOfAoCols_MPIAIJ(A,P,MAT_REUSE_MATRIX,&ptap->startsj_s,&ptap->startsj_r,&ptap->bufa,&ptap->P_oth);
135:     MatMPIAIJGetLocalMat(P,MAT_REUSE_MATRIX,&ptap->P_loc);
136:   }

138:   /* 2-2) compute numeric A_loc*P - dominating part */
139:   /* ---------------------------------------------- */
140:   /* get data from symbolic products */
141:   p_loc = (Mat_SeqAIJ*)(ptap->P_loc)->data;
142:   if (ptap->P_oth) p_oth = (Mat_SeqAIJ*)(ptap->P_oth)->data;

144:   api   = ap->i;
145:   apj   = ap->j;
146:   ISLocalToGlobalMappingApply(ptap->ltog,api[AP_loc->rmap->n],apj,apj);
147:   for (i=0; i<am; i++) {
 148:     /* AP[i,:] = A[i,:]*P = Ad*P_loc + Ao*P_oth */
149:     apnz = api[i+1] - api[i];
150:     apa = ap->a + api[i];
151:     PetscArrayzero(apa,apnz);
152:     AProw_scalable(i,ad,ao,p_loc,p_oth,api,apj,apa);
153:   }
154:   ISGlobalToLocalMappingApply(ptap->ltog,IS_GTOLM_DROP,api[AP_loc->rmap->n],apj,&nout,apj);
155:   if (api[AP_loc->rmap->n] != nout) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Incorrect mapping %D != %D\n",api[AP_loc->rmap->n],nout);

157:   /* 3) C_loc = Rd*AP_loc, C_oth = Ro*AP_loc */
 158:   /* Always use the scalable kernel since this is the MPI scalable code path */
159:   MatMatMultNumeric_SeqAIJ_SeqAIJ_Scalable(ptap->Rd,AP_loc,ptap->C_loc);
160:   MatMatMultNumeric_SeqAIJ_SeqAIJ_Scalable(ptap->Ro,AP_loc,ptap->C_oth);

162:   C_loc = ptap->C_loc;
163:   C_oth = ptap->C_oth;

 165:   /* add C_loc and Co to C */
166:   MatGetOwnershipRange(C,&rstart,&rend);

168:   /* C_loc -> C */
169:   cm    = C_loc->rmap->N;
170:   c_seq = (Mat_SeqAIJ*)C_loc->data;
171:   cols = c_seq->j;
172:   vals = c_seq->a;
173:   ISLocalToGlobalMappingApply(ptap->ltog,c_seq->i[C_loc->rmap->n],c_seq->j,c_seq->j);

175:   /* The (fast) MatSetValues_MPIAIJ_CopyFromCSRFormat function can only be used when C->was_assembled is PETSC_FALSE and */
176:   /* when there are no off-processor parts.  */
177:   /* If was_assembled is true, then the statement aj[rowstart_diag+dnz_row] = mat_j[col] - cstart; in MatSetValues_MPIAIJ_CopyFromCSRFormat */
178:   /* is no longer true. Then the more complex function MatSetValues_MPIAIJ() has to be used, where the column index is looked up from */
179:   /* a table, and other, more complex stuff has to be done. */
180:   if (C->assembled) {
181:     C->was_assembled = PETSC_TRUE;
182:     C->assembled     = PETSC_FALSE;
183:   }
184:   if (C->was_assembled) {
185:     for (i=0; i<cm; i++) {
186:       ncols = c_seq->i[i+1] - c_seq->i[i];
187:       row = rstart + i;
188:       MatSetValues_MPIAIJ(C,1,&row,ncols,cols,vals,ADD_VALUES);
189:       cols += ncols; vals += ncols;
190:     }
191:   } else {
192:     MatSetValues_MPIAIJ_CopyFromCSRFormat(C,c_seq->j,c_seq->i,c_seq->a);
193:   }
194:   ISGlobalToLocalMappingApply(ptap->ltog,IS_GTOLM_DROP,c_seq->i[C_loc->rmap->n],c_seq->j,&nout,c_seq->j);
195:   if (c_seq->i[C_loc->rmap->n] != nout) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Incorrect mapping %D != %D\n",c_seq->i[C_loc->rmap->n],nout);

197:   /* Co -> C, off-processor part */
198:   cm = C_oth->rmap->N;
199:   c_seq = (Mat_SeqAIJ*)C_oth->data;
200:   cols = c_seq->j;
201:   vals = c_seq->a;
202:   ISLocalToGlobalMappingApply(ptap->ltog,c_seq->i[C_oth->rmap->n],c_seq->j,c_seq->j);
203:   for (i=0; i<cm; i++) {
204:     ncols = c_seq->i[i+1] - c_seq->i[i];
205:     row = p->garray[i];
206:     MatSetValues(C,1,&row,ncols,cols,vals,ADD_VALUES);
207:     cols += ncols; vals += ncols;
208:   }
209:   MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);
210:   MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);

212:   ptap->reuse = MAT_REUSE_MATRIX;

214:   ISGlobalToLocalMappingApply(ptap->ltog,IS_GTOLM_DROP,c_seq->i[C_oth->rmap->n],c_seq->j,&nout,c_seq->j);
 215:   if (c_seq->i[C_oth->rmap->n] != nout) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Incorrect mapping %D != %D\n",c_seq->i[C_oth->rmap->n],nout);
216:   return(0);
217: }
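
/*
  Sketch (editorial) of the row kernel that dominates the numeric phase above.
  AProw_scalable() accumulates AP[i,:] = sum_k A[i,k]*P[k,:] directly into the
  sparsity pattern (apj,apa) fixed by the symbolic phase. A simplified stand-in,
  assuming sorted column lists and that each P row's columns are a subset of apj:
*/
PETSC_STATIC_INLINE void APRowSketch(PetscInt anz,const PetscInt *acols,const PetscScalar *avals,
                                     const PetscInt *pi,const PetscInt *pj,const PetscScalar *pa,
                                     const PetscInt *apj,PetscScalar *apa)
{
  PetscInt j,k,pos;

  for (j=0; j<anz; j++) {                  /* for each nonzero A[i,k] */
    const PetscInt    prow = acols[j];
    const PetscScalar ra   = avals[j];
    pos = 0;
    for (k=pi[prow]; k<pi[prow+1]; k++) {  /* merge P[prow,:] into the AP row */
      while (apj[pos] != pj[k]) pos++;     /* both lists sorted: linear scan merge */
      apa[pos] += ra*pa[k];
    }
  }
}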

219: PetscErrorCode MatPtAPSymbolic_MPIAIJ_MPIAIJ_scalable(Mat A,Mat P,PetscReal fill,Mat Cmpi)
220: {
221:   PetscErrorCode      ierr;
222:   Mat_APMPI           *ptap;
223:   Mat_MPIAIJ          *a=(Mat_MPIAIJ*)A->data,*p=(Mat_MPIAIJ*)P->data;
224:   MPI_Comm            comm;
225:   PetscMPIInt         size,rank;
226:   Mat                 P_loc,P_oth;
227:   PetscFreeSpaceList  free_space=NULL,current_space=NULL;
228:   PetscInt            am=A->rmap->n,pm=P->rmap->n,pN=P->cmap->N,pn=P->cmap->n;
229:   PetscInt            *lnk,i,k,pnz,row,nsend;
230:   PetscMPIInt         tagi,tagj,*len_si,*len_s,*len_ri,nrecv;
231:   PETSC_UNUSED PetscMPIInt icompleted=0;
232:   PetscInt            **buf_rj,**buf_ri,**buf_ri_k;
233:   PetscInt            len,proc,*dnz,*onz,*owners,nzi,nspacedouble;
234:   PetscInt            nrows,*buf_s,*buf_si,*buf_si_i,**nextrow,**nextci;
235:   MPI_Request         *swaits,*rwaits;
236:   MPI_Status          *sstatus,rstatus;
237:   PetscLayout         rowmap;
238:   PetscInt            *owners_co,*coi,*coj;    /* i and j array of (p->B)^T*A*P - used in the communication */
239:   PetscMPIInt         *len_r,*id_r;    /* array of length of comm->size, store send/recv matrix values */
240:   PetscInt            *api,*apj,*Jptr,apnz,*prmap=p->garray,con,j,Crmax,*aj,*ai,*pi,nout;
241:   Mat_SeqAIJ          *p_loc,*p_oth=NULL,*ad=(Mat_SeqAIJ*)(a->A)->data,*ao=NULL,*c_loc,*c_oth;
242:   PetscScalar         *apv;
243:   PetscTable          ta;
244:   MatType             mtype;
245:   const char          *prefix;
246: #if defined(PETSC_USE_INFO)
247:   PetscReal           apfill;
248: #endif

251:   MatCheckProduct(Cmpi,4);
252:   if (Cmpi->product->data) SETERRQ(PetscObjectComm((PetscObject)Cmpi),PETSC_ERR_PLIB,"Product data not empty");
253:   PetscObjectGetComm((PetscObject)A,&comm);
254:   MPI_Comm_size(comm,&size);
255:   MPI_Comm_rank(comm,&rank);

257:   if (size > 1) ao = (Mat_SeqAIJ*)(a->B)->data;

259:   /* create symbolic parallel matrix Cmpi */
260:   MatGetType(A,&mtype);
261:   MatSetType(Cmpi,mtype);

 263:   /* create struct Mat_APMPI and attach it to C later */
264:   PetscNew(&ptap);
265:   ptap->reuse = MAT_INITIAL_MATRIX;
266:   ptap->algType = 0;

268:   /* get P_oth by taking rows of P (= non-zero cols of local A) from other processors */
269:   MatGetBrowsOfAoCols_MPIAIJ(A,P,MAT_INITIAL_MATRIX,&ptap->startsj_s,&ptap->startsj_r,&ptap->bufa,&P_oth);
270:   /* get P_loc by taking all local rows of P */
271:   MatMPIAIJGetLocalMat(P,MAT_INITIAL_MATRIX,&P_loc);

273:   ptap->P_loc = P_loc;
274:   ptap->P_oth = P_oth;

276:   /* (0) compute Rd = Pd^T, Ro = Po^T  */
277:   /* --------------------------------- */
278:   MatTranspose(p->A,MAT_INITIAL_MATRIX,&ptap->Rd);
279:   MatTranspose(p->B,MAT_INITIAL_MATRIX,&ptap->Ro);

281:   /* (1) compute symbolic AP = A_loc*P = Ad*P_loc + Ao*P_oth (api,apj) */
282:   /* ----------------------------------------------------------------- */
283:   p_loc  = (Mat_SeqAIJ*)P_loc->data;
284:   if (P_oth) p_oth = (Mat_SeqAIJ*)P_oth->data;

286:   /* create and initialize a linked list */
 287:   PetscTableCreate(pn,pN,&ta); /* for computing AP_loc and Cmpi */
288:   MatRowMergeMax_SeqAIJ(p_loc,P_loc->rmap->N,ta);
289:   MatRowMergeMax_SeqAIJ(p_oth,P_oth->rmap->N,ta);
290:   PetscTableGetCount(ta,&Crmax); /* Crmax = nnz(sum of Prows) */

292:   PetscLLCondensedCreate_Scalable(Crmax,&lnk);

294:   /* Initial FreeSpace size is fill*(nnz(A) + nnz(P)) */
295:   if (ao) {
296:     PetscFreeSpaceGet(PetscRealIntMultTruncate(fill,PetscIntSumTruncate(ad->i[am],PetscIntSumTruncate(ao->i[am],p_loc->i[pm]))),&free_space);
297:   } else {
298:     PetscFreeSpaceGet(PetscRealIntMultTruncate(fill,PetscIntSumTruncate(ad->i[am],p_loc->i[pm])),&free_space);
299:   }
300:   current_space = free_space;
301:   nspacedouble  = 0;

303:   PetscMalloc1(am+1,&api);
304:   api[0] = 0;
305:   for (i=0; i<am; i++) {
306:     /* diagonal portion: Ad[i,:]*P */
307:     ai = ad->i; pi = p_loc->i;
308:     nzi = ai[i+1] - ai[i];
309:     aj  = ad->j + ai[i];
310:     for (j=0; j<nzi; j++) {
311:       row  = aj[j];
312:       pnz  = pi[row+1] - pi[row];
313:       Jptr = p_loc->j + pi[row];
314:       /* add non-zero cols of P into the sorted linked list lnk */
315:       PetscLLCondensedAddSorted_Scalable(pnz,Jptr,lnk);
316:     }
317:     /* off-diagonal portion: Ao[i,:]*P */
318:     if (ao) {
319:       ai = ao->i; pi = p_oth->i;
320:       nzi = ai[i+1] - ai[i];
321:       aj  = ao->j + ai[i];
322:       for (j=0; j<nzi; j++) {
323:         row  = aj[j];
324:         pnz  = pi[row+1] - pi[row];
325:         Jptr = p_oth->j + pi[row];
326:         PetscLLCondensedAddSorted_Scalable(pnz,Jptr,lnk);
327:       }
328:     }
329:     apnz     = lnk[0];
330:     api[i+1] = api[i] + apnz;

332:     /* if free space is not available, double the total space in the list */
333:     if (current_space->local_remaining<apnz) {
334:       PetscFreeSpaceGet(PetscIntSumTruncate(apnz,current_space->total_array_size),&current_space);
335:       nspacedouble++;
336:     }

338:     /* Copy data into free space, then initialize lnk */
339:     PetscLLCondensedClean_Scalable(apnz,current_space->array,lnk);

341:     current_space->array           += apnz;
342:     current_space->local_used      += apnz;
343:     current_space->local_remaining -= apnz;
344:   }
345:   /* Allocate space for apj and apv, initialize apj, and */
346:   /* destroy list of free space and other temporary array(s) */
347:   PetscCalloc2(api[am],&apj,api[am],&apv);
348:   PetscFreeSpaceContiguous(&free_space,apj);
349:   PetscLLCondensedDestroy_Scalable(lnk);

351:   /* Create AP_loc for reuse */
352:   MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,am,pN,api,apj,apv,&ptap->AP_loc);
353:   MatSeqAIJCompactOutExtraColumns_SeqAIJ(ptap->AP_loc, &ptap->ltog);

355: #if defined(PETSC_USE_INFO)
356:   if (ao) {
357:     apfill = (PetscReal)api[am]/(ad->i[am]+ao->i[am]+p_loc->i[pm]+1);
358:   } else {
359:     apfill = (PetscReal)api[am]/(ad->i[am]+p_loc->i[pm]+1);
360:   }
361:   ptap->AP_loc->info.mallocs           = nspacedouble;
362:   ptap->AP_loc->info.fill_ratio_given  = fill;
363:   ptap->AP_loc->info.fill_ratio_needed = apfill;

365:   if (api[am]) {
366:     PetscInfo3(ptap->AP_loc,"Scalable algorithm, AP_loc reallocs %D; Fill ratio: given %g needed %g.\n",nspacedouble,(double)fill,(double)apfill);
 367:     PetscInfo1(ptap->AP_loc,"Use MatPtAP(A,P,MatReuse,%g,&C) for best AP_loc performance.\n",(double)apfill);
368:   } else {
 369:     PetscInfo(ptap->AP_loc,"Scalable algorithm, AP_loc is empty\n");
370:   }
371: #endif

373:   /* (2-1) compute symbolic Co = Ro*AP_loc  */
374:   /* -------------------------------------- */
375:   MatProductCreate(ptap->Ro,ptap->AP_loc,NULL,&ptap->C_oth);
376:   MatGetOptionsPrefix(A,&prefix);
377:   MatSetOptionsPrefix(ptap->C_oth,prefix);
378:   MatAppendOptionsPrefix(ptap->C_oth,"inner_offdiag_");

380:   MatProductSetType(ptap->C_oth,MATPRODUCT_AB);
381:   MatProductSetAlgorithm(ptap->C_oth,"sorted");
382:   MatProductSetFill(ptap->C_oth,fill);
383:   MatProductSetFromOptions(ptap->C_oth);
384:   MatProductSymbolic(ptap->C_oth);

386:   /* (3) send coj of C_oth to other processors  */
387:   /* ------------------------------------------ */
388:   /* determine row ownership */
389:   PetscLayoutCreate(comm,&rowmap);
390:   rowmap->n  = pn;
391:   rowmap->bs = 1;
392:   PetscLayoutSetUp(rowmap);
393:   owners = rowmap->range;

395:   /* determine the number of messages to send, their lengths */
396:   PetscMalloc4(size,&len_s,size,&len_si,size,&sstatus,size+2,&owners_co);
397:   PetscArrayzero(len_s,size);
398:   PetscArrayzero(len_si,size);

400:   c_oth = (Mat_SeqAIJ*)ptap->C_oth->data;
401:   coi   = c_oth->i; coj = c_oth->j;
402:   con   = ptap->C_oth->rmap->n;
403:   proc  = 0;
404:   ISLocalToGlobalMappingApply(ptap->ltog,coi[con],coj,coj);
405:   for (i=0; i<con; i++) {
406:     while (prmap[i] >= owners[proc+1]) proc++;
407:     len_si[proc]++;               /* num of rows in Co(=Pt*AP) to be sent to [proc] */
408:     len_s[proc] += coi[i+1] - coi[i]; /* num of nonzeros in Co to be sent to [proc] */
409:   }

411:   len          = 0; /* max length of buf_si[], see (4) */
412:   owners_co[0] = 0;
413:   nsend        = 0;
414:   for (proc=0; proc<size; proc++) {
415:     owners_co[proc+1] = owners_co[proc] + len_si[proc];
416:     if (len_s[proc]) {
417:       nsend++;
418:       len_si[proc] = 2*(len_si[proc] + 1); /* length of buf_si to be sent to [proc] */
419:       len         += len_si[proc];
420:     }
421:   }

423:   /* determine the number and length of messages to receive for coi and coj  */
424:   PetscGatherNumberOfMessages(comm,NULL,len_s,&nrecv);
425:   PetscGatherMessageLengths2(comm,nsend,nrecv,len_s,len_si,&id_r,&len_r,&len_ri);

427:   /* post the Irecv and Isend of coj */
428:   PetscCommGetNewTag(comm,&tagj);
429:   PetscPostIrecvInt(comm,tagj,nrecv,id_r,len_r,&buf_rj,&rwaits);
430:   PetscMalloc1(nsend+1,&swaits);
431:   for (proc=0, k=0; proc<size; proc++) {
432:     if (!len_s[proc]) continue;
433:     i    = owners_co[proc];
434:     MPI_Isend(coj+coi[i],len_s[proc],MPIU_INT,proc,tagj,comm,swaits+k);
435:     k++;
436:   }

438:   /* (2-2) compute symbolic C_loc = Rd*AP_loc */
439:   /* ---------------------------------------- */
440:   MatProductCreate(ptap->Rd,ptap->AP_loc,NULL,&ptap->C_loc);
441:   MatProductSetType(ptap->C_loc,MATPRODUCT_AB);
442:   MatProductSetAlgorithm(ptap->C_loc,"default");
443:   MatProductSetFill(ptap->C_loc,fill);

445:   MatSetOptionsPrefix(ptap->C_loc,prefix);
446:   MatAppendOptionsPrefix(ptap->C_loc,"inner_diag_");

448:   MatProductSetFromOptions(ptap->C_loc);
449:   MatProductSymbolic(ptap->C_loc);

451:   c_loc = (Mat_SeqAIJ*)ptap->C_loc->data;
452:   ISLocalToGlobalMappingApply(ptap->ltog,c_loc->i[ptap->C_loc->rmap->n],c_loc->j,c_loc->j);

454:   /* receives coj are complete */
455:   for (i=0; i<nrecv; i++) {
456:     MPI_Waitany(nrecv,rwaits,&icompleted,&rstatus);
457:   }
458:   PetscFree(rwaits);
459:   if (nsend) {MPI_Waitall(nsend,swaits,sstatus);}

461:   /* add received column indices into ta to update Crmax */
462:   for (k=0; k<nrecv; k++) {/* k-th received message */
463:     Jptr = buf_rj[k];
464:     for (j=0; j<len_r[k]; j++) {
465:       PetscTableAdd(ta,*(Jptr+j)+1,1,INSERT_VALUES);
466:     }
467:   }
468:   PetscTableGetCount(ta,&Crmax);
469:   PetscTableDestroy(&ta);

471:   /* (4) send and recv coi */
472:   /*-----------------------*/
473:   PetscCommGetNewTag(comm,&tagi);
474:   PetscPostIrecvInt(comm,tagi,nrecv,id_r,len_ri,&buf_ri,&rwaits);
475:   PetscMalloc1(len+1,&buf_s);
476:   buf_si = buf_s;  /* points to the beginning of k-th msg to be sent */
477:   for (proc=0,k=0; proc<size; proc++) {
478:     if (!len_s[proc]) continue;
479:     /* form outgoing message for i-structure:
480:          buf_si[0]:                 nrows to be sent
481:                [1:nrows]:           row index (global)
482:                [nrows+1:2*nrows+1]: i-structure index
483:     */
484:     /*-------------------------------------------*/
485:     nrows       = len_si[proc]/2 - 1; /* num of rows in Co to be sent to [proc] */
486:     buf_si_i    = buf_si + nrows+1;
487:     buf_si[0]   = nrows;
488:     buf_si_i[0] = 0;
489:     nrows       = 0;
490:     for (i=owners_co[proc]; i<owners_co[proc+1]; i++) {
491:       nzi = coi[i+1] - coi[i];
492:       buf_si_i[nrows+1] = buf_si_i[nrows] + nzi;  /* i-structure */
493:       buf_si[nrows+1]   = prmap[i] -owners[proc]; /* local row index */
494:       nrows++;
495:     }
496:     MPI_Isend(buf_si,len_si[proc],MPIU_INT,proc,tagi,comm,swaits+k);
497:     k++;
498:     buf_si += len_si[proc];
499:   }
500:   for (i=0; i<nrecv; i++) {
501:     MPI_Waitany(nrecv,rwaits,&icompleted,&rstatus);
502:   }
503:   PetscFree(rwaits);
504:   if (nsend) {MPI_Waitall(nsend,swaits,sstatus);}

506:   PetscFree4(len_s,len_si,sstatus,owners_co);
507:   PetscFree(len_ri);
508:   PetscFree(swaits);
509:   PetscFree(buf_s);

511:   /* (5) compute the local portion of Cmpi      */
512:   /* ------------------------------------------ */
 513:   /* set initial free space to be Crmax, sufficient for holding nonzeros in each row of Cmpi */
514:   PetscFreeSpaceGet(Crmax,&free_space);
515:   current_space = free_space;

517:   PetscMalloc3(nrecv,&buf_ri_k,nrecv,&nextrow,nrecv,&nextci);
518:   for (k=0; k<nrecv; k++) {
 519:     buf_ri_k[k] = buf_ri[k]; /* beginning of k-th received i-structure */
 520:     nrows       = *buf_ri_k[k];
 521:     nextrow[k]  = buf_ri_k[k] + 1;  /* next row number of k-th received i-structure */
 522:     nextci[k]   = buf_ri_k[k] + (nrows + 1); /* points to the next i-structure of k-th received i-structure */
523:   }

525:   MatPreallocateInitialize(comm,pn,pn,dnz,onz);
526:   PetscLLCondensedCreate_Scalable(Crmax,&lnk);
527:   for (i=0; i<pn; i++) {
528:     /* add C_loc into Cmpi */
529:     nzi  = c_loc->i[i+1] - c_loc->i[i];
530:     Jptr = c_loc->j + c_loc->i[i];
531:     PetscLLCondensedAddSorted_Scalable(nzi,Jptr,lnk);

533:     /* add received col data into lnk */
534:     for (k=0; k<nrecv; k++) { /* k-th received message */
535:       if (i == *nextrow[k]) { /* i-th row */
536:         nzi  = *(nextci[k]+1) - *nextci[k];
537:         Jptr = buf_rj[k] + *nextci[k];
538:         PetscLLCondensedAddSorted_Scalable(nzi,Jptr,lnk);
539:         nextrow[k]++; nextci[k]++;
540:       }
541:     }
542:     nzi = lnk[0];

544:     /* copy data into free space, then initialize lnk */
545:     PetscLLCondensedClean_Scalable(nzi,current_space->array,lnk);
546:     MatPreallocateSet(i+owners[rank],nzi,current_space->array,dnz,onz);
547:   }
548:   PetscFree3(buf_ri_k,nextrow,nextci);
549:   PetscLLCondensedDestroy_Scalable(lnk);
550:   PetscFreeSpaceDestroy(free_space);

552:   /* local sizes and preallocation */
553:   MatSetSizes(Cmpi,pn,pn,PETSC_DETERMINE,PETSC_DETERMINE);
554:   if (P->cmap->bs > 0) {
555:     PetscLayoutSetBlockSize(Cmpi->rmap,P->cmap->bs);
556:     PetscLayoutSetBlockSize(Cmpi->cmap,P->cmap->bs);
557:   }
558:   MatMPIAIJSetPreallocation(Cmpi,0,dnz,0,onz);
559:   MatPreallocateFinalize(dnz,onz);

561:   /* members in merge */
562:   PetscFree(id_r);
563:   PetscFree(len_r);
564:   PetscFree(buf_ri[0]);
565:   PetscFree(buf_ri);
566:   PetscFree(buf_rj[0]);
567:   PetscFree(buf_rj);
568:   PetscLayoutDestroy(&rowmap);

570:   nout = 0;
571:   ISGlobalToLocalMappingApply(ptap->ltog,IS_GTOLM_DROP,c_oth->i[ptap->C_oth->rmap->n],c_oth->j,&nout,c_oth->j);
572:   if (c_oth->i[ptap->C_oth->rmap->n] != nout) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Incorrect mapping %D != %D\n",c_oth->i[ptap->C_oth->rmap->n],nout);
573:   ISGlobalToLocalMappingApply(ptap->ltog,IS_GTOLM_DROP,c_loc->i[ptap->C_loc->rmap->n],c_loc->j,&nout,c_loc->j);
574:   if (c_loc->i[ptap->C_loc->rmap->n] != nout) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Incorrect mapping %D != %D\n",c_loc->i[ptap->C_loc->rmap->n],nout);

576:   /* attach the supporting struct to Cmpi for reuse */
577:   Cmpi->product->data    = ptap;
578:   Cmpi->product->view    = MatView_MPIAIJ_PtAP;
579:   Cmpi->product->destroy = MatDestroy_MPIAIJ_PtAP;

581:   /* Cmpi is not ready for use - assembly will be done by MatPtAPNumeric() */
582:   Cmpi->assembled        = PETSC_FALSE;
583:   Cmpi->ops->ptapnumeric = MatPtAPNumeric_MPIAIJ_MPIAIJ_scalable;
584:   return(0);
585: }
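
/*
  Sketch (editorial) of the symbolic merge performed above: for every row of A the
  column lists of the touched P rows are merged into one sorted list, which
  PetscLLCondensedAddSorted_Scalable() does incrementally with a condensed linked
  list. A two-list stand-in that just counts the merged (union) length, assuming
  both inputs are sorted and duplicate-free:
*/
PETSC_STATIC_INLINE PetscInt MergedRowLengthSketch(PetscInt na,const PetscInt *a,PetscInt nb,const PetscInt *b)
{
  PetscInt i=0,j=0,n=0;

  while (i<na && j<nb) {
    if (a[i] == b[j])     {i++; j++;} /* shared column counted once */
    else if (a[i] < b[j]) i++;
    else                  j++;
    n++;
  }
  return n + (na-i) + (nb-j);         /* append whichever tail remains */
}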

587: PETSC_STATIC_INLINE PetscErrorCode MatPtAPSymbolicComputeOneRowOfAP_private(Mat A,Mat P,Mat P_oth,const PetscInt *map,PetscInt dof,PetscInt i,PetscHSetI dht,PetscHSetI oht)
588: {
589:   Mat_MPIAIJ           *a=(Mat_MPIAIJ*)A->data,*p=(Mat_MPIAIJ*)P->data;
590:   Mat_SeqAIJ           *ad=(Mat_SeqAIJ*)(a->A)->data,*ao=(Mat_SeqAIJ*)(a->B)->data,*p_oth=(Mat_SeqAIJ*)P_oth->data,*pd=(Mat_SeqAIJ*)p->A->data,*po=(Mat_SeqAIJ*)p->B->data;
591:   PetscInt             *ai,nzi,j,*aj,row,col,*pi,*pj,pnz,nzpi,*p_othcols,k;
592:   PetscInt             pcstart,pcend,column,offset;
593:   PetscErrorCode       ierr;

596:   pcstart = P->cmap->rstart;
597:   pcstart *= dof;
598:   pcend   = P->cmap->rend;
599:   pcend   *= dof;
600:   /* diagonal portion: Ad[i,:]*P */
601:   ai = ad->i;
602:   nzi = ai[i+1] - ai[i];
603:   aj  = ad->j + ai[i];
604:   for (j=0; j<nzi; j++) {
605:     row  = aj[j];
606:     offset = row%dof;
607:     row   /= dof;
608:     nzpi = pd->i[row+1] - pd->i[row];
609:     pj  = pd->j + pd->i[row];
610:     for (k=0; k<nzpi; k++) {
611:       PetscHSetIAdd(dht,pj[k]*dof+offset+pcstart);
612:     }
613:   }
614:   /* off diag P */
615:   for (j=0; j<nzi; j++) {
616:     row  = aj[j];
617:     offset = row%dof;
618:     row   /= dof;
619:     nzpi = po->i[row+1] - po->i[row];
620:     pj  = po->j + po->i[row];
621:     for (k=0; k<nzpi; k++) {
622:       PetscHSetIAdd(oht,p->garray[pj[k]]*dof+offset);
623:     }
624:   }

626:   /* off diagonal part: Ao[i, :]*P_oth */
627:   if (ao) {
628:     ai = ao->i;
629:     pi = p_oth->i;
630:     nzi = ai[i+1] - ai[i];
631:     aj  = ao->j + ai[i];
632:     for (j=0; j<nzi; j++) {
633:       row  = aj[j];
634:       offset = a->garray[row]%dof;
635:       row  = map[row];
636:       pnz  = pi[row+1] - pi[row];
637:       p_othcols = p_oth->j + pi[row];
638:       for (col=0; col<pnz; col++) {
639:         column = p_othcols[col] * dof + offset;
640:         if (column>=pcstart && column<pcend) {
641:           PetscHSetIAdd(dht,column);
642:         } else {
643:           PetscHSetIAdd(oht,column);
644:         }
645:       }
646:     }
647:   } /* end if (ao) */
648:   return(0);
649: }
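
/*
  Sketch (editorial) of the dof indexing used above: with dof>1 the sparsity is
  stored once per block, and an interleaved index i splits into block row ii = i/dof
  plus component offset = i%dof; a P column pj then lands at global column
  pj*dof+offset, shifted by pcstart for the diagonal block.
*/
PETSC_STATIC_INLINE PetscInt InterleavedColumnSketch(PetscInt pj,PetscInt dof,PetscInt offset,PetscInt pcstart)
{
  return pj*dof + offset + pcstart; /* e.g. dof=3, pj=5, offset=2, pcstart=0 -> column 17 */
}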

651: PETSC_STATIC_INLINE PetscErrorCode MatPtAPNumericComputeOneRowOfAP_private(Mat A,Mat P,Mat P_oth,const PetscInt *map,PetscInt dof,PetscInt i,PetscHMapIV hmap)
652: {
653:   Mat_MPIAIJ           *a=(Mat_MPIAIJ*)A->data,*p=(Mat_MPIAIJ*)P->data;
654:   Mat_SeqAIJ           *ad=(Mat_SeqAIJ*)(a->A)->data,*ao=(Mat_SeqAIJ*)(a->B)->data,*p_oth=(Mat_SeqAIJ*)P_oth->data,*pd=(Mat_SeqAIJ*)p->A->data,*po=(Mat_SeqAIJ*)p->B->data;
655:   PetscInt             *ai,nzi,j,*aj,row,col,*pi,pnz,*p_othcols,pcstart,*pj,k,nzpi,offset;
656:   PetscScalar          ra,*aa,*pa;
657:   PetscErrorCode       ierr;

660:   pcstart = P->cmap->rstart;
661:   pcstart *= dof;

663:   /* diagonal portion: Ad[i,:]*P */
664:   ai  = ad->i;
665:   nzi = ai[i+1] - ai[i];
666:   aj  = ad->j + ai[i];
667:   aa  = ad->a + ai[i];
668:   for (j=0; j<nzi; j++) {
669:     ra   = aa[j];
670:     row  = aj[j];
671:     offset = row%dof;
672:     row    /= dof;
673:     nzpi = pd->i[row+1] - pd->i[row];
674:     pj = pd->j + pd->i[row];
675:     pa = pd->a + pd->i[row];
676:     for (k=0; k<nzpi; k++) {
677:       PetscHMapIVAddValue(hmap,pj[k]*dof+offset+pcstart,ra*pa[k]);
678:     }
679:     PetscLogFlops(2.0*nzpi);
680:   }
681:   for (j=0; j<nzi; j++) {
682:     ra   = aa[j];
683:     row  = aj[j];
684:     offset = row%dof;
685:     row   /= dof;
686:     nzpi = po->i[row+1] - po->i[row];
687:     pj = po->j + po->i[row];
688:     pa = po->a + po->i[row];
689:     for (k=0; k<nzpi; k++) {
690:       PetscHMapIVAddValue(hmap,p->garray[pj[k]]*dof+offset,ra*pa[k]);
691:     }
692:     PetscLogFlops(2.0*nzpi);
693:   }

695:   /* off diagonal part: Ao[i, :]*P_oth */
696:   if (ao) {
697:     ai = ao->i;
698:     pi = p_oth->i;
699:     nzi = ai[i+1] - ai[i];
700:     aj  = ao->j + ai[i];
701:     aa  = ao->a + ai[i];
702:     for (j=0; j<nzi; j++) {
703:       row  = aj[j];
704:       offset = a->garray[row]%dof;
705:       row    = map[row];
706:       ra   = aa[j];
707:       pnz  = pi[row+1] - pi[row];
708:       p_othcols = p_oth->j + pi[row];
709:       pa   = p_oth->a + pi[row];
710:       for (col=0; col<pnz; col++) {
711:         PetscHMapIVAddValue(hmap,p_othcols[col]*dof+offset,ra*pa[col]);
712:       }
713:       PetscLogFlops(2.0*pnz);
714:     }
715:   } /* end if (ao) */

717:   return(0);
718: }
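
/*
  Sketch (editorial) of the hash-map accumulation pattern above, using the same
  PetscHMapIV calls this file already relies on: keys are global columns of AP[i,:],
  values are the accumulated products, and the pairs are later drained in arbitrary
  order by PetscHMapIVGetPairs(). cols/vals must have room for the distinct keys.
*/
PETSC_STATIC_INLINE PetscErrorCode APRowHashSketch(PetscInt *nz,PetscInt *cols,PetscScalar *vals)
{
  PetscHMapIV h;

  PetscHMapIVCreate(&h);
  PetscHMapIVAddValue(h,7,1.0);        /* AP[i,7] += 1.0 */
  PetscHMapIVAddValue(h,3,2.0);        /* AP[i,3] += 2.0 */
  PetscHMapIVAddValue(h,7,0.5);        /* AP[i,7] += 0.5 -> 1.5 */
  *nz = 0;
  PetscHMapIVGetPairs(h,nz,cols,vals); /* drains the map: *nz becomes 2 */
  PetscHMapIVDestroy(&h);
  return(0);
}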

720: PetscErrorCode MatGetBrowsOfAcols_MPIXAIJ(Mat,Mat,PetscInt dof,MatReuse,Mat*);

722: PetscErrorCode MatPtAPNumeric_MPIAIJ_MPIXAIJ_allatonce(Mat A,Mat P,PetscInt dof,Mat C)
723: {
724:   PetscErrorCode    ierr;
725:   Mat_MPIAIJ        *p=(Mat_MPIAIJ*)P->data,*c=(Mat_MPIAIJ*)C->data;
726:   Mat_SeqAIJ        *cd,*co,*po=(Mat_SeqAIJ*)p->B->data,*pd=(Mat_SeqAIJ*)p->A->data;
727:   Mat_APMPI         *ptap;
728:   PetscHMapIV       hmap;
729:   PetscInt          i,j,jj,kk,nzi,*c_rmtj,voff,*c_othj,pn,pon,pcstart,pcend,ccstart,ccend,row,am,*poj,*pdj,*apindices,cmaxr,*c_rmtc,*c_rmtjj,*dcc,*occ,loc;
730:   PetscScalar       *c_rmta,*c_otha,*poa,*pda,*apvalues,*apvaluestmp,*c_rmtaa;
731:   PetscInt          offset,ii,pocol;
732:   const PetscInt    *mappingindices;
733:   IS                map;

736:   MatCheckProduct(C,4);
737:   ptap = (Mat_APMPI*)C->product->data;
738:   if (!ptap) SETERRQ(PetscObjectComm((PetscObject)C),PETSC_ERR_ARG_WRONGSTATE,"PtAP cannot be computed. Missing data");
739:   if (!ptap->P_oth) SETERRQ(PetscObjectComm((PetscObject)C),PETSC_ERR_ARG_WRONGSTATE,"PtAP cannot be reused. Do not call MatProductClear()");

741:   MatZeroEntries(C);

743:   /* Get P_oth = ptap->P_oth  and P_loc = ptap->P_loc */
744:   /*-----------------------------------------------------*/
745:   if (ptap->reuse == MAT_REUSE_MATRIX) {
 746:     /* P_oth and P_loc are obtained in MatPtAPSymbolic() when reuse == MAT_INITIAL_MATRIX */
747:      MatGetBrowsOfAcols_MPIXAIJ(A,P,dof,MAT_REUSE_MATRIX,&ptap->P_oth);
748:   }
749:   PetscObjectQuery((PetscObject)ptap->P_oth,"aoffdiagtopothmapping",(PetscObject*)&map);

751:   MatGetLocalSize(p->B,NULL,&pon);
752:   pon *= dof;
753:   PetscCalloc2(ptap->c_rmti[pon],&c_rmtj,ptap->c_rmti[pon],&c_rmta);
754:   MatGetLocalSize(A,&am,NULL);
755:   cmaxr = 0;
756:   for (i=0; i<pon; i++) {
757:     cmaxr = PetscMax(cmaxr,ptap->c_rmti[i+1]-ptap->c_rmti[i]);
758:   }
759:   PetscCalloc4(cmaxr,&apindices,cmaxr,&apvalues,cmaxr,&apvaluestmp,pon,&c_rmtc);
760:   PetscHMapIVCreate(&hmap);
761:   PetscHMapIVResize(hmap,cmaxr);
762:   ISGetIndices(map,&mappingindices);
763:   for (i=0; i<am && pon; i++) {
764:     PetscHMapIVClear(hmap);
765:     offset = i%dof;
766:     ii     = i/dof;
767:     nzi = po->i[ii+1] - po->i[ii];
768:     if (!nzi) continue;
769:     MatPtAPNumericComputeOneRowOfAP_private(A,P,ptap->P_oth,mappingindices,dof,i,hmap);
770:     voff = 0;
771:     PetscHMapIVGetPairs(hmap,&voff,apindices,apvalues);
772:     if (!voff) continue;

774:     /* Form C(ii, :) */
775:     poj = po->j + po->i[ii];
776:     poa = po->a + po->i[ii];
777:     for (j=0; j<nzi; j++) {
778:       pocol = poj[j]*dof+offset;
779:       c_rmtjj = c_rmtj + ptap->c_rmti[pocol];
780:       c_rmtaa = c_rmta + ptap->c_rmti[pocol];
781:       for (jj=0; jj<voff; jj++) {
782:         apvaluestmp[jj] = apvalues[jj]*poa[j];
 783:         /* If the row is empty */
784:         if (!c_rmtc[pocol]) {
785:           c_rmtjj[jj] = apindices[jj];
786:           c_rmtaa[jj] = apvaluestmp[jj];
787:           c_rmtc[pocol]++;
788:         } else {
789:           PetscFindInt(apindices[jj],c_rmtc[pocol],c_rmtjj,&loc);
790:           if (loc>=0){ /* hit */
791:             c_rmtaa[loc] += apvaluestmp[jj];
792:             PetscLogFlops(1.0);
793:           } else { /* new element */
794:             loc = -(loc+1);
795:             /* Move data backward */
796:             for (kk=c_rmtc[pocol]; kk>loc; kk--) {
797:               c_rmtjj[kk] = c_rmtjj[kk-1];
798:               c_rmtaa[kk] = c_rmtaa[kk-1];
799:             }/* End kk */
800:             c_rmtjj[loc] = apindices[jj];
801:             c_rmtaa[loc] = apvaluestmp[jj];
802:             c_rmtc[pocol]++;
803:           }
804:         }
805:         PetscLogFlops(voff);
806:       } /* End jj */
807:     } /* End j */
808:   } /* End i */

810:   PetscFree4(apindices,apvalues,apvaluestmp,c_rmtc);
811:   PetscHMapIVDestroy(&hmap);

813:   MatGetLocalSize(P,NULL,&pn);
814:   pn *= dof;
815:   PetscCalloc2(ptap->c_othi[pn],&c_othj,ptap->c_othi[pn],&c_otha);

817:   PetscSFReduceBegin(ptap->sf,MPIU_INT,c_rmtj,c_othj,MPIU_REPLACE);
818:   PetscSFReduceBegin(ptap->sf,MPIU_SCALAR,c_rmta,c_otha,MPIU_REPLACE);
819:   MatGetOwnershipRangeColumn(P,&pcstart,&pcend);
820:   pcstart = pcstart*dof;
821:   pcend   = pcend*dof;
822:   cd = (Mat_SeqAIJ*)(c->A)->data;
823:   co = (Mat_SeqAIJ*)(c->B)->data;

825:   cmaxr = 0;
826:   for (i=0; i<pn; i++) {
827:     cmaxr = PetscMax(cmaxr,(cd->i[i+1]-cd->i[i])+(co->i[i+1]-co->i[i]));
828:   }
829:   PetscCalloc5(cmaxr,&apindices,cmaxr,&apvalues,cmaxr,&apvaluestmp,pn,&dcc,pn,&occ);
830:   PetscHMapIVCreate(&hmap);
831:   PetscHMapIVResize(hmap,cmaxr);
832:   for (i=0; i<am && pn; i++) {
833:     PetscHMapIVClear(hmap);
834:     offset = i%dof;
835:     ii     = i/dof;
836:     nzi = pd->i[ii+1] - pd->i[ii];
837:     if (!nzi) continue;
838:     MatPtAPNumericComputeOneRowOfAP_private(A,P,ptap->P_oth,mappingindices,dof,i,hmap);
839:     voff = 0;
840:     PetscHMapIVGetPairs(hmap,&voff,apindices,apvalues);
841:     if (!voff) continue;
842:     /* Form C(ii, :) */
843:     pdj = pd->j + pd->i[ii];
844:     pda = pd->a + pd->i[ii];
845:     for (j=0; j<nzi; j++) {
846:       row = pcstart + pdj[j] * dof + offset;
847:       for (jj=0; jj<voff; jj++) {
848:         apvaluestmp[jj] = apvalues[jj]*pda[j];
849:       }
850:       PetscLogFlops(voff);
851:       MatSetValues(C,1,&row,voff,apindices,apvaluestmp,ADD_VALUES);
852:     }
853:   }
854:   ISRestoreIndices(map,&mappingindices);
855:   MatGetOwnershipRangeColumn(C,&ccstart,&ccend);
856:   PetscFree5(apindices,apvalues,apvaluestmp,dcc,occ);
857:   PetscHMapIVDestroy(&hmap);
858:   PetscSFReduceEnd(ptap->sf,MPIU_INT,c_rmtj,c_othj,MPIU_REPLACE);
859:   PetscSFReduceEnd(ptap->sf,MPIU_SCALAR,c_rmta,c_otha,MPIU_REPLACE);
860:   PetscFree2(c_rmtj,c_rmta);

862:   /* Add contributions from remote */
863:   for (i = 0; i < pn; i++) {
864:     row = i + pcstart;
865:     MatSetValues(C,1,&row,ptap->c_othi[i+1]-ptap->c_othi[i],c_othj+ptap->c_othi[i],c_otha+ptap->c_othi[i],ADD_VALUES);
866:   }
867:   PetscFree2(c_othj,c_otha);

869:   MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);
870:   MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);

872:   ptap->reuse = MAT_REUSE_MATRIX;
873:   return(0);
874: }
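
/*
  Sketch (editorial) of the communication/computation overlap used above: both
  PetscSFReduce pairs push the remote rows (c_rmtj,c_rmta) to their owning ranks
  while the local contribution to C is assembled in between. MPIU_REPLACE is safe
  here because the SF is a one-to-one map, so each leaf writes a distinct root slot.
*/
PETSC_STATIC_INLINE PetscErrorCode CRemoteReduceSketch(PetscSF sf,PetscInt *c_rmtj,PetscInt *c_othj,PetscScalar *c_rmta,PetscScalar *c_otha)
{
  PetscSFReduceBegin(sf,MPIU_INT,c_rmtj,c_othj,MPIU_REPLACE);
  PetscSFReduceBegin(sf,MPIU_SCALAR,c_rmta,c_otha,MPIU_REPLACE);
  /* ... assemble the local part of C here, overlapping with the reduction ... */
  PetscSFReduceEnd(sf,MPIU_INT,c_rmtj,c_othj,MPIU_REPLACE);
  PetscSFReduceEnd(sf,MPIU_SCALAR,c_rmta,c_otha,MPIU_REPLACE);
  return(0);
}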

876: PetscErrorCode MatPtAPNumeric_MPIAIJ_MPIAIJ_allatonce(Mat A,Mat P,Mat C)
877: {
878:   PetscErrorCode      ierr;


882:   MatPtAPNumeric_MPIAIJ_MPIXAIJ_allatonce(A,P,1,C);
883:   return(0);
884: }

886: PetscErrorCode MatPtAPNumeric_MPIAIJ_MPIXAIJ_allatonce_merged(Mat A,Mat P,PetscInt dof,Mat C)
887: {
888:   PetscErrorCode    ierr;
889:   Mat_MPIAIJ        *p=(Mat_MPIAIJ*)P->data,*c=(Mat_MPIAIJ*)C->data;
890:   Mat_SeqAIJ        *cd,*co,*po=(Mat_SeqAIJ*)p->B->data,*pd=(Mat_SeqAIJ*)p->A->data;
891:   Mat_APMPI         *ptap;
892:   PetscHMapIV       hmap;
893:   PetscInt          i,j,jj,kk,nzi,dnzi,*c_rmtj,voff,*c_othj,pn,pon,pcstart,pcend,row,am,*poj,*pdj,*apindices,cmaxr,*c_rmtc,*c_rmtjj,loc;
894:   PetscScalar       *c_rmta,*c_otha,*poa,*pda,*apvalues,*apvaluestmp,*c_rmtaa;
895:   PetscInt          offset,ii,pocol;
896:   const PetscInt    *mappingindices;
897:   IS                map;

900:   MatCheckProduct(C,4);
901:   ptap = (Mat_APMPI*)C->product->data;
902:   if (!ptap) SETERRQ(PetscObjectComm((PetscObject)C),PETSC_ERR_ARG_WRONGSTATE,"PtAP cannot be computed. Missing data");
903:   if (!ptap->P_oth) SETERRQ(PetscObjectComm((PetscObject)C),PETSC_ERR_ARG_WRONGSTATE,"PtAP cannot be reused. Do not call MatProductClear()");

905:   MatZeroEntries(C);

907:   /* Get P_oth = ptap->P_oth  and P_loc = ptap->P_loc */
908:   /*-----------------------------------------------------*/
909:   if (ptap->reuse == MAT_REUSE_MATRIX) {
 910:     /* P_oth and P_loc are obtained in MatPtAPSymbolic() when reuse == MAT_INITIAL_MATRIX */
911:      MatGetBrowsOfAcols_MPIXAIJ(A,P,dof,MAT_REUSE_MATRIX,&ptap->P_oth);
912:   }
913:   PetscObjectQuery((PetscObject)ptap->P_oth,"aoffdiagtopothmapping",(PetscObject*)&map);
914:   MatGetLocalSize(p->B,NULL,&pon);
915:   pon *= dof;
916:   MatGetLocalSize(P,NULL,&pn);
917:   pn  *= dof;

919:   PetscCalloc2(ptap->c_rmti[pon],&c_rmtj,ptap->c_rmti[pon],&c_rmta);
920:   MatGetLocalSize(A,&am,NULL);
921:   MatGetOwnershipRangeColumn(P,&pcstart,&pcend);
922:   pcstart *= dof;
923:   pcend   *= dof;
924:   cmaxr = 0;
925:   for (i=0; i<pon; i++) {
926:     cmaxr = PetscMax(cmaxr,ptap->c_rmti[i+1]-ptap->c_rmti[i]);
927:   }
928:   cd = (Mat_SeqAIJ*)(c->A)->data;
929:   co = (Mat_SeqAIJ*)(c->B)->data;
930:   for (i=0; i<pn; i++) {
931:     cmaxr = PetscMax(cmaxr,(cd->i[i+1]-cd->i[i])+(co->i[i+1]-co->i[i]));
932:   }
933:   PetscCalloc4(cmaxr,&apindices,cmaxr,&apvalues,cmaxr,&apvaluestmp,pon,&c_rmtc);
934:   PetscHMapIVCreate(&hmap);
935:   PetscHMapIVResize(hmap,cmaxr);
936:   ISGetIndices(map,&mappingindices);
937:   for (i=0; i<am && (pon || pn); i++) {
938:     PetscHMapIVClear(hmap);
939:     offset = i%dof;
940:     ii     = i/dof;
941:     nzi  = po->i[ii+1] - po->i[ii];
942:     dnzi = pd->i[ii+1] - pd->i[ii];
943:     if (!nzi && !dnzi) continue;
944:     MatPtAPNumericComputeOneRowOfAP_private(A,P,ptap->P_oth,mappingindices,dof,i,hmap);
945:     voff = 0;
946:     PetscHMapIVGetPairs(hmap,&voff,apindices,apvalues);
947:     if (!voff) continue;

949:     /* Form remote C(ii, :) */
950:     poj = po->j + po->i[ii];
951:     poa = po->a + po->i[ii];
952:     for (j=0; j<nzi; j++) {
953:       pocol = poj[j]*dof+offset;
954:       c_rmtjj = c_rmtj + ptap->c_rmti[pocol];
955:       c_rmtaa = c_rmta + ptap->c_rmti[pocol];
956:       for (jj=0; jj<voff; jj++) {
957:         apvaluestmp[jj] = apvalues[jj]*poa[j];
 958:         /* If the row is empty */
959:         if (!c_rmtc[pocol]) {
960:           c_rmtjj[jj] = apindices[jj];
961:           c_rmtaa[jj] = apvaluestmp[jj];
962:           c_rmtc[pocol]++;
963:         } else {
964:           PetscFindInt(apindices[jj],c_rmtc[pocol],c_rmtjj,&loc);
965:           if (loc>=0){ /* hit */
966:             c_rmtaa[loc] += apvaluestmp[jj];
967:             PetscLogFlops(1.0);
968:           } else { /* new element */
969:             loc = -(loc+1);
970:             /* Move data backward */
971:             for (kk=c_rmtc[pocol]; kk>loc; kk--) {
972:               c_rmtjj[kk] = c_rmtjj[kk-1];
973:               c_rmtaa[kk] = c_rmtaa[kk-1];
974:             }/* End kk */
975:             c_rmtjj[loc] = apindices[jj];
976:             c_rmtaa[loc] = apvaluestmp[jj];
977:             c_rmtc[pocol]++;
978:           }
979:         }
980:       } /* End jj */
981:       PetscLogFlops(voff);
982:     } /* End j */

984:     /* Form local C(ii, :) */
985:     pdj = pd->j + pd->i[ii];
986:     pda = pd->a + pd->i[ii];
987:     for (j=0; j<dnzi; j++) {
988:       row = pcstart + pdj[j] * dof + offset;
989:       for (jj=0; jj<voff; jj++) {
990:         apvaluestmp[jj] = apvalues[jj]*pda[j];
991:       }/* End kk */
992:       PetscLogFlops(voff);
993:       MatSetValues(C,1,&row,voff,apindices,apvaluestmp,ADD_VALUES);
994:     }/* End j */
995:   } /* End i */

997:   ISRestoreIndices(map,&mappingindices);
998:   PetscFree4(apindices,apvalues,apvaluestmp,c_rmtc);
999:   PetscHMapIVDestroy(&hmap);
1000:   PetscCalloc2(ptap->c_othi[pn],&c_othj,ptap->c_othi[pn],&c_otha);

1002:   PetscSFReduceBegin(ptap->sf,MPIU_INT,c_rmtj,c_othj,MPIU_REPLACE);
1003:   PetscSFReduceBegin(ptap->sf,MPIU_SCALAR,c_rmta,c_otha,MPIU_REPLACE);
1004:   PetscSFReduceEnd(ptap->sf,MPIU_INT,c_rmtj,c_othj,MPIU_REPLACE);
1005:   PetscSFReduceEnd(ptap->sf,MPIU_SCALAR,c_rmta,c_otha,MPIU_REPLACE);
1006:   PetscFree2(c_rmtj,c_rmta);

1008:   /* Add contributions from remote */
1009:   for (i = 0; i < pn; i++) {
1010:     row = i + pcstart;
1011:     MatSetValues(C,1,&row,ptap->c_othi[i+1]-ptap->c_othi[i],c_othj+ptap->c_othi[i],c_otha+ptap->c_othi[i],ADD_VALUES);
1012:   }
1013:   PetscFree2(c_othj,c_otha);

1015:   MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);
1016:   MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);

1018:   ptap->reuse = MAT_REUSE_MATRIX;
1019:   return(0);
1020: }

1022: PetscErrorCode MatPtAPNumeric_MPIAIJ_MPIAIJ_allatonce_merged(Mat A,Mat P,Mat C)
1023: {
1024:   PetscErrorCode      ierr;


1028:   MatPtAPNumeric_MPIAIJ_MPIXAIJ_allatonce_merged(A,P,1,C);
1029:   return(0);
1030: }

1032: /* TODO: move algorithm selection to MatProductSetFromOptions */
1033: PetscErrorCode MatPtAPSymbolic_MPIAIJ_MPIXAIJ_allatonce(Mat A,Mat P,PetscInt dof,PetscReal fill,Mat Cmpi)
1034: {
1035:   Mat_APMPI           *ptap;
1036:   Mat_MPIAIJ          *p=(Mat_MPIAIJ*)P->data;
1037:   MPI_Comm            comm;
1038:   Mat_SeqAIJ          *pd=(Mat_SeqAIJ*)p->A->data,*po=(Mat_SeqAIJ*)p->B->data;
1039:   MatType             mtype;
1040:   PetscSF             sf;
1041:   PetscSFNode         *iremote;
1042:   PetscInt            rootspacesize,*rootspace,*rootspaceoffsets,nleaves;
1043:   const PetscInt      *rootdegrees;
1044:   PetscHSetI          ht,oht,*hta,*hto;
1045:   PetscInt            pn,pon,*c_rmtc,i,j,nzi,htsize,htosize,*c_rmtj,off,*c_othj,rcvncols,sendncols,*c_rmtoffsets;
1046:   PetscInt            lidx,*rdj,col,pcstart,pcend,*dnz,*onz,am,arstart,arend,*poj,*pdj;
1047:   PetscInt            nalg=2,alg=0,offset,ii;
1048:   PetscMPIInt         owner;
1049:   const PetscInt      *mappingindices;
1050:   PetscBool           flg;
1051:   const char          *algTypes[2] = {"overlapping","merged"};
1052:   IS                  map;
1053:   PetscErrorCode      ierr;

1056:   MatCheckProduct(Cmpi,5);
1057:   if (Cmpi->product->data) SETERRQ(PetscObjectComm((PetscObject)Cmpi),PETSC_ERR_PLIB,"Product data not empty");
1058:   PetscObjectGetComm((PetscObject)A,&comm);

1060:   /* Create symbolic parallel matrix Cmpi */
1061:   MatGetLocalSize(P,NULL,&pn);
1062:   pn *= dof;
1063:   MatGetType(A,&mtype);
1064:   MatSetType(Cmpi,mtype);
1065:   MatSetSizes(Cmpi,pn,pn,PETSC_DETERMINE,PETSC_DETERMINE);

1067:   PetscNew(&ptap);
1068:   ptap->reuse = MAT_INITIAL_MATRIX;
1069:   ptap->algType = 2;

1071:   /* Get P_oth by taking rows of P (= non-zero cols of local A) from other processors */
1072:   MatGetBrowsOfAcols_MPIXAIJ(A,P,dof,MAT_INITIAL_MATRIX,&ptap->P_oth);
1073:   PetscObjectQuery((PetscObject)ptap->P_oth,"aoffdiagtopothmapping",(PetscObject*)&map);
 1074:   /* This equals the number of off-diagonal columns in P */
1075:   MatGetLocalSize(p->B,NULL,&pon);
1076:   pon *= dof;
1077:   /* offsets */
1078:   PetscMalloc1(pon+1,&ptap->c_rmti);
1079:   /* The number of columns we will send to remote ranks */
1080:   PetscMalloc1(pon,&c_rmtc);
1081:   PetscMalloc1(pon,&hta);
1082:   for (i=0; i<pon; i++) {
1083:     PetscHSetICreate(&hta[i]);
1084:   }
1085:   MatGetLocalSize(A,&am,NULL);
1086:   MatGetOwnershipRange(A,&arstart,&arend);
1087:   /* Create hash table to merge all columns for C(i, :) */
1088:   PetscHSetICreate(&ht);

1090:   ISGetIndices(map,&mappingindices);
1091:   ptap->c_rmti[0] = 0;
 1092:   /* 2) Pass 1: calculate the size for C_rmt (a matrix that needs to be sent to other processors) */
1093:   for (i=0; i<am && pon; i++) {
1094:     /* Form one row of AP */
1095:     PetscHSetIClear(ht);
1096:     offset = i%dof;
1097:     ii     = i/dof;
 1098:     /* If the off-diagonal part is empty, we can skip the calculation */
1099:     nzi = po->i[ii+1] - po->i[ii];
1100:     if (!nzi) continue;

1102:     MatPtAPSymbolicComputeOneRowOfAP_private(A,P,ptap->P_oth,mappingindices,dof,i,ht,ht);
1103:     PetscHSetIGetSize(ht,&htsize);
1104:     /* If AP is empty, just continue */
1105:     if (!htsize) continue;
1106:     /* Form C(ii, :) */
1107:     poj = po->j + po->i[ii];
1108:     for (j=0; j<nzi; j++) {
1109:       PetscHSetIUpdate(hta[poj[j]*dof+offset],ht);
1110:     }
1111:   }

1113:   for (i=0; i<pon; i++) {
1114:     PetscHSetIGetSize(hta[i],&htsize);
1115:     ptap->c_rmti[i+1] = ptap->c_rmti[i] + htsize;
1116:     c_rmtc[i] = htsize;
1117:   }

1119:   PetscMalloc1(ptap->c_rmti[pon],&c_rmtj);

1121:   for (i=0; i<pon; i++) {
1122:     off = 0;
1123:     PetscHSetIGetElems(hta[i],&off,c_rmtj+ptap->c_rmti[i]);
1124:     PetscHSetIDestroy(&hta[i]);
1125:   }
1126:   PetscFree(hta);

1128:   PetscMalloc1(pon,&iremote);
1129:   for (i=0; i<pon; i++) {
1130:     owner = 0; lidx = 0;
1131:     offset = i%dof;
1132:     ii     = i/dof;
1133:     PetscLayoutFindOwnerIndex(P->cmap,p->garray[ii],&owner,&lidx);
1134:     iremote[i].index = lidx*dof + offset;
1135:     iremote[i].rank  = owner;
1136:   }

1138:   PetscSFCreate(comm,&sf);
1139:   PetscSFSetGraph(sf,pn,pon,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);
1140:   /* Reorder ranks properly so that the data handled by gather and scatter have the same order */
1141:   PetscSFSetRankOrder(sf,PETSC_TRUE);
1142:   PetscSFSetFromOptions(sf);
1143:   PetscSFSetUp(sf);
1144:   /* How many neighbors have contributions to my rows? */
1145:   PetscSFComputeDegreeBegin(sf,&rootdegrees);
1146:   PetscSFComputeDegreeEnd(sf,&rootdegrees);
1147:   rootspacesize = 0;
1148:   for (i = 0; i < pn; i++) {
1149:     rootspacesize += rootdegrees[i];
1150:   }
1151:   PetscMalloc1(rootspacesize,&rootspace);
1152:   PetscMalloc1(rootspacesize+1,&rootspaceoffsets);
 1153:   /* Get information from leaves
 1154:    * Number of columns other ranks contribute to my rows
 1155:    */
1156:   PetscSFGatherBegin(sf,MPIU_INT,c_rmtc,rootspace);
1157:   PetscSFGatherEnd(sf,MPIU_INT,c_rmtc,rootspace);
1158:   PetscFree(c_rmtc);
1159:   PetscCalloc1(pn+1,&ptap->c_othi);
 1160:   /* The number of columns received for each row */
1161:   ptap->c_othi[0] = 0;
1162:   rootspacesize = 0;
1163:   rootspaceoffsets[0] = 0;
1164:   for (i = 0; i < pn; i++) {
1165:     rcvncols = 0;
1166:     for (j = 0; j<rootdegrees[i]; j++) {
1167:       rcvncols += rootspace[rootspacesize];
1168:       rootspaceoffsets[rootspacesize+1] = rootspaceoffsets[rootspacesize] + rootspace[rootspacesize];
1169:       rootspacesize++;
1170:     }
1171:     ptap->c_othi[i+1] = ptap->c_othi[i] + rcvncols;
1172:   }
1173:   PetscFree(rootspace);

1175:   PetscMalloc1(pon,&c_rmtoffsets);
1176:   PetscSFScatterBegin(sf,MPIU_INT,rootspaceoffsets,c_rmtoffsets);
1177:   PetscSFScatterEnd(sf,MPIU_INT,rootspaceoffsets,c_rmtoffsets);
1178:   PetscSFDestroy(&sf);
1179:   PetscFree(rootspaceoffsets);

1181:   PetscCalloc1(ptap->c_rmti[pon],&iremote);
1182:   nleaves = 0;
1183:   for (i = 0; i<pon; i++) {
1184:     owner = 0;
1185:     ii = i/dof;
1186:     PetscLayoutFindOwnerIndex(P->cmap,p->garray[ii],&owner,NULL);
1187:     sendncols = ptap->c_rmti[i+1] - ptap->c_rmti[i];
1188:     for (j=0; j<sendncols; j++) {
1189:       iremote[nleaves].rank = owner;
1190:       iremote[nleaves++].index = c_rmtoffsets[i] + j;
1191:     }
1192:   }
1193:   PetscFree(c_rmtoffsets);
1194:   PetscCalloc1(ptap->c_othi[pn],&c_othj);

1196:   PetscSFCreate(comm,&ptap->sf);
1197:   PetscSFSetGraph(ptap->sf,ptap->c_othi[pn],nleaves,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);
1198:   PetscSFSetFromOptions(ptap->sf);
1199:   /* One to one map */
1200:   PetscSFReduceBegin(ptap->sf,MPIU_INT,c_rmtj,c_othj,MPIU_REPLACE);

1202:   PetscMalloc2(pn,&dnz,pn,&onz);
1203:   PetscHSetICreate(&oht);
1204:   MatGetOwnershipRangeColumn(P,&pcstart,&pcend);
1205:   pcstart *= dof;
1206:   pcend   *= dof;
1207:   PetscMalloc2(pn,&hta,pn,&hto);
1208:   for (i=0; i<pn; i++) {
1209:     PetscHSetICreate(&hta[i]);
1210:     PetscHSetICreate(&hto[i]);
1211:   }
1212:   /* Work on local part */
1213:   /* 4) Pass 1: Estimate memory for C_loc */
1214:   for (i=0; i<am && pn; i++) {
1215:     PetscHSetIClear(ht);
1216:     PetscHSetIClear(oht);
1217:     offset = i%dof;
1218:     ii     = i/dof;
1219:     nzi = pd->i[ii+1] - pd->i[ii];
1220:     if (!nzi) continue;

1222:     MatPtAPSymbolicComputeOneRowOfAP_private(A,P,ptap->P_oth,mappingindices,dof,i,ht,oht);
1223:     PetscHSetIGetSize(ht,&htsize);
1224:     PetscHSetIGetSize(oht,&htosize);
1225:     if (!(htsize+htosize)) continue;
1226:     /* Form C(ii, :) */
1227:     pdj = pd->j + pd->i[ii];
1228:     for (j=0; j<nzi; j++) {
1229:       PetscHSetIUpdate(hta[pdj[j]*dof+offset],ht);
1230:       PetscHSetIUpdate(hto[pdj[j]*dof+offset],oht);
1231:     }
1232:   }

1234:   ISRestoreIndices(map,&mappingindices);

1236:   PetscHSetIDestroy(&ht);
1237:   PetscHSetIDestroy(&oht);

1239:   /* Get remote data */
1240:   PetscSFReduceEnd(ptap->sf,MPIU_INT,c_rmtj,c_othj,MPIU_REPLACE);
1241:   PetscFree(c_rmtj);

1243:   for (i = 0; i < pn; i++) {
1244:     nzi = ptap->c_othi[i+1] - ptap->c_othi[i];
1245:     rdj = c_othj + ptap->c_othi[i];
1246:     for (j = 0; j < nzi; j++) {
1247:       col = rdj[j];
1248:       /* diag part */
1249:       if (col>=pcstart && col<pcend) {
1250:         PetscHSetIAdd(hta[i],col);
1251:       } else { /* off diag */
1252:         PetscHSetIAdd(hto[i],col);
1253:       }
1254:     }
1255:     PetscHSetIGetSize(hta[i],&htsize);
1256:     dnz[i] = htsize;
1257:     PetscHSetIDestroy(&hta[i]);
1258:     PetscHSetIGetSize(hto[i],&htsize);
1259:     onz[i] = htsize;
1260:     PetscHSetIDestroy(&hto[i]);
1261:   }

1263:   PetscFree2(hta,hto);
1264:   PetscFree(c_othj);

1266:   /* local sizes and preallocation */
1267:   MatSetSizes(Cmpi,pn,pn,PETSC_DETERMINE,PETSC_DETERMINE);
1268:   MatSetBlockSizes(Cmpi,dof>1? dof: P->cmap->bs,dof>1? dof: P->cmap->bs);
1269:   MatMPIAIJSetPreallocation(Cmpi,0,dnz,0,onz);
1270:   MatSetUp(Cmpi);
1271:   PetscFree2(dnz,onz);

1273:   /* attach the supporting struct to Cmpi for reuse */
1274:   Cmpi->product->data    = ptap;
1275:   Cmpi->product->destroy = MatDestroy_MPIAIJ_PtAP;
1276:   Cmpi->product->view    = MatView_MPIAIJ_PtAP;

1278:   /* Cmpi is not ready for use - assembly will be done by MatPtAPNumeric() */
1279:   Cmpi->assembled = PETSC_FALSE;
1280:   /* pick an algorithm */
1281:   PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"MatPtAP","Mat");
1282:   alg = 0;
1283:   PetscOptionsEList("-matptap_allatonce_via","PtAP allatonce numeric approach","MatPtAP",algTypes,nalg,algTypes[alg],&alg,&flg);
1284:   PetscOptionsEnd();
1285:   switch (alg) {
1286:     case 0:
1287:       Cmpi->ops->ptapnumeric = MatPtAPNumeric_MPIAIJ_MPIAIJ_allatonce;
1288:       break;
1289:     case 1:
1290:       Cmpi->ops->ptapnumeric = MatPtAPNumeric_MPIAIJ_MPIAIJ_allatonce_merged;
1291:       break;
1292:     default:
 1293:       SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unsupported allatonce numerical algorithm\n");
1294:   }
1295:   return(0);
1296: }
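
/*
  Sketch (editorial): the numeric variant registered above can be chosen at run time
  with -matptap_allatonce_via overlapping|merged, or programmatically before the
  symbolic phase, e.g.:
*/
PETSC_STATIC_INLINE PetscErrorCode SelectAllAtOnceSketch(void)
{
  PetscOptionsSetValue(NULL,"-matptap_allatonce_via","merged");
  return(0);
}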

1298: PetscErrorCode MatPtAPSymbolic_MPIAIJ_MPIAIJ_allatonce(Mat A,Mat P,PetscReal fill,Mat C)
1299: {

1303:   MatPtAPSymbolic_MPIAIJ_MPIXAIJ_allatonce(A,P,1,fill,C);
1304:   return(0);
1305: }

1307: PetscErrorCode MatPtAPSymbolic_MPIAIJ_MPIXAIJ_allatonce_merged(Mat A,Mat P,PetscInt dof,PetscReal fill,Mat Cmpi)
1308: {
1309:   Mat_APMPI           *ptap;
1310:   Mat_MPIAIJ          *p=(Mat_MPIAIJ*)P->data;
1311:   MPI_Comm            comm;
1312:   Mat_SeqAIJ          *pd=(Mat_SeqAIJ*)p->A->data,*po=(Mat_SeqAIJ*)p->B->data;
1313:   MatType             mtype;
1314:   PetscSF             sf;
1315:   PetscSFNode         *iremote;
1316:   PetscInt            rootspacesize,*rootspace,*rootspaceoffsets,nleaves;
1317:   const PetscInt      *rootdegrees;
1318:   PetscHSetI          ht,oht,*hta,*hto,*htd;
1319:   PetscInt            pn,pon,*c_rmtc,i,j,nzi,dnzi,htsize,htosize,*c_rmtj,off,*c_othj,rcvncols,sendncols,*c_rmtoffsets;
1320:   PetscInt            lidx,*rdj,col,pcstart,pcend,*dnz,*onz,am,arstart,arend,*poj,*pdj;
1321:   PetscInt            nalg=2,alg=0,offset,ii;
1322:   PetscMPIInt         owner;
1323:   PetscBool           flg;
1324:   const char          *algTypes[2] = {"merged","overlapping"};
1325:   const PetscInt      *mappingindices;
1326:   IS                  map;
1327:   PetscErrorCode      ierr;

1330:   MatCheckProduct(Cmpi,5);
1331:   if (Cmpi->product->data) SETERRQ(PetscObjectComm((PetscObject)Cmpi),PETSC_ERR_PLIB,"Product data not empty");
1332:   PetscObjectGetComm((PetscObject)A,&comm);

1334:   /* Create symbolic parallel matrix Cmpi */
1335:   MatGetLocalSize(P,NULL,&pn);
1336:   pn *= dof;
1337:   MatGetType(A,&mtype);
1338:   MatSetType(Cmpi,mtype);
1339:   MatSetSizes(Cmpi,pn,pn,PETSC_DETERMINE,PETSC_DETERMINE);

1341:   PetscNew(&ptap);
1342:   ptap->reuse = MAT_INITIAL_MATRIX;
1343:   ptap->algType = 3;

1345:   /* 0) Get P_oth by taking rows of P (= non-zero cols of local A) from other processors */
1346:   MatGetBrowsOfAcols_MPIXAIJ(A,P,dof,MAT_INITIAL_MATRIX,&ptap->P_oth);
1347:   PetscObjectQuery((PetscObject)ptap->P_oth,"aoffdiagtopothmapping",(PetscObject*)&map);

 1349:   /* This equals the number of off-diagonal columns in P */
1350:   MatGetLocalSize(p->B,NULL,&pon);
1351:   pon *= dof;
1352:   /* offsets */
1353:   PetscMalloc1(pon+1,&ptap->c_rmti);
1354:   /* The number of columns we will send to remote ranks */
1355:   PetscMalloc1(pon,&c_rmtc);
1356:   PetscMalloc1(pon,&hta);
1357:   for (i=0; i<pon; i++) {
1358:     PetscHSetICreate(&hta[i]);
1359:   }
1360:   MatGetLocalSize(A,&am,NULL);
1361:   MatGetOwnershipRange(A,&arstart,&arend);
1362:   /* Create hash table to merge all columns for C(i, :) */
1363:   PetscHSetICreate(&ht);
1364:   PetscHSetICreate(&oht);
1365:   PetscMalloc2(pn,&htd,pn,&hto);
1366:   for (i=0; i<pn; i++) {
1367:     PetscHSetICreate(&htd[i]);
1368:     PetscHSetICreate(&hto[i]);
1369:   }

1371:   ISGetIndices(map,&mappingindices);
1372:   ptap->c_rmti[0] = 0;
 1373:   /* 2) Pass 1: calculate the size for C_rmt (a matrix that needs to be sent to other processors) */
1374:   for (i=0; i<am && (pon || pn); i++) {
1375:     /* Form one row of AP */
1376:     PetscHSetIClear(ht);
1377:     PetscHSetIClear(oht);
1378:     offset = i%dof;
1379:     ii     = i/dof;
 1380:     /* If the off-diagonal part is empty, we can skip the calculation */
1381:     nzi = po->i[ii+1] - po->i[ii];
1382:     dnzi = pd->i[ii+1] - pd->i[ii];
1383:     if (!nzi && !dnzi) continue;

1385:     MatPtAPSymbolicComputeOneRowOfAP_private(A,P,ptap->P_oth,mappingindices,dof,i,ht,oht);
1386:     PetscHSetIGetSize(ht,&htsize);
1387:     PetscHSetIGetSize(oht,&htosize);
1388:     /* If AP is empty, just continue */
1389:     if (!(htsize+htosize)) continue;

1391:     /* Form remote C(ii, :) */
1392:     poj = po->j + po->i[ii];
1393:     for (j=0; j<nzi; j++) {
1394:       PetscHSetIUpdate(hta[poj[j]*dof+offset],ht);
1395:       PetscHSetIUpdate(hta[poj[j]*dof+offset],oht);
1396:     }

1398:     /* Form local C(ii, :) */
1399:     pdj = pd->j + pd->i[ii];
1400:     for (j=0; j<dnzi; j++) {
1401:       PetscHSetIUpdate(htd[pdj[j]*dof+offset],ht);
1402:       PetscHSetIUpdate(hto[pdj[j]*dof+offset],oht);
1403:     }
1404:   }

1406:   ISRestoreIndices(map,&mappingindices);

1408:   PetscHSetIDestroy(&ht);
1409:   PetscHSetIDestroy(&oht);

1411:   for (i=0; i<pon; i++) {
1412:     PetscHSetIGetSize(hta[i],&htsize);
1413:     ptap->c_rmti[i+1] = ptap->c_rmti[i] + htsize;
1414:     c_rmtc[i] = htsize;
1415:   }

1417:   PetscMalloc1(ptap->c_rmti[pon],&c_rmtj);

1419:   for (i=0; i<pon; i++) {
1420:     off = 0;
1421:     PetscHSetIGetElems(hta[i],&off,c_rmtj+ptap->c_rmti[i]);
1422:     PetscHSetIDestroy(&hta[i]);
1423:   }
1424:   PetscFree(hta);

1426:   PetscMalloc1(pon,&iremote);
1427:   for (i=0; i<pon; i++) {
1428:     owner = 0; lidx = 0;
1429:     offset = i%dof;
1430:     ii     = i/dof;
1431:     PetscLayoutFindOwnerIndex(P->cmap,p->garray[ii],&owner,&lidx);
1432:     iremote[i].index = lidx*dof+offset;
1433:     iremote[i].rank  = owner;
1434:   }

1436:   PetscSFCreate(comm,&sf);
1437:   PetscSFSetGraph(sf,pn,pon,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);
1438:   /* Reorder ranks properly so that the data handled by gather and scatter have the same order */
1439:   PetscSFSetRankOrder(sf,PETSC_TRUE);
1440:   PetscSFSetFromOptions(sf);
1441:   PetscSFSetUp(sf);
1442:   /* How many neighbors have contributions to my rows? */
1443:   PetscSFComputeDegreeBegin(sf,&rootdegrees);
1444:   PetscSFComputeDegreeEnd(sf,&rootdegrees);
1445:   rootspacesize = 0;
1446:   for (i = 0; i < pn; i++) {
1447:     rootspacesize += rootdegrees[i];
1448:   }
1449:   PetscMalloc1(rootspacesize,&rootspace);
1450:   PetscMalloc1(rootspacesize+1,&rootspaceoffsets);
1451:   /* Gather from the leaves:
1452:    * the number of columns other ranks contribute to my rows
1453:    */
1454:   PetscSFGatherBegin(sf,MPIU_INT,c_rmtc,rootspace);
1455:   PetscSFGatherEnd(sf,MPIU_INT,c_rmtc,rootspace);
1456:   PetscFree(c_rmtc);
1457:   PetscMalloc1(pn+1,&ptap->c_othi);
1458:   /* The number of columns received for each row */
1459:   ptap->c_othi[0]     = 0;
1460:   rootspacesize       = 0;
1461:   rootspaceoffsets[0] = 0;
1462:   for (i = 0; i < pn; i++) {
1463:     rcvncols = 0;
1464:     for (j = 0; j<rootdegrees[i]; j++) {
1465:       rcvncols += rootspace[rootspacesize];
1466:       rootspaceoffsets[rootspacesize+1] = rootspaceoffsets[rootspacesize] + rootspace[rootspacesize];
1467:       rootspacesize++;
1468:     }
1469:     ptap->c_othi[i+1] = ptap->c_othi[i] + rcvncols;
1470:   }
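  /* c_othi[] is now a CSR-style offset array: row i of C will receive
   * c_othi[i+1]-c_othi[i] (possibly duplicated) column indices from other ranks. */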
1471:   PetscFree(rootspace);

1473:   PetscMalloc1(pon,&c_rmtoffsets);
1474:   PetscSFScatterBegin(sf,MPIU_INT,rootspaceoffsets,c_rmtoffsets);
1475:   PetscSFScatterEnd(sf,MPIU_INT,rootspaceoffsets,c_rmtoffsets);
1476:   PetscSFDestroy(&sf);
1477:   PetscFree(rootspaceoffsets);

1479:   PetscCalloc1(ptap->c_rmti[pon],&iremote);
1480:   nleaves = 0;
1481:   for (i = 0; i<pon; i++) {
1482:     owner = 0;
1483:     ii    = i/dof;
1484:     PetscLayoutFindOwnerIndex(P->cmap,p->garray[ii],&owner,NULL);
1485:     sendncols = ptap->c_rmti[i+1] - ptap->c_rmti[i];
1486:     for (j=0; j<sendncols; j++) {
1487:       iremote[nleaves].rank    = owner;
1488:       iremote[nleaves++].index = c_rmtoffsets[i] + j;
1489:     }
1490:   }
1491:   PetscFree(c_rmtoffsets);
1492:   PetscCalloc1(ptap->c_othi[pn],&c_othj);

1494:   PetscSFCreate(comm,&ptap->sf);
1495:   PetscSFSetGraph(ptap->sf,ptap->c_othi[pn],nleaves,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);
1496:   PetscSFSetFromOptions(ptap->sf);
1497:   /* One-to-one map: push each leaf's column indices into its slot at the root */
1498:   PetscSFReduceBegin(ptap->sf,MPIU_INT,c_rmtj,c_othj,MPIU_REPLACE);
1499:   /* Get remote data */
1500:   PetscSFReduceEnd(ptap->sf,MPIU_INT,c_rmtj,c_othj,MPIU_REPLACE);
1501:   PetscFree(c_rmtj);
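  /* After the reduce, c_othj[] holds the column indices other ranks contribute to the
   * locally owned rows; duplicates are removed by the hash-set inserts below. */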
1502:   PetscMalloc2(pn,&dnz,pn,&onz);
1503:   MatGetOwnershipRangeColumn(P,&pcstart,&pcend);
1504:   pcstart *= dof;
1505:   pcend   *= dof;
1506:   for (i = 0; i < pn; i++) {
1507:     nzi = ptap->c_othi[i+1] - ptap->c_othi[i];
1508:     rdj = c_othj + ptap->c_othi[i];
1509:     for (j = 0; j < nzi; j++) {
1510:       col =  rdj[j];
1511:       /* diag part */
1512:       if (col>=pcstart && col<pcend) {
1513:         PetscHSetIAdd(htd[i],col);
1514:       } else { /* off diag */
1515:         PetscHSetIAdd(hto[i],col);
1516:       }
1517:     }
1518:     PetscHSetIGetSize(htd[i],&htsize);
1519:     dnz[i] = htsize;
1520:     PetscHSetIDestroy(&htd[i]);
1521:     PetscHSetIGetSize(hto[i],&htsize);
1522:     onz[i] = htsize;
1523:     PetscHSetIDestroy(&hto[i]);
1524:   }

1526:   PetscFree2(htd,hto);
1527:   PetscFree(c_othj);

1529:   /* local sizes and preallocation */
1530:   MatSetSizes(Cmpi,pn,pn,PETSC_DETERMINE,PETSC_DETERMINE);
1531:   MatSetBlockSizes(Cmpi,dof>1 ? dof : P->cmap->bs,dof>1 ? dof : P->cmap->bs);
1532:   MatMPIAIJSetPreallocation(Cmpi,0,dnz,0,onz);
1533:   PetscFree2(dnz,onz);

1535:   /* attach the supporting struct to Cmpi for reuse */
1536:   Cmpi->product->data    = ptap;
1537:   Cmpi->product->destroy = MatDestroy_MPIAIJ_PtAP;
1538:   Cmpi->product->view    = MatView_MPIAIJ_PtAP;

1540:   /* Cmpi is not ready for use - assembly will be done by MatPtAPNumeric() */
1541:   Cmpi->assembled = PETSC_FALSE;
1542:   /* pick an algorithm */
1543:   PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"MatPtAP","Mat");
1544:   alg = 0;
1545:   PetscOptionsEList("-matptap_allatonce_via","PtAP allatonce numeric approach","MatPtAP",algTypes,nalg,algTypes[alg],&alg,&flg);
1546:   PetscOptionsEnd();
1547:   switch (alg) {
1548:     case 0:
1549:       Cmpi->ops->ptapnumeric = MatPtAPNumeric_MPIAIJ_MPIAIJ_allatonce_merged;
1550:       break;
1551:     case 1:
1552:       Cmpi->ops->ptapnumeric = MatPtAPNumeric_MPIAIJ_MPIAIJ_allatonce;
1553:       break;
1554:     default:
1555:       SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unsupported allatonce numerical algorithm\n");
1556:   }
1557:   return(0);
1558: }
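/* Usage sketch (an illustration, not part of the original file): the numeric kernel chosen
 * above can be switched at run time through the PetscOptionsEList() option registered here,
 *   -matptap_allatonce_via <value from algTypes[]>
 * while the PtAP algorithm itself ("scalable", "nonscalable", "allatonce",
 * "allatonce_merged") is selected through the MatProduct interface; see
 * MatProductSymbolic_PtAP_MPIAIJ_MPIAIJ() at the end of this file. */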

1560: PetscErrorCode MatPtAPSymbolic_MPIAIJ_MPIAIJ_allatonce_merged(Mat A,Mat P,PetscReal fill,Mat C)
1561: {

1565:   MatPtAPSymbolic_MPIAIJ_MPIXAIJ_allatonce_merged(A,P,1,fill,C);
1566:   return(0);
1567: }

1569: PetscErrorCode MatPtAPSymbolic_MPIAIJ_MPIAIJ(Mat A,Mat P,PetscReal fill,Mat Cmpi)
1570: {
1571:   PetscErrorCode      ierr;
1572:   Mat_APMPI           *ptap;
1573:   Mat_MPIAIJ          *a=(Mat_MPIAIJ*)A->data,*p=(Mat_MPIAIJ*)P->data;
1574:   MPI_Comm            comm;
1575:   PetscMPIInt         size,rank;
1576:   PetscFreeSpaceList  free_space=NULL,current_space=NULL;
1577:   PetscInt            am=A->rmap->n,pm=P->rmap->n,pN=P->cmap->N,pn=P->cmap->n;
1578:   PetscInt            *lnk,i,k,pnz,row,nsend;
1579:   PetscBT             lnkbt;
1580:   PetscMPIInt         tagi,tagj,*len_si,*len_s,*len_ri,nrecv;
1581:   PETSC_UNUSED PetscMPIInt icompleted=0;
1582:   PetscInt            **buf_rj,**buf_ri,**buf_ri_k;
1583:   PetscInt            len,proc,*dnz,*onz,*owners,nzi,nspacedouble;
1584:   PetscInt            nrows,*buf_s,*buf_si,*buf_si_i,**nextrow,**nextci;
1585:   MPI_Request         *swaits,*rwaits;
1586:   MPI_Status          *sstatus,rstatus;
1587:   PetscLayout         rowmap;
1588:   PetscInt            *owners_co,*coi,*coj;    /* i and j array of (p->B)^T*A*P - used in the communication */
1589:   PetscMPIInt         *len_r,*id_r;    /* lengths and source ranks of the messages to be received */
1590:   PetscInt            *api,*apj,*Jptr,apnz,*prmap=p->garray,con,j,ap_rmax=0,Crmax,*aj,*ai,*pi;
1591:   Mat_SeqAIJ          *p_loc,*p_oth=NULL,*ad=(Mat_SeqAIJ*)(a->A)->data,*ao=NULL,*c_loc,*c_oth;
1592:   PetscScalar         *apv;
1593:   PetscTable          ta;
1594:   MatType             mtype;
1595:   const char          *prefix;
1596: #if defined(PETSC_USE_INFO)
1597:   PetscReal           apfill;
1598: #endif

1601:   MatCheckProduct(Cmpi,4);
1602:   if (Cmpi->product->data) SETERRQ(PetscObjectComm((PetscObject)Cmpi),PETSC_ERR_PLIB,"Product data not empty");
1603:   PetscObjectGetComm((PetscObject)A,&comm);
1604:   MPI_Comm_size(comm,&size);
1605:   MPI_Comm_rank(comm,&rank);

1607:   if (size > 1) ao = (Mat_SeqAIJ*)(a->B)->data;

1609:   /* create symbolic parallel matrix Cmpi */
1610:   MatGetType(A,&mtype);
1611:   MatSetType(Cmpi,mtype);

1613:   /* Do dense axpy in MatPtAPNumeric_MPIAIJ_MPIAIJ() */
1614:   Cmpi->ops->ptapnumeric = MatPtAPNumeric_MPIAIJ_MPIAIJ;

1616:   /* create struct Mat_APMPI and attach it to C later */
1617:   PetscNew(&ptap);
1618:   ptap->reuse = MAT_INITIAL_MATRIX;
1619:   ptap->algType = 1;

1621:   /* get P_oth by taking rows of P (= non-zero cols of local A) from other processors */
1622:   MatGetBrowsOfAoCols_MPIAIJ(A,P,MAT_INITIAL_MATRIX,&ptap->startsj_s,&ptap->startsj_r,&ptap->bufa,&ptap->P_oth);
1623:   /* get P_loc by taking all local rows of P */
1624:   MatMPIAIJGetLocalMat(P,MAT_INITIAL_MATRIX,&ptap->P_loc);

1626:   /* (0) compute Rd = Pd^T, Ro = Po^T  */
1627:   /* --------------------------------- */
1628:   MatTranspose(p->A,MAT_INITIAL_MATRIX,&ptap->Rd);
1629:   MatTranspose(p->B,MAT_INITIAL_MATRIX,&ptap->Ro);

1631:   /* (1) compute symbolic AP = A_loc*P = Ad*P_loc + Ao*P_oth (api,apj) */
1632:   /* ----------------------------------------------------------------- */
1633:   p_loc  = (Mat_SeqAIJ*)(ptap->P_loc)->data;
1634:   if (ptap->P_oth) p_oth  = (Mat_SeqAIJ*)(ptap->P_oth)->data;

1636:   /* create and initialize a linked list */
1637:   PetscTableCreate(pn,pN,&ta); /* for computing AP_loc and Cmpi */
1638:   MatRowMergeMax_SeqAIJ(p_loc,ptap->P_loc->rmap->N,ta);
1639:   MatRowMergeMax_SeqAIJ(p_oth,ptap->P_oth->rmap->N,ta);
1640:   PetscTableGetCount(ta,&Crmax); /* Crmax = nnz(sum of Prows) */

1643:   PetscLLCondensedCreate(Crmax,pN,&lnk,&lnkbt);

1645:   /* Initial FreeSpace size is fill*(nnz(A) + nnz(P)) */
1646:   if (ao) {
1647:     PetscFreeSpaceGet(PetscRealIntMultTruncate(fill,PetscIntSumTruncate(ad->i[am],PetscIntSumTruncate(ao->i[am],p_loc->i[pm]))),&free_space);
1648:   } else {
1649:     PetscFreeSpaceGet(PetscRealIntMultTruncate(fill,PetscIntSumTruncate(ad->i[am],p_loc->i[pm])),&free_space);
1650:   }
1651:   current_space = free_space;
1652:   nspacedouble  = 0;
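  /* The loop below computes the symbolic product row by row: for each local row i of A it
   * merges the column patterns of the rows of P_loc/P_oth selected by A's nonzeros, using
   * the condensed linked list lnk to keep the merged pattern sorted and duplicate-free. */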

1654:   PetscMalloc1(am+1,&api);
1655:   api[0] = 0;
1656:   for (i=0; i<am; i++) {
1657:     /* diagonal portion: Ad[i,:]*P */
1658:     ai = ad->i; pi = p_loc->i;
1659:     nzi = ai[i+1] - ai[i];
1660:     aj  = ad->j + ai[i];
1661:     for (j=0; j<nzi; j++) {
1662:       row  = aj[j];
1663:       pnz  = pi[row+1] - pi[row];
1664:       Jptr = p_loc->j + pi[row];
1665:       /* add non-zero cols of P into the sorted linked list lnk */
1666:       PetscLLCondensedAddSorted(pnz,Jptr,lnk,lnkbt);
1667:     }
1668:     /* off-diagonal portion: Ao[i,:]*P */
1669:     if (ao) {
1670:       ai = ao->i; pi = p_oth->i;
1671:       nzi = ai[i+1] - ai[i];
1672:       aj  = ao->j + ai[i];
1673:       for (j=0; j<nzi; j++) {
1674:         row  = aj[j];
1675:         pnz  = pi[row+1] - pi[row];
1676:         Jptr = p_oth->j + pi[row];
1677:         PetscLLCondensedAddSorted(pnz,Jptr,lnk,lnkbt);
1678:       }
1679:     }
1680:     apnz     = lnk[0];
1681:     api[i+1] = api[i] + apnz;
1682:     if (ap_rmax < apnz) ap_rmax = apnz;

1684:     /* if free space is not available, double the total space in the list */
1685:     if (current_space->local_remaining<apnz) {
1686:       PetscFreeSpaceGet(PetscIntSumTruncate(apnz,current_space->total_array_size),&current_space);
1687:       nspacedouble++;
1688:     }

1690:     /* Copy data into free space, then initialize lnk */
1691:     PetscLLCondensedClean(pN,apnz,current_space->array,lnk,lnkbt);

1693:     current_space->array           += apnz;
1694:     current_space->local_used      += apnz;
1695:     current_space->local_remaining -= apnz;
1696:   }
1697:   /* Allocate space for apj and apv, initialize apj, and */
1698:   /* destroy list of free space and other temporary array(s) */
1699:   PetscMalloc2(api[am],&apj,api[am],&apv);
1700:   PetscFreeSpaceContiguous(&free_space,apj);
1701:   PetscLLDestroy(lnk,lnkbt);

1703:   /* Create AP_loc for reuse */
1704:   MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,am,pN,api,apj,apv,&ptap->AP_loc);
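  /* MatCreateSeqAIJWithArrays() does not copy api/apj/apv; they remain owned by ptap and
   * are freed in MatDestroy_MPIAIJ_PtAP() (the ap->i/ap->j/ap->a frees in the alg_rap branch). */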

1706: #if defined(PETSC_USE_INFO)
1707:   if (ao) {
1708:     apfill = (PetscReal)api[am]/(ad->i[am]+ao->i[am]+p_loc->i[pm]+1);
1709:   } else {
1710:     apfill = (PetscReal)api[am]/(ad->i[am]+p_loc->i[pm]+1);
1711:   }
1712:   ptap->AP_loc->info.mallocs           = nspacedouble;
1713:   ptap->AP_loc->info.fill_ratio_given  = fill;
1714:   ptap->AP_loc->info.fill_ratio_needed = apfill;

1716:   if (api[am]) {
1717:     PetscInfo3(ptap->AP_loc,"Nonscalable algorithm, AP_loc reallocs %D; Fill ratio: given %g needed %g.\n",nspacedouble,(double)fill,(double)apfill);
1718:     PetscInfo1(ptap->AP_loc,"Use MatPtAP(A,P,MatReuse,%g,&C) for best AP_loc performance.\n",(double)apfill);
1719:   } else {
1720:     PetscInfo(ptap->AP_loc,"Nonscalable algorithm, AP_loc is empty\n");
1721:   }
1722: #endif

1724:   /* (2-1) compute symbolic Co = Ro*AP_loc  */
1725:   /* ------------------------------------ */
1726:   MatGetOptionsPrefix(A,&prefix);
1727:   MatSetOptionsPrefix(ptap->Ro,prefix);
1728:   MatAppendOptionsPrefix(ptap->Ro,"inner_offdiag_");

1730:   MatProductCreate(ptap->Ro,ptap->AP_loc,NULL,&ptap->C_oth);
1731:   MatGetOptionsPrefix(Cmpi,&prefix);
1732:   MatSetOptionsPrefix(ptap->C_oth,prefix);
1733:   MatAppendOptionsPrefix(ptap->C_oth,"inner_C_oth_");

1735:   MatProductSetType(ptap->C_oth,MATPRODUCT_AB);
1736:   MatProductSetAlgorithm(ptap->C_oth,"default");
1737:   MatProductSetFill(ptap->C_oth,fill);
1738:   MatProductSetFromOptions(ptap->C_oth);
1739:   MatProductSymbolic(ptap->C_oth);

1741:   /* (3) send coj of C_oth to other processors  */
1742:   /* ------------------------------------------ */
1743:   /* determine row ownership */
1744:   PetscLayoutCreate(comm,&rowmap);
1745:   rowmap->n  = pn;
1746:   rowmap->bs = 1;
1747:   PetscLayoutSetUp(rowmap);
1748:   owners = rowmap->range;

1750:   /* determine the number of messages to send, their lengths */
1751:   PetscMalloc4(size,&len_s,size,&len_si,size,&sstatus,size+2,&owners_co);
1752:   PetscArrayzero(len_s,size);
1753:   PetscArrayzero(len_si,size);

1755:   c_oth = (Mat_SeqAIJ*)ptap->C_oth->data;
1756:   coi   = c_oth->i; coj = c_oth->j;
1757:   con   = ptap->C_oth->rmap->n;
1758:   proc  = 0;
1759:   for (i=0; i<con; i++) {
1760:     while (prmap[i] >= owners[proc+1]) proc++;
1761:     len_si[proc]++;               /* num of rows in Co(=Po^T*A*P) to be sent to [proc] */
1762:     len_s[proc] += coi[i+1] - coi[i]; /* num of nonzeros in Co to be sent to [proc] */
1763:   }

1765:   len          = 0; /* total length of buf_s[], see (4) */
1766:   owners_co[0] = 0;
1767:   nsend        = 0;
1768:   for (proc=0; proc<size; proc++) {
1769:     owners_co[proc+1] = owners_co[proc] + len_si[proc];
1770:     if (len_s[proc]) {
1771:       nsend++;
1772:       len_si[proc] = 2*(len_si[proc] + 1); /* length of buf_si to be sent to [proc] */
1773:       len         += len_si[proc];
1774:     }
1775:   }

1777:   /* determine the number and length of messages to receive for coi and coj  */
1778:   PetscGatherNumberOfMessages(comm,NULL,len_s,&nrecv);
1779:   PetscGatherMessageLengths2(comm,nsend,nrecv,len_s,len_si,&id_r,&len_r,&len_ri);

1781:   /* post the Irecv and Isend of coj */
1782:   PetscCommGetNewTag(comm,&tagj);
1783:   PetscPostIrecvInt(comm,tagj,nrecv,id_r,len_r,&buf_rj,&rwaits);
1784:   PetscMalloc1(nsend+1,&swaits);
1785:   for (proc=0, k=0; proc<size; proc++) {
1786:     if (!len_s[proc]) continue;
1787:     i    = owners_co[proc];
1788:     MPI_Isend(coj+coi[i],len_s[proc],MPIU_INT,proc,tagj,comm,swaits+k);
1789:     k++;
1790:   }
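  /* The coj sends are now in flight; the symbolic C_loc product below is computed while the
   * messages are in transit, overlapping communication with computation. */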

1792:   /* (2-2) compute symbolic C_loc = Rd*AP_loc */
1793:   /* ---------------------------------------- */
1794:   MatSetOptionsPrefix(ptap->Rd,prefix);
1795:   MatAppendOptionsPrefix(ptap->Rd,"inner_diag_");

1797:   MatProductCreate(ptap->Rd,ptap->AP_loc,NULL,&ptap->C_loc);
1798:   MatGetOptionsPrefix(Cmpi,&prefix);
1799:   MatSetOptionsPrefix(ptap->C_loc,prefix);
1800:   MatAppendOptionsPrefix(ptap->C_loc,"inner_C_loc_");

1802:   MatProductSetType(ptap->C_loc,MATPRODUCT_AB);
1803:   MatProductSetAlgorithm(ptap->C_loc,"default");
1804:   MatProductSetFill(ptap->C_loc,fill);
1805:   MatProductSetFromOptions(ptap->C_loc);
1806:   MatProductSymbolic(ptap->C_loc);

1808:   c_loc = (Mat_SeqAIJ*)ptap->C_loc->data;

1810:   /* wait until the receives of coj are complete */
1811:   for (i=0; i<nrecv; i++) {
1812:     MPI_Waitany(nrecv,rwaits,&icompleted,&rstatus);
1813:   }
1814:   PetscFree(rwaits);
1815:   if (nsend) {MPI_Waitall(nsend,swaits,sstatus);}

1817:   /* add received column indices into ta to update Crmax */
1818:   for (k=0; k<nrecv; k++) {/* k-th received message */
1819:     Jptr = buf_rj[k];
1820:     for (j=0; j<len_r[k]; j++) {
1821:       PetscTableAdd(ta,*(Jptr+j)+1,1,INSERT_VALUES);
1822:     }
1823:   }
1824:   PetscTableGetCount(ta,&Crmax);
1825:   PetscTableDestroy(&ta);

1827:   /* (4) send and recv coi */
1828:   /*-----------------------*/
1829:   PetscCommGetNewTag(comm,&tagi);
1830:   PetscPostIrecvInt(comm,tagi,nrecv,id_r,len_ri,&buf_ri,&rwaits);
1831:   PetscMalloc1(len+1,&buf_s);
1832:   buf_si = buf_s;  /* points to the beginning of k-th msg to be sent */
1833:   for (proc=0,k=0; proc<size; proc++) {
1834:     if (!len_s[proc]) continue;
1835:     /* form outgoing message for i-structure:
1836:          buf_si[0]:                 nrows to be sent
1837:                [1:nrows]:           row index (global)
1838:                [nrows+1:2*nrows+1]: i-structure index
1839:     */
1840:     /*-------------------------------------------*/
1841:     nrows       = len_si[proc]/2 - 1; /* num of rows in Co to be sent to [proc] */
1842:     buf_si_i    = buf_si + nrows+1;
1843:     buf_si[0]   = nrows;
1844:     buf_si_i[0] = 0;
1845:     nrows       = 0;
1846:     for (i=owners_co[proc]; i<owners_co[proc+1]; i++) {
1847:       nzi = coi[i+1] - coi[i];
1848:       buf_si_i[nrows+1] = buf_si_i[nrows] + nzi;  /* i-structure */
1849:       buf_si[nrows+1]   = prmap[i] -owners[proc]; /* local row index */
1850:       nrows++;
1851:     }
1852:     MPI_Isend(buf_si,len_si[proc],MPIU_INT,proc,tagi,comm,swaits+k);
1853:     k++;
1854:     buf_si += len_si[proc];
1855:   }
1856:   for (i=0; i<nrecv; i++) {
1857:     MPI_Waitany(nrecv,rwaits,&icompleted,&rstatus);
1858:   }
1859:   PetscFree(rwaits);
1860:   if (nsend) {MPI_Waitall(nsend,swaits,sstatus);}

1862:   PetscFree4(len_s,len_si,sstatus,owners_co);
1863:   PetscFree(len_ri);
1864:   PetscFree(swaits);
1865:   PetscFree(buf_s);

1867:   /* (5) compute the local portion of Cmpi      */
1868:   /* ------------------------------------------ */
1869:   /* set the initial free space to Crmax, sufficient to hold the nonzeros in each row of Cmpi */
1870:   PetscFreeSpaceGet(Crmax,&free_space);
1871:   current_space = free_space;

1873:   PetscMalloc3(nrecv,&buf_ri_k,nrecv,&nextrow,nrecv,&nextci);
1874:   for (k=0; k<nrecv; k++) {
1875:     buf_ri_k[k] = buf_ri[k]; /* beginning of k-th received i-structure */
1876:     nrows       = *buf_ri_k[k];
1877:     nextrow[k]  = buf_ri_k[k] + 1;  /* next row number of k-th received i-structure */
1878:     nextci[k]   = buf_ri_k[k] + (nrows + 1); /* points to the next i-structure of the k-th received i-structure */
1879:   }

1881:   MatPreallocateInitialize(comm,pn,pn,dnz,onz);
1882:   PetscLLCondensedCreate(Crmax,pN,&lnk,&lnkbt);
1883:   for (i=0; i<pn; i++) {
1884:     /* add C_loc into Cmpi */
1885:     nzi  = c_loc->i[i+1] - c_loc->i[i];
1886:     Jptr = c_loc->j + c_loc->i[i];
1887:     PetscLLCondensedAddSorted(nzi,Jptr,lnk,lnkbt);

1889:     /* add received col data into lnk */
1890:     for (k=0; k<nrecv; k++) { /* k-th received message */
1891:       if (i == *nextrow[k]) { /* i-th row */
1892:         nzi  = *(nextci[k]+1) - *nextci[k];
1893:         Jptr = buf_rj[k] + *nextci[k];
1894:         PetscLLCondensedAddSorted(nzi,Jptr,lnk,lnkbt);
1895:         nextrow[k]++; nextci[k]++;
1896:       }
1897:     }
1898:     nzi = lnk[0];

1900:     /* copy data into free space, then initialize lnk */
1901:     PetscLLCondensedClean(pN,nzi,current_space->array,lnk,lnkbt);
1902:     MatPreallocateSet(i+owners[rank],nzi,current_space->array,dnz,onz);
1903:   }
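  /* dnz[]/onz[] now hold, for each locally owned row of Cmpi, the nonzero counts of its
   * diagonal and off-diagonal blocks, merged from C_loc and all received contributions. */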
1904:   PetscFree3(buf_ri_k,nextrow,nextci);
1905:   PetscLLDestroy(lnk,lnkbt);
1906:   PetscFreeSpaceDestroy(free_space);

1908:   /* local sizes and preallocation */
1909:   MatSetSizes(Cmpi,pn,pn,PETSC_DETERMINE,PETSC_DETERMINE);
1910:   if (P->cmap->bs > 0) {
1911:     PetscLayoutSetBlockSize(Cmpi->rmap,P->cmap->bs);
1912:     PetscLayoutSetBlockSize(Cmpi->cmap,P->cmap->bs);
1913:   }
1914:   MatMPIAIJSetPreallocation(Cmpi,0,dnz,0,onz);
1915:   MatPreallocateFinalize(dnz,onz);

1917:   /* members in merge */
1918:   PetscFree(id_r);
1919:   PetscFree(len_r);
1920:   PetscFree(buf_ri[0]);
1921:   PetscFree(buf_ri);
1922:   PetscFree(buf_rj[0]);
1923:   PetscFree(buf_rj);
1924:   PetscLayoutDestroy(&rowmap);

1926:   PetscCalloc1(pN,&ptap->apa);

1928:   /* attach the supporting struct to Cmpi for reuse */
1929:   Cmpi->product->data    = ptap;
1930:   Cmpi->product->destroy = MatDestroy_MPIAIJ_PtAP;
1931:   Cmpi->product->view    = MatView_MPIAIJ_PtAP;

1933:   /* Cmpi is not ready for use - assembly will be done by MatPtAPNumeric() */
1934:   Cmpi->assembled = PETSC_FALSE;
1935:   return(0);
1936: }
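/* Note: this "nonscalable" variant keeps a dense accumulator of length pN (the global number
 * of columns of P, allocated above as ptap->apa), so its memory footprint grows with the
 * global problem size; the scalable variant is designed to avoid such global-length arrays. */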

1938: PetscErrorCode MatPtAPNumeric_MPIAIJ_MPIAIJ(Mat A,Mat P,Mat C)
1939: {
1940:   PetscErrorCode    ierr;
1941:   Mat_MPIAIJ        *a=(Mat_MPIAIJ*)A->data,*p=(Mat_MPIAIJ*)P->data;
1942:   Mat_SeqAIJ        *ad=(Mat_SeqAIJ*)(a->A)->data,*ao=(Mat_SeqAIJ*)(a->B)->data;
1943:   Mat_SeqAIJ        *ap,*p_loc,*p_oth=NULL,*c_seq;
1944:   Mat_APMPI         *ptap;
1945:   Mat               AP_loc,C_loc,C_oth;
1946:   PetscInt          i,rstart,rend,cm,ncols,row;
1947:   PetscInt          *api,*apj,am = A->rmap->n,j,col,apnz;
1948:   PetscScalar       *apa;
1949:   const PetscInt    *cols;
1950:   const PetscScalar *vals;

1953:   MatCheckProduct(C,3);
1954:   ptap = (Mat_APMPI*)C->product->data;
1955:   if (!ptap) SETERRQ(PetscObjectComm((PetscObject)C),PETSC_ERR_ARG_WRONGSTATE,"PtAP cannot be computed. Missing data");
1956:   if (!ptap->AP_loc) SETERRQ(PetscObjectComm((PetscObject)C),PETSC_ERR_ARG_WRONGSTATE,"PtAP cannot be reused. Do not call MatProductClear()");

1958:   MatZeroEntries(C);
1959:   /* 1) get R = Pd^T,Ro = Po^T */
1960:   if (ptap->reuse == MAT_REUSE_MATRIX) {
1961:     MatTranspose(p->A,MAT_REUSE_MATRIX,&ptap->Rd);
1962:     MatTranspose(p->B,MAT_REUSE_MATRIX,&ptap->Ro);
1963:   }

1965:   /* 2) get AP_loc */
1966:   AP_loc = ptap->AP_loc;
1967:   ap = (Mat_SeqAIJ*)AP_loc->data;

1969:   /* 2-1) get P_oth = ptap->P_oth  and P_loc = ptap->P_loc */
1970:   /*-----------------------------------------------------*/
1971:   if (ptap->reuse == MAT_REUSE_MATRIX) {
1972:     /* P_oth and P_loc are obtained in MatPtAPSymbolic() when reuse == MAT_INITIAL_MATRIX */
1973:     MatGetBrowsOfAoCols_MPIAIJ(A,P,MAT_REUSE_MATRIX,&ptap->startsj_s,&ptap->startsj_r,&ptap->bufa,&ptap->P_oth);
1974:     MatMPIAIJGetLocalMat(P,MAT_REUSE_MATRIX,&ptap->P_loc);
1975:   }

1977:   /* 2-2) compute numeric A_loc*P - dominating part */
1978:   /* ---------------------------------------------- */
1979:   /* get data from symbolic products */
1980:   p_loc = (Mat_SeqAIJ*)(ptap->P_loc)->data;
1981:   if (ptap->P_oth) {
1982:     p_oth = (Mat_SeqAIJ*)(ptap->P_oth)->data;
1983:   }
1984:   apa   = ptap->apa;
1985:   api   = ap->i;
1986:   apj   = ap->j;
1987:   for (i=0; i<am; i++) {
1988:     /* AP[i,:] = A[i,:]*P = Ad[i,:]*P_loc + Ao[i,:]*P_oth */
1989:     AProw_nonscalable(i,ad,ao,p_loc,p_oth,apa);
1990:     apnz = api[i+1] - api[i];
1991:     for (j=0; j<apnz; j++) {
1992:       col = apj[j+api[i]];
1993:       ap->a[j+ap->i[i]] = apa[col];
1994:       apa[col] = 0.0;
1995:     }
1996:   }
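  /* apa acts as a dense sparse-accumulator of global column length: AProw_nonscalable()
   * scatters row i of A*P into apa, and the inner loop gathers the entries back out
   * through the known sparsity pattern apj, zeroing apa for the next row. */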
1997:   /* We have modified the contents of local matrix AP_loc and must increase its ObjectState, since we are not doing AssemblyBegin/End on it. */
1998:   PetscObjectStateIncrease((PetscObject)AP_loc);

2000:   /* 3) C_loc = Rd*AP_loc, C_oth = Ro*AP_loc */
2001:   ((ptap->C_loc)->ops->matmultnumeric)(ptap->Rd,AP_loc,ptap->C_loc);
2002:   ((ptap->C_oth)->ops->matmultnumeric)(ptap->Ro,AP_loc,ptap->C_oth);
2003:   C_loc = ptap->C_loc;
2004:   C_oth = ptap->C_oth;

2006:   /* add C_loc and C_oth to C */
2007:   MatGetOwnershipRange(C,&rstart,&rend);

2009:   /* C_loc -> C */
2010:   cm    = C_loc->rmap->N;
2011:   c_seq = (Mat_SeqAIJ*)C_loc->data;
2012:   cols = c_seq->j;
2013:   vals = c_seq->a;


2016:   /* The (fast) MatSetValues_MPIAIJ_CopyFromCSRFormat() can only be used when C->was_assembled is PETSC_FALSE */
2017:   /* and there are no off-processor parts. */
2018:   /* If was_assembled is true, the statement aj[rowstart_diag+dnz_row] = mat_j[col] - cstart; in MatSetValues_MPIAIJ_CopyFromCSRFormat() */
2019:   /* no longer holds; the more general MatSetValues_MPIAIJ() must be used instead, where each column index is looked up */
2020:   /* in a table and other, more expensive bookkeeping is done. */
2021:   if (C->assembled) {
2022:     C->was_assembled = PETSC_TRUE;
2023:     C->assembled     = PETSC_FALSE;
2024:   }
2025:   if (C->was_assembled) {
2026:     for (i=0; i<cm; i++) {
2027:       ncols = c_seq->i[i+1] - c_seq->i[i];
2028:       row = rstart + i;
2029:       MatSetValues_MPIAIJ(C,1,&row,ncols,cols,vals,ADD_VALUES);
2030:       cols += ncols; vals += ncols;
2031:     }
2032:   } else {
2033:     MatSetValues_MPIAIJ_CopyFromCSRFormat(C,c_seq->j,c_seq->i,c_seq->a);
2034:   }

2036:   /* Co -> C, off-processor part */
2037:   cm = C_oth->rmap->N;
2038:   c_seq = (Mat_SeqAIJ*)C_oth->data;
2039:   cols = c_seq->j;
2040:   vals = c_seq->a;
2041:   for (i=0; i<cm; i++) {
2042:     ncols = c_seq->i[i+1] - c_seq->i[i];
2043:     row = p->garray[i];
2044:     MatSetValues(C,1,&row,ncols,cols,vals,ADD_VALUES);
2045:     cols += ncols; vals += ncols;
2046:   }

2048:   MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);
2049:   MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);

2051:   ptap->reuse = MAT_REUSE_MATRIX;
2052:   return(0);
2053: }

2055: PETSC_INTERN PetscErrorCode MatProductSymbolic_PtAP_MPIAIJ_MPIAIJ(Mat C)
2056: {
2057:   PetscErrorCode      ierr;
2058:   Mat_Product         *product = C->product;
2059:   Mat                 A=product->A,P=product->B;
2060:   MatProductAlgorithm alg=product->alg;
2061:   PetscReal           fill=product->fill;
2062:   PetscBool           flg;

2065:   /* scalable: do R=P^T locally, then C=R*A*P */
2066:   PetscStrcmp(alg,"scalable",&flg);
2067:   if (flg) {
2068:     MatPtAPSymbolic_MPIAIJ_MPIAIJ_scalable(A,P,fill,C);
2069:     C->ops->productnumeric = MatProductNumeric_PtAP;
2070:     goto next;
2071:   }

2073:   /* nonscalable: do R=P^T locally, then C=R*A*P, using a dense work array of global column length (see ptap->apa above) */
2074:   PetscStrcmp(alg,"nonscalable",&flg);
2075:   if (flg) {
2076:     MatPtAPSymbolic_MPIAIJ_MPIAIJ(A,P,fill,C);
2077:     goto next;
2078:   }

2080:   /* allatonce */
2081:   PetscStrcmp(alg,"allatonce",&flg);
2082:   if (flg) {
2083:     MatPtAPSymbolic_MPIAIJ_MPIAIJ_allatonce(A,P,fill,C);
2084:     goto next;
2085:   }

2087:   /* allatonce_merged */
2088:   PetscStrcmp(alg,"allatonce_merged",&flg);
2089:   if (flg) {
2090:     MatPtAPSymbolic_MPIAIJ_MPIAIJ_allatonce_merged(A,P,fill,C);
2091:     goto next;
2092:   }

2094:   /* hypre */
2095: #if defined(PETSC_HAVE_HYPRE)
2096:   PetscStrcmp(alg,"hypre",&flg);
2097:   if (flg) {
2098:     MatPtAPSymbolic_AIJ_AIJ_wHYPRE(A,P,fill,C);
2099:     C->ops->productnumeric = MatProductNumeric_PtAP;
2100:     return(0);
2101:   }
2102: #endif
2103:   SETERRQ(PetscObjectComm((PetscObject)C),PETSC_ERR_SUP,"Mat Product Algorithm is not supported");

2105: next:
2106:   C->ops->productnumeric = MatProductNumeric_PtAP;
2107:   return(0);
2108: }
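/* Usage sketch (an illustration assuming standard MatProduct API usage, not part of the
 * original file): a caller reaches this dispatch with, e.g.,
 *   MatProductCreate(A,P,NULL,&C);
 *   MatProductSetType(C,MATPRODUCT_PtAP);
 *   MatProductSetAlgorithm(C,"allatonce");   // or "scalable", "nonscalable", "allatonce_merged"
 *   MatProductSetFromOptions(C);
 *   MatProductSymbolic(C);                   // ends up in the routine above
 *   MatProductNumeric(C);
 */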