Actual source code: mpiviennacl.cxx
petsc-3.13.6 2020-09-29

/*
   This file contains routines for Parallel vector operations.
 */
#include <petscconf.h>
#include <../src/vec/vec/impls/mpi/pvecimpl.h>
#include <../src/vec/vec/impls/seq/seqviennacl/viennaclvecimpl.h>

/*MC
   VECVIENNACL - VECVIENNACL = "viennacl" - A VECSEQVIENNACL on a single-process communicator, and VECMPIVIENNACL otherwise.

   Options Database Keys:
.  -vec_type viennacl - sets the vector type to VECVIENNACL during a call to VecSetFromOptions()

   Level: beginner

.seealso: VecCreate(), VecSetType(), VecSetFromOptions(), VecCreateMPIWithArray(), VECSEQVIENNACL, VECMPIVIENNACL, VECSTANDARD, VecType, VecCreateMPI()
M*/
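
/*
   A minimal usage sketch: create a generic vector and let the options database
   select the ViennaCL type (run with -vec_type viennacl).  All calls below are
   standard PETSc API; the global size 100 is arbitrary.

     Vec            x;
     PetscErrorCode ierr;

     ierr = VecCreate(PETSC_COMM_WORLD,&x);CHKERRQ(ierr);
     ierr = VecSetSizes(x,PETSC_DECIDE,100);CHKERRQ(ierr);
     ierr = VecSetFromOptions(x);CHKERRQ(ierr);    becomes VECVIENNACL under -vec_type viennacl
     ierr = VecSet(x,1.0);CHKERRQ(ierr);
     ierr = VecDestroy(&x);CHKERRQ(ierr);
*/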

PetscErrorCode VecDestroy_MPIViennaCL(Vec v)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  try {
    if (v->spptr) {
      delete ((Vec_ViennaCL*)v->spptr)->GPUarray;
      delete (Vec_ViennaCL*) v->spptr;
    }
  } catch(std::exception const & ex) {
    SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"ViennaCL error: %s", ex.what());
  }
  ierr = VecDestroy_MPI(v);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

PetscErrorCode VecNorm_MPIViennaCL(Vec xin,NormType type,PetscReal *z)
{
  PetscReal      sum,work = 0.0;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (type == NORM_2 || type == NORM_FROBENIUS) {
    ierr  = VecNorm_SeqViennaCL(xin,NORM_2,&work);CHKERRQ(ierr);
    work *= work;
    ierr  = MPIU_Allreduce(&work,&sum,1,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)xin));CHKERRQ(ierr);
    *z    = PetscSqrtReal(sum);
  } else if (type == NORM_1) {
    /* Find the local part */
    ierr = VecNorm_SeqViennaCL(xin,NORM_1,&work);CHKERRQ(ierr);
    /* Find the global sum */
    ierr = MPIU_Allreduce(&work,z,1,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)xin));CHKERRQ(ierr);
  } else if (type == NORM_INFINITY) {
    /* Find the local max */
    ierr = VecNorm_SeqViennaCL(xin,NORM_INFINITY,&work);CHKERRQ(ierr);
    /* Find the global max */
    ierr = MPIU_Allreduce(&work,z,1,MPIU_REAL,MPIU_MAX,PetscObjectComm((PetscObject)xin));CHKERRQ(ierr);
  } else if (type == NORM_1_AND_2) {
    PetscReal temp[2];
    ierr    = VecNorm_SeqViennaCL(xin,NORM_1,temp);CHKERRQ(ierr);
    ierr    = VecNorm_SeqViennaCL(xin,NORM_2,temp+1);CHKERRQ(ierr);
    temp[1] = temp[1]*temp[1];
    ierr    = MPIU_Allreduce(temp,z,2,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)xin));CHKERRQ(ierr);
    z[1]    = PetscSqrtReal(z[1]);
  }
  PetscFunctionReturn(0);
}
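
/*
   A minimal usage sketch of the pattern above: each process computes its local
   norm with the Seq kernel and a single MPIU_Allreduce combines the pieces (a
   sum for the 1- and 2-norms, a max for the infinity norm).  From user code:

     PetscReal nrm;
     ierr = VecNorm(x,NORM_2,&nrm);CHKERRQ(ierr);
*/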

PetscErrorCode VecDot_MPIViennaCL(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecDot_SeqViennaCL(xin,yin,&work);CHKERRQ(ierr);
  ierr = MPIU_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)xin));CHKERRQ(ierr);
  *z   = sum;
  PetscFunctionReturn(0);
}

PetscErrorCode VecTDot_MPIViennaCL(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecTDot_SeqViennaCL(xin,yin,&work);CHKERRQ(ierr);
  ierr = MPIU_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)xin));CHKERRQ(ierr);
  *z   = sum;
  PetscFunctionReturn(0);
}

PetscErrorCode VecMDot_MPIViennaCL(Vec xin,PetscInt nv,const Vec y[],PetscScalar *z)
{
  PetscScalar    awork[128],*work = awork;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (nv > 128) {
    ierr = PetscMalloc1(nv,&work);CHKERRQ(ierr);
  }
  ierr = VecMDot_SeqViennaCL(xin,nv,y,work);CHKERRQ(ierr);
  ierr = MPIU_Allreduce(work,z,nv,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)xin));CHKERRQ(ierr);
  if (nv > 128) {
    ierr = PetscFree(work);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
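
/*
   Note (sketch): all nv local dot products are computed on the device first and
   combined with a single MPIU_Allreduce of length nv, so the number of global
   reductions is independent of nv; the 128-entry stack buffer avoids a heap
   allocation in the common case.  A caller would write, e.g.:

     Vec         y[3];
     PetscScalar dots[3];
     ierr = VecMDot(x,3,y,dots);CHKERRQ(ierr);
*/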

/*MC
   VECMPIVIENNACL - VECMPIVIENNACL = "mpiviennacl" - The basic parallel vector, modified to use ViennaCL

   Options Database Keys:
.  -vec_type mpiviennacl - sets the vector type to VECMPIVIENNACL during a call to VecSetFromOptions()

   Level: beginner

.seealso: VecCreate(), VecSetType(), VecSetFromOptions(), VecCreateMPIWithArray(), VECMPI, VecType, VecCreateMPI()
M*/
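
/*
   A minimal usage sketch: the type can also be set explicitly rather than via
   the options database (the global size 100 is arbitrary).

     Vec            x;
     PetscErrorCode ierr;

     ierr = VecCreate(PETSC_COMM_WORLD,&x);CHKERRQ(ierr);
     ierr = VecSetSizes(x,PETSC_DECIDE,100);CHKERRQ(ierr);
     ierr = VecSetType(x,VECMPIVIENNACL);CHKERRQ(ierr);
     ierr = VecDestroy(&x);CHKERRQ(ierr);
*/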

PetscErrorCode VecDuplicate_MPIViennaCL(Vec win,Vec *v)
{
  PetscErrorCode ierr;
  Vec_MPI        *vw,*w = (Vec_MPI*)win->data;
  PetscScalar    *array;

  PetscFunctionBegin;
  ierr = VecCreate(PetscObjectComm((PetscObject)win),v);CHKERRQ(ierr);
  ierr = PetscLayoutReference(win->map,&(*v)->map);CHKERRQ(ierr);

  ierr = VecCreate_MPI_Private(*v,PETSC_FALSE,w->nghost,0);CHKERRQ(ierr);
  vw   = (Vec_MPI*)(*v)->data;
  ierr = PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));CHKERRQ(ierr);

  /* save local representation of the parallel vector (and scatter) if it exists */
  if (w->localrep) {
    ierr = VecGetArray(*v,&array);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,win->map->n+w->nghost,array,&vw->localrep);CHKERRQ(ierr);
    ierr = PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));CHKERRQ(ierr);
    ierr = VecRestoreArray(*v,&array);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)*v,(PetscObject)vw->localrep);CHKERRQ(ierr);
    vw->localupdate = w->localupdate;
    if (vw->localupdate) {
      ierr = PetscObjectReference((PetscObject)vw->localupdate);CHKERRQ(ierr);
    }
  }

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash   = win->stash.donotstash;
  (*v)->stash.ignorenegidx = win->stash.ignorenegidx;

  /* change type_name appropriately */
  ierr = PetscObjectChangeTypeName((PetscObject)(*v),VECMPIVIENNACL);CHKERRQ(ierr);

  ierr = PetscObjectListDuplicate(((PetscObject)win)->olist,&((PetscObject)(*v))->olist);CHKERRQ(ierr);
  ierr = PetscFunctionListDuplicate(((PetscObject)win)->qlist,&((PetscObject)(*v))->qlist);CHKERRQ(ierr);
  (*v)->map->bs   = PetscAbs(win->map->bs);
  (*v)->bstash.bs = win->bstash.bs;
  PetscFunctionReturn(0);
}
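
/*
   Sketch of the effect: duplication preserves the communicator, layout, ghost
   structure, and ViennaCL type of the parent, so the usual idiom applies:

     Vec y;
     ierr = VecDuplicate(x,&y);CHKERRQ(ierr);    y is VECMPIVIENNACL with x's layout
     ierr = VecDestroy(&y);CHKERRQ(ierr);
*/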

PetscErrorCode VecDotNorm2_MPIViennaCL(Vec s,Vec t,PetscScalar *dp,PetscScalar *nm)
{
  PetscErrorCode ierr;
  PetscScalar    work[2],sum[2];

  PetscFunctionBegin;
  ierr = VecDotNorm2_SeqViennaCL(s,t,work,work+1);CHKERRQ(ierr);
  ierr = MPIU_Allreduce((void*)&work,(void*)&sum,2,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)s));CHKERRQ(ierr);
  *dp  = sum[0];
  *nm  = sum[1];
  PetscFunctionReturn(0);
}
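
/*
   A minimal usage sketch: VecDotNorm2() returns the dot product s.t and the
   squared norm ||t||^2 from a single global reduction, halving the
   synchronization cost of separate VecDot() and VecNorm() calls.

     PetscScalar dp,nm;
     ierr = VecDotNorm2(s,t,&dp,&nm);CHKERRQ(ierr);
*/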

PETSC_EXTERN PetscErrorCode VecCreate_MPIViennaCL(Vec vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreate_MPI_Private(vv,PETSC_FALSE,0,0);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)vv,VECMPIVIENNACL);CHKERRQ(ierr);

  vv->ops->dotnorm2        = VecDotNorm2_MPIViennaCL;
  vv->ops->waxpy           = VecWAXPY_SeqViennaCL;
  vv->ops->duplicate       = VecDuplicate_MPIViennaCL;
  vv->ops->dot             = VecDot_MPIViennaCL;
  vv->ops->mdot            = VecMDot_MPIViennaCL;
  vv->ops->tdot            = VecTDot_MPIViennaCL;
  vv->ops->norm            = VecNorm_MPIViennaCL;
  vv->ops->scale           = VecScale_SeqViennaCL;
  vv->ops->copy            = VecCopy_SeqViennaCL;
  vv->ops->set             = VecSet_SeqViennaCL;
  vv->ops->swap            = VecSwap_SeqViennaCL;
  vv->ops->axpy            = VecAXPY_SeqViennaCL;
  vv->ops->axpby           = VecAXPBY_SeqViennaCL;
  vv->ops->maxpy           = VecMAXPY_SeqViennaCL;
  vv->ops->aypx            = VecAYPX_SeqViennaCL;
  vv->ops->axpbypcz        = VecAXPBYPCZ_SeqViennaCL;
  vv->ops->pointwisemult   = VecPointwiseMult_SeqViennaCL;
  vv->ops->setrandom       = VecSetRandom_SeqViennaCL;
  vv->ops->dot_local       = VecDot_SeqViennaCL;
  vv->ops->tdot_local      = VecTDot_SeqViennaCL;
  vv->ops->norm_local      = VecNorm_SeqViennaCL;
  vv->ops->mdot_local      = VecMDot_SeqViennaCL;
  vv->ops->destroy         = VecDestroy_MPIViennaCL;
  vv->ops->pointwisedivide = VecPointwiseDivide_SeqViennaCL;
  vv->ops->placearray      = VecPlaceArray_SeqViennaCL;
  vv->ops->replacearray    = VecReplaceArray_SeqViennaCL;
  vv->ops->resetarray      = VecResetArray_SeqViennaCL;
  /*
     get values?
   */
  ierr = VecViennaCLAllocateCheck(vv);CHKERRQ(ierr);
  ierr = VecViennaCLAllocateCheckHost(vv);CHKERRQ(ierr);
  ierr = VecSet(vv,0.0);CHKERRQ(ierr);
  ierr = VecSet_Seq(vv,0.0);CHKERRQ(ierr);
  vv->offloadmask = PETSC_OFFLOAD_BOTH;  /* zero initialization above is applied on both host and device */
  PetscFunctionReturn(0);
}

PETSC_EXTERN PetscErrorCode VecCreate_ViennaCL(Vec v)
{
  PetscMPIInt    size;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)v),&size);CHKERRQ(ierr);
  if (size == 1) {
    ierr = VecSetType(v,VECSEQVIENNACL);CHKERRQ(ierr);
  } else {
    ierr = VecSetType(v,VECMPIVIENNACL);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
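
/*
   A minimal usage sketch: with this dispatcher a caller never has to choose the
   sequential or parallel variant by hand; requesting the umbrella type suffices.

     ierr = VecSetType(v,VECVIENNACL);CHKERRQ(ierr);
     (resolves to VECSEQVIENNACL on one process, VECMPIVIENNACL otherwise)
*/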