Actual source code: vecnode.c
petsc-3.12.5 2020-03-29
#include <../src/vec/vec/impls/node/vecnodeimpl.h>
#include <../src/vec/vec/impls/mpi/pvecimpl.h>

#if defined(PETSC_HAVE_MPI_PROCESS_SHARED_MEMORY)

PetscErrorCode VecSetValues_Node(Vec xin,PetscInt ni,const PetscInt ix[],const PetscScalar y[],InsertMode addv)
{
  PetscFunctionBegin;
  SETERRQ(PetscObjectComm((PetscObject)xin),PETSC_ERR_SUP,"Not implemented yet");
  PetscFunctionReturn(0);
}

/* check that all blocks are filled */
static PetscErrorCode VecAssemblyBegin_Node(Vec v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
static PetscErrorCode VecAssemblyEnd_Node(Vec v)
{
  Vec_Node *s = (Vec_Node*)v->data;

  PetscFunctionBegin;
  s->array[-1] += 1.0; /* update local object state counter since this routine may change values of v */
  PetscFunctionReturn(0);
}
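/* Note on the state counter: VecCreate_Node() below allocates one extra
   PetscScalar in front of the numerical entries and advances s->array past
   it, so s->array[-1] addresses that hidden slot.  Every routine that may
   change the vector's values increments the counter; the apparent intent is
   to let the other MPI processes on the node, which share the memory window,
   detect that a neighbor's portion of the vector has changed. */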
static PetscErrorCode VecScale_Node(Vec v,PetscScalar alpha)
{
  PetscErrorCode ierr;
  Vec_Node       *s = (Vec_Node*)v->data;

  PetscFunctionBegin;
  ierr = VecScale_Seq(v,alpha);CHKERRQ(ierr);
  s->array[-1] += 1.0; /* update local object state counter since this routine changes values of v */
  PetscFunctionReturn(0);
}

static PetscErrorCode VecCopy_Node(Vec v,Vec y)
{
  PetscErrorCode ierr;
  Vec_Node       *s = (Vec_Node*)y->data;

  PetscFunctionBegin;
  ierr = VecCopy_Seq(v,y);CHKERRQ(ierr);
  s->array[-1] += 1.0; /* update local object state counter since this routine changes values of y */
  PetscFunctionReturn(0);
}

static PetscErrorCode VecSet_Node(Vec v,PetscScalar alpha)
{
  PetscErrorCode ierr;
  Vec_Node       *s = (Vec_Node*)v->data;

  PetscFunctionBegin;
  ierr = VecSet_Seq(v,alpha);CHKERRQ(ierr);
  s->array[-1] += 1.0; /* update local object state counter since this routine changes values of v */
  PetscFunctionReturn(0);
}
static PetscErrorCode VecDestroy_Node(Vec v)
{
  Vec_Node       *vs = (Vec_Node*)v->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Win_free(&vs->win);CHKERRQ(ierr);
  ierr = MPI_Comm_free(&vs->shmcomm);CHKERRQ(ierr);
  ierr = PetscFree(vs->winarray);CHKERRQ(ierr);
  ierr = PetscFree(vs);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
static PetscErrorCode VecDuplicate_Node(Vec x,Vec *y)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreate(PetscObjectComm((PetscObject)x),y);CHKERRQ(ierr);
  ierr = VecSetSizes(*y,x->map->n,x->map->N);CHKERRQ(ierr);
  ierr = VecSetType(*y,((PetscObject)x)->type_name);CHKERRQ(ierr);
  ierr = PetscLayoutReference(x->map,&(*y)->map);CHKERRQ(ierr);
  ierr = PetscObjectListDuplicate(((PetscObject)x)->olist,&((PetscObject)(*y))->olist);CHKERRQ(ierr);
  ierr = PetscFunctionListDuplicate(((PetscObject)x)->qlist,&((PetscObject)(*y))->qlist);CHKERRQ(ierr);

  ierr = PetscMemcpy((*y)->ops,x->ops,sizeof(struct _VecOps));CHKERRQ(ierr);

  /* the new vector should inherit the stashing properties of the parent */
  (*y)->stash.donotstash   = x->stash.donotstash;
  (*y)->stash.ignorenegidx = x->stash.ignorenegidx;

  (*y)->map->bs   = PetscAbs(x->map->bs);
  (*y)->bstash.bs = x->bstash.bs;
  PetscFunctionReturn(0);
}
static PetscErrorCode VecAYPX_Node(Vec y,PetscScalar alpha,Vec x)
{
  PetscErrorCode ierr;
  Vec_Node       *s = (Vec_Node*)y->data;

  PetscFunctionBegin;
  ierr = VecAYPX_Seq(y,alpha,x);CHKERRQ(ierr);
  s->array[-1] += 1.0; /* update local object state counter */
  PetscFunctionReturn(0);
}

static PetscErrorCode VecAXPBY_Node(Vec y,PetscScalar alpha,PetscScalar beta,Vec x)
{
  PetscFunctionBegin;
  SETERRQ(PetscObjectComm((PetscObject)x),PETSC_ERR_SUP,"Not implemented yet");
  PetscFunctionReturn(0);
}

static PetscErrorCode VecAXPBYPCZ_Node(Vec z,PetscScalar alpha,PetscScalar beta,PetscScalar gamma,Vec x,Vec y)
{
  PetscErrorCode ierr;
  Vec_Node       *s = (Vec_Node*)z->data;

  PetscFunctionBegin;
  ierr = VecAXPBYPCZ_Seq(z,alpha,beta,gamma,x,y);CHKERRQ(ierr);
  s->array[-1] += 1.0; /* update local object state counter */
  PetscFunctionReturn(0);
}

static PetscErrorCode VecConjugate_Node(Vec x)
{
  PetscFunctionBegin;
  SETERRQ(PetscObjectComm((PetscObject)x),PETSC_ERR_SUP,"Not implemented yet");
  PetscFunctionReturn(0);
}

static PetscErrorCode VecWAXPY_Node(Vec w,PetscScalar alpha,Vec x,Vec y)
{
  PetscErrorCode ierr;
  Vec_Node       *s = (Vec_Node*)w->data;

  PetscFunctionBegin;
  ierr = VecWAXPY_Seq(w,alpha,x,y);CHKERRQ(ierr);
  s->array[-1] += 1.0; /* update local object state counter */
  PetscFunctionReturn(0);
}
static PetscErrorCode VecMax_Node(Vec x,PetscInt *p,PetscReal *max)
{
  PetscFunctionBegin;
  SETERRQ(PetscObjectComm((PetscObject)x),PETSC_ERR_SUP,"Not implemented yet");
  PetscFunctionReturn(0);
}

static PetscErrorCode VecMin_Node(Vec x,PetscInt *p,PetscReal *min)
{
  PetscFunctionBegin;
  SETERRQ(PetscObjectComm((PetscObject)x),PETSC_ERR_SUP,"Not implemented yet");
  PetscFunctionReturn(0);
}

/* supports nested blocks */
static PetscErrorCode VecView_Node(Vec x,PetscViewer viewer)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecView_MPI(x,viewer);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
static PetscErrorCode VecGetArray_Node(Vec x,PetscScalar **a)
{
  Vec_Node *s = (Vec_Node*)x->data;

  PetscFunctionBegin;
  *a = s->array;
  PetscFunctionReturn(0);
}

static PetscErrorCode VecRestoreArray_Node(Vec x,PetscScalar **a)
{
  Vec_Node *s = (Vec_Node*)x->data;

  PetscFunctionBegin;
  s->array[-1] += 1.0; /* update local object state counter since the caller may have changed values of x */
  PetscFunctionReturn(0);
}

static PetscErrorCode VecGetArrayRead_Node(Vec x,const PetscScalar **a)
{
  Vec_Node *s = (Vec_Node*)x->data;

  PetscFunctionBegin;
  *a = s->array;
  PetscFunctionReturn(0);
}

/* This routine prevents VecRestoreArrayRead() from calling VecRestoreArray_Node(),
   which would increment s->array[-1] even though read access cannot change the values */
static PetscErrorCode VecRestoreArrayRead_Node(Vec x,const PetscScalar **a)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
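/* The function table below mixes three kinds of implementations: global
   reductions (VecDot, VecNorm, ...) use the _MPI kernels, purely local
   operations (VecSwap, VecAXPY, ...) reuse the _Seq kernels on the local
   portion of the shared array, and operations that overwrite values get the
   _Node wrappers above so the state counter is bumped as well. */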
static struct _VecOps DvOps = { VecDuplicate_Node, /* 1 */
                                VecDuplicateVecs_Default,
                                VecDestroyVecs_Default,
                                VecDot_MPI,
                                VecMDot_MPI,
                                VecNorm_MPI,
                                VecTDot_MPI,
                                VecMTDot_MPI,
                                VecScale_Node,
                                VecCopy_Node, /* 10 */
                                VecSet_Node,
                                VecSwap_Seq,
                                VecAXPY_Seq,
                                VecAXPBY_Node,
                                VecMAXPY_Seq,
                                VecAYPX_Node,
                                VecWAXPY_Node,
                                VecAXPBYPCZ_Node,
                                0,
                                0,
                                VecSetValues_Node, /* 20 */
                                VecAssemblyBegin_Node,
                                VecAssemblyEnd_Node,
                                VecGetArray_Node,
                                VecGetSize_MPI,
                                VecGetSize_Seq,
                                VecRestoreArray_Node,
                                VecMax_Node,
                                VecMin_Node,
                                VecSetRandom_Seq,
                                0,
                                VecSetValuesBlocked_Seq,
                                VecDestroy_Node,
                                VecView_Node,
                                VecPlaceArray_Seq,
                                VecReplaceArray_Seq,
                                VecDot_Seq,
                                VecTDot_Seq,
                                VecNorm_Seq,
                                VecMDot_Seq,
                                VecMTDot_Seq,
                                VecLoad_Default,
                                VecReciprocal_Default,
                                VecConjugate_Node,
                                0,
                                0,
                                VecResetArray_Seq,
                                0, /* set from options */
                                0,
                                0,
                                0,
                                0,
                                0,
                                0,
                                0,
                                0,
                                0,
                                0,
                                0,
                                0,
                                0,
                                0,
                                0,
                                0,
                                VecGetArrayRead_Node,
                                VecRestoreArrayRead_Node,
                                VecStrideSubSetGather_Default,
                                VecStrideSubSetScatter_Default,
                                0,
                                0,
                                0,
                                0,
                                0,
                                0
};
/*@C
   VecCreateNode - Creates a new parallel vector whose arrays are stored in on-node shared memory

   Collective

   Input Parameters:
+  comm - Communicator for the new Vec
.  n - local vector length (or PETSC_DECIDE to have it calculated if N is given)
-  N - global vector length (or PETSC_DETERMINE to have it calculated if n is given)

   Output Parameter:
.  v - the new vector

   Level: advanced

.seealso: VecCreate(), VecType(), VecCreateMPIWithArray(), VECNODE
@*/
PetscErrorCode VecCreateNode(MPI_Comm comm,PetscInt n,PetscInt N,Vec *v)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreate(comm,v);CHKERRQ(ierr);
  ierr = VecSetSizes(*v,n,N);CHKERRQ(ierr);
  ierr = VecSetType(*v,VECNODE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
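/* A minimal usage sketch (illustrative only, not part of the original file;
   requires an MPI with MPI_Win_allocate_shared, i.e.
   PETSC_HAVE_MPI_PROCESS_SHARED_MEMORY):

     #include <petscvec.h>

     int main(int argc,char **argv)
     {
       Vec            x;
       PetscErrorCode ierr;

       ierr = PetscInitialize(&argc,&argv,NULL,NULL);if (ierr) return ierr;
       ierr = VecCreateNode(PETSC_COMM_WORLD,PETSC_DECIDE,100,&x);CHKERRQ(ierr);
       ierr = VecSet(x,1.0);CHKERRQ(ierr);                  // dispatches to VecSet_Node()
       ierr = VecView(x,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
       ierr = VecDestroy(&x);CHKERRQ(ierr);
       ierr = PetscFinalize();
       return ierr;
     }
*/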
/*MC
   VECNODE - VECNODE = "node" - Parallel vector type that stores its entries in on-node shared memory.

   Level: intermediate

   Notes:
   The entries are allocated in an MPI-3 shared-memory window, so all MPI processes on the same
   compute node can access each other's local portion of the vector directly.

.seealso: VecCreate(), VecType, VecCreateNode()
M*/
PETSC_EXTERN PetscErrorCode VecCreate_Node(Vec v)
{
  PetscErrorCode ierr;
  Vec_Node       *s;
  PetscBool      alloc = PETSC_TRUE;
  PetscScalar    *array = NULL;
  MPI_Comm       shmcomm;
  MPI_Win        win;

  PetscFunctionBegin;
  ierr    = PetscNewLog(v,&s);CHKERRQ(ierr);
  v->data = (void*)s;
  ierr    = PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));CHKERRQ(ierr);
  v->petscnative = PETSC_FALSE;

  ierr = PetscLayoutSetUp(v->map);CHKERRQ(ierr);

  s->array           = (PetscScalar*)array;
  s->array_allocated = 0;

  if (alloc && !array) {
    PetscInt    n = v->map->n;
    PetscMPIInt msize,mrank,disp_unit;
    PetscInt    i;
    MPI_Aint    sz;

    /* allocate the local portion, plus one extra slot for the state counter, in a shared-memory window on the node communicator */
    ierr = MPI_Comm_split_type(PetscObjectComm((PetscObject)v),MPI_COMM_TYPE_SHARED,0,MPI_INFO_NULL,&shmcomm);CHKERRQ(ierr);
    ierr = MPIU_Win_allocate_shared((n+1)*sizeof(PetscScalar),sizeof(PetscScalar),MPI_INFO_NULL,shmcomm,&s->array,&win);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory((PetscObject)v,(n+1)*sizeof(PetscScalar));CHKERRQ(ierr);
    ierr = PetscArrayzero(s->array,n+1);CHKERRQ(ierr);
    s->array++; /* create initial space for the object state counter */

    /* record the base addresses of the other on-node processes' arrays, skipping their state-counter slot */
    ierr = MPI_Comm_size(shmcomm,&msize);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(shmcomm,&mrank);CHKERRQ(ierr);
    ierr = PetscMalloc1(msize,&s->winarray);CHKERRQ(ierr);
    for (i=0; i<msize; i++) {
      if (i != mrank) {
        ierr = MPIU_Win_shared_query(win,i,&sz,&disp_unit,&s->winarray[i]);CHKERRQ(ierr);
        s->winarray[i]++;
      }
    }
    s->win     = win;
    s->shmcomm = shmcomm;
  }

  ierr = PetscObjectChangeTypeName((PetscObject)v,VECNODE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#endif