Actual source code: ex146.c
petsc-3.13.6 2020-09-29
/* This program illustrates use of parallel real FFT */
static char help[] = "This program illustrates the use of parallel real 3D fftw (without PETSc interface)";

#include <petscmat.h>
#include <fftw3.h>
#include <fftw3-mpi.h>
int main(int argc,char **args)
{
  ptrdiff_t      N0=256,N1=256,N2=256,N3=2,dim[4];
  fftw_plan      bplan,fplan;
  fftw_complex   *out;
  double         *in1,*in2;
  ptrdiff_t      alloc_local,local_n0,local_0_start;
  ptrdiff_t      local_n1,local_1_start;
  PetscInt       i,j,indx,n1;
  PetscInt       n,N,*in,N_factor,NM;
  PetscMPIInt    size,rank;
  PetscScalar    *data_fin,value1,one=1.57,zero=0.0;
  PetscScalar    a,*x_arr,*y_arr,*z_arr;
  PetscReal      enorm;
  Vec            fin,fout,fout1,ini,final;
  PetscRandom    rnd;
  PetscErrorCode ierr;
  VecScatter     vecscat,vecscat1;
  IS             indx1,indx2;
  PetscInt       *indx3,k,l,*indx4;
  PetscInt       low,tempindx,tempindx1;
  ierr = PetscInitialize(&argc,&args,(char*)0,help);if (ierr) return ierr;
#if defined(PETSC_USE_COMPLEX)
  SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_SUP,"This example requires real numbers. Your current scalar type is complex");
#endif
  fftw_mpi_init(); /* FFTW's MPI interface must be initialized before any other fftw_mpi_* call */
  MPI_Comm_size(PETSC_COMM_WORLD,&size);
  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
  PetscRandomCreate(PETSC_COMM_WORLD,&rnd);
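  /* Query the local slab of the distributed 3D r2c transform: this rank owns
     local_n0 planes starting at plane local_0_start (and, for the transposed
     output layout, local_n1 planes starting at local_1_start). alloc_local is
     the required buffer size in units of fftw_complex. */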
  alloc_local = fftw_mpi_local_size_3d_transposed(N0,N1,N2/2+1,PETSC_COMM_WORLD,&local_n0,&local_0_start,&local_n1,&local_1_start);
  /* printf("The value alloc_local is %ld from process %d\n",alloc_local,rank); */
  printf("The value local_n0 is %ld from process %d\n",(long)local_n0,rank);
  /* printf("The value local_0_start is %ld from process %d\n",local_0_start,rank); */
  /* printf("The value local_n1 is %ld from process %d\n",local_n1,rank); */
  /* printf("The value local_1_start is %ld from process %d\n",local_1_start,rank); */
  /* Allocate space for input and output arrays */
  in1 = (double*)fftw_malloc(sizeof(double)*alloc_local*2);
  in2 = (double*)fftw_malloc(sizeof(double)*alloc_local*2);
  out = (fftw_complex*)fftw_malloc(sizeof(fftw_complex)*alloc_local);
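  /* alloc_local is measured in complex elements, so the real arrays need
     2*alloc_local doubles: in the r2c layout each row of N2 reals is padded
     to 2*(N2/2+1) values so it can hold the N2/2+1 complex outputs in place. */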
  N = 2*N0*N1*(N2/2+1); N_factor = N0*N1*N2;
  n = 2*local_n0*N1*(N2/2+1); n1 = local_n1*N0*2*N1;
  /* printf("The value N is %d from process %d\n",N,rank); */
  /* printf("The value n is %d from process %d\n",n,rank); */
  /* printf("The value n1 is %d from process %d\n",n1,rank); */

  /* Create data vectors whose storage is the FFTW arrays, with VecCreateMPIWithArray */
  VecCreateMPIWithArray(PETSC_COMM_WORLD,1,n,N,(PetscScalar*)in1,&fin);
  VecCreateMPIWithArray(PETSC_COMM_WORLD,1,n,N,(PetscScalar*)out,&fout);
  VecCreateMPIWithArray(PETSC_COMM_WORLD,1,n,N,(PetscScalar*)in2,&fout1);
  /* VecGetSize(fin,&size); */
  /* printf("The size is %d\n",size); */

  VecSet(fin,one);
  VecSet(fout,zero);
  VecSet(fout1,zero);

  VecAssemblyBegin(fin);
  VecAssemblyEnd(fin);
  /* VecView(fin,PETSC_VIEWER_STDOUT_WORLD); */
  VecGetArray(fin,&x_arr);
  VecGetArray(fout1,&z_arr);
  VecGetArray(fout,&y_arr);
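  /* Plan with FFTW_ESTIMATE, which keeps the planner from touching the
     arrays; measuring flags such as FFTW_MEASURE may overwrite the data
     already stored in fin during planning. */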
  fplan = fftw_mpi_plan_dft_r2c_3d(N0,N1,N2,(double*)x_arr,(fftw_complex*)y_arr,PETSC_COMM_WORLD,FFTW_ESTIMATE);
  bplan = fftw_mpi_plan_dft_c2r_3d(N0,N1,N2,(fftw_complex*)y_arr,(double*)z_arr,PETSC_COMM_WORLD,FFTW_ESTIMATE);
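  /* Note: FFTW's multidimensional c2r transform overwrites its complex
     input (here y_arr) as a side effect of the backward transform. */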
  fftw_execute(fplan);
  fftw_execute(bplan);

  VecRestoreArray(fin,&x_arr);
  VecRestoreArray(fout1,&z_arr);
  VecRestoreArray(fout,&y_arr);
  /* a = 1.0/(PetscReal)N_factor; */
  /* VecScale(fout1,a); */
  VecCreate(PETSC_COMM_WORLD,&ini);
  VecCreate(PETSC_COMM_WORLD,&final);
  VecSetSizes(ini,local_n0*N1*N2,N_factor);
  VecSetSizes(final,local_n0*N1*N2,N_factor);
  /* VecSetSizes(ini,PETSC_DECIDE,N_factor); */
  /* VecSetSizes(final,PETSC_DECIDE,N_factor); */
  VecSetFromOptions(ini);
  VecSetFromOptions(final);
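  /* NM is the padded length of one row of the real arrays, 2*(N2/2+1):
     N2+2 when N2 is even and N2+1 when N2 is odd. */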
  if (N2%2==0) NM = N2+2;
  else         NM = N2+1;
  VecGetOwnershipRange(fin,&low,NULL);
  printf("The local index is %ld from %d\n",(long)low,rank);
  PetscMalloc1(local_n0*N1*N2,&indx3);
  PetscMalloc1(local_n0*N1*N2,&indx4);
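  /* Build the index maps between the two layouts: indx3[t] is the global
     index of entry (i,j,k) in the contiguous N0*N1*N2 vectors ini/final,
     while indx4[t] is the global index of the same entry in the padded
     layout of fin/fout1 (row stride NM instead of N2). */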
  for (i=0; i<local_n0; i++) {
    for (j=0; j<N1; j++) {
      for (k=0; k<N2; k++) {
        tempindx  = i*N1*N2 + j*N2 + k;
        tempindx1 = i*N1*NM + j*NM + k;

        indx3[tempindx] = local_0_start*N1*N2 + tempindx;
        indx4[tempindx] = low + tempindx1;
      }
      /* printf("index3 %d from proc %d is \n",indx3[tempindx],rank); */
      /* printf("index4 %d from proc %d is \n",indx4[tempindx],rank); */
    }
  }
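  /* x_arr and y_arr are reused below purely as scratch buffers: after
     VecRestoreArray they still point at in1 and out, which remain allocated
     (and are large enough) until the end of the program. */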
  VecGetValues(fin,local_n0*N1*N2,indx4,x_arr);
  VecSetValues(ini,local_n0*N1*N2,indx3,x_arr,INSERT_VALUES);
  VecAssemblyBegin(ini);
  VecAssemblyEnd(ini);

  VecGetValues(fout1,local_n0*N1*N2,indx4,y_arr);
  VecSetValues(final,local_n0*N1*N2,indx3,y_arr,INSERT_VALUES);
  VecAssemblyBegin(final);
  VecAssemblyEnd(final);
  printf("The local index value is %ld from %d\n",(long)(local_n0*N1*N2),rank);
  /*
  for (i=0; i<N0; i++) {
    for (j=0; j<N1; j++) {
      indx = i*N1*NM + j*NM;
      ISCreateStride(PETSC_COMM_WORLD,N2,indx,1,&indx1);
      indx = i*N1*N2 + j*N2;
      ISCreateStride(PETSC_COMM_WORLD,N2,indx,1,&indx2);
      VecScatterCreate(fin,indx1,ini,indx2,&vecscat);
      VecScatterBegin(vecscat,fin,ini,INSERT_VALUES,SCATTER_FORWARD);
      VecScatterEnd(vecscat,fin,ini,INSERT_VALUES,SCATTER_FORWARD);
      VecScatterCreate(fout1,indx1,final,indx2,&vecscat1);
      VecScatterBegin(vecscat1,fout1,final,INSERT_VALUES,SCATTER_FORWARD);
      VecScatterEnd(vecscat1,fout1,final,INSERT_VALUES,SCATTER_FORWARD);
    }
  }
  */
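  /* FFTW transforms are unnormalized: executing fplan followed by bplan
     multiplies the data by N_factor = N0*N1*N2, so scale by 1/N_factor
     to recover the original input. */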
  a = 1.0/(PetscReal)N_factor;
  VecScale(fout1,a);
  VecScale(final,a);

  VecAssemblyBegin(ini);
  VecAssemblyEnd(ini);
  VecAssemblyBegin(final);
  VecAssemblyEnd(final);
  /* VecView(final,PETSC_VIEWER_STDOUT_WORLD); */
  VecAXPY(final,-1.0,ini);
  VecNorm(final,NORM_1,&enorm);
  PetscPrintf(PETSC_COMM_WORLD," Error norm of |x - z| = %e\n",(double)enorm);
  fftw_destroy_plan(fplan);
  fftw_destroy_plan(bplan);
  fftw_free(in1); VecDestroy(&fin);
  fftw_free(out); VecDestroy(&fout);
  fftw_free(in2); VecDestroy(&fout1);
  VecDestroy(&ini);
  VecDestroy(&final);
  PetscRandomDestroy(&rnd);
  PetscFree(indx3);
  PetscFree(indx4);
  ierr = PetscFinalize();
  return ierr;
}
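/* A typical invocation, assuming a PETSc build configured with FFTW support
   (for example via --download-fftw) and an executable named ex146 (both are
   assumptions, not taken from this source):

     mpiexec -n 4 ./ex146
*/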