Actual source code: ex14.c
static char help[] = "Test event log of VecScatter with various block sizes\n\n";

#include <petscvec.h>

int main(int argc,char **argv)
{
  PetscInt           i,j,low,high,n=256,N,errors,tot_errors;
  PetscInt           bs=1,ix[2],iy[2];
  PetscMPIInt        nproc,rank;
  PetscScalar        *xval;
  const PetscScalar  *yval;
  Vec                x,y;
  IS                 isx,isy;
  VecScatter         ctx;
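  /* number of scatter repetitions timed in each logging stage */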
  const PetscInt     niter = 10;
#if defined(PETSC_USE_LOG)
  PetscLogStage      stage1,stage2;
  PetscLogEvent      event1,event2;
  PetscLogDouble     numMessages,messageLength;
  PetscEventPerfInfo eventInfo;
  PetscInt           tot_msg,tot_len,avg_len;
#endif

  PetscInitialize(&argc,&argv,(char*)0,help);
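  /* start default performance logging so that PetscLogEventGetPerfInfo() below has per-event data to report */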
  PetscLogDefaultBegin();
  MPI_Comm_size(PETSC_COMM_WORLD,&nproc);
  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);

  PetscLogStageRegister("Scatter(bs=1)", &stage1);
  PetscLogEventRegister("VecScatter(bs=1)", PETSC_OBJECT_CLASSID, &event1);
  PetscLogStageRegister("Scatter(bs=4)", &stage2);
  PetscLogEventRegister("VecScatter(bs=4)", PETSC_OBJECT_CLASSID, &event2);
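  /* a separate stage/event pair per block size keeps the MPI traffic of the two scatters apart in the log */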

  /* Create a parallel vector x and a sequential vector y */
  VecCreate(PETSC_COMM_WORLD,&x);
  VecSetSizes(x,n,PETSC_DECIDE);
  VecSetFromOptions(x);
  VecGetOwnershipRange(x,&low,&high);
  VecGetSize(x,&N);
  VecCreateSeq(PETSC_COMM_SELF,n,&y);
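  /* x is distributed with n entries per rank (global size N); y is a local vector of which only the first few entries receive the scattered ghost values */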

  /*=======================================
     test VecScatter with bs = 1
    =======================================*/

  /* The code works as if we were doing 3-point stencil computations on a 1D domain x with
     periodic boundary conditions, except that the two ghost points are scattered to the
     beginning of y. */
  bs    = 1;
  ix[0] = rank? low-1 : N-1;           /* ix[] contains global indices of the two ghost points */
  ix[1] = (rank != nproc-1)? high : 0;
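  /* with the periodic wrap-around, rank 0 takes the last global entry as its left ghost and the last rank takes entry 0 as its right ghost; iy[] gives the local positions in y that receive them */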
  iy[0] = 0;
  iy[1] = 1;

  ISCreateGeneral(PETSC_COMM_SELF,2,ix,PETSC_COPY_VALUES,&isx);
  ISCreateGeneral(PETSC_COMM_SELF,2,iy,PETSC_COPY_VALUES,&isy);
  VecScatterCreate(x,isx,y,isy,&ctx);
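  /* call SetUp explicitly, otherwise messages in the implicit SetUp would be counted in the event below */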
  VecScatterSetUp(ctx);

  PetscLogStagePush(stage1);
  PetscLogEventBegin(event1,0,0,0,0);
  errors = 0;
  for (i=0; i<niter; i++) {
    /* set x = 0+i, 1+i, 2+i, ..., N-1+i */
    VecGetArray(x,&xval);
    for (j=0; j<n; j++) xval[j] = (PetscScalar)(low+j+i);
    VecRestoreArray(x,&xval);
    /* scatter the ghosts to y */
    VecScatterBegin(ctx,x,y,INSERT_VALUES,SCATTER_FORWARD);
    VecScatterEnd(ctx,x,y,INSERT_VALUES,SCATTER_FORWARD);
    /* check if y has correct values */
    VecGetArrayRead(y,&yval);
    if ((PetscInt)PetscRealPart(yval[0]) != ix[0]+i) errors++;
    if ((PetscInt)PetscRealPart(yval[1]) != ix[1]+i) errors++;
    VecRestoreArrayRead(y,&yval);
  }
  PetscLogEventEnd(event1,0,0,0,0);
  PetscLogStagePop();

  /* check whether wrong values were found on any process */
  MPI_Allreduce(&errors,&tot_errors,1,MPIU_INT,MPI_SUM,PETSC_COMM_WORLD);
  if (tot_errors) PetscPrintf(PETSC_COMM_WORLD,"Error: wrong values were scattered in VecScatter with bs = %" PetscInt_FMT "\n",bs);

  /* print out event log of VecScatter(bs=1) */
#if defined(PETSC_USE_LOG)
  PetscLogEventGetPerfInfo(stage1,event1,&eventInfo);
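  /* eventInfo holds per-rank counters; summing over ranks counts each message at both its sender and its receiver, which the 0.5 factor below compensates for */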
  MPI_Allreduce(&eventInfo.numMessages, &numMessages, 1,MPIU_PETSCLOGDOUBLE,MPI_SUM,PETSC_COMM_WORLD);
  MPI_Allreduce(&eventInfo.messageLength,&messageLength,1,MPIU_PETSCLOGDOUBLE,MPI_SUM,PETSC_COMM_WORLD);
  tot_msg = (PetscInt)(numMessages*0.5); /* two MPI calls (Send & Recv) per message */
  tot_len = (PetscInt)(messageLength*0.5);
  avg_len = tot_msg? (PetscInt)(messageLength/numMessages) : 0;
  /* when nproc > 2, tot_msg = 2*nproc*niter, tot_len = tot_msg*sizeof(PetscScalar)*bs */
  PetscPrintf(PETSC_COMM_WORLD,"VecScatter(bs=%" PetscInt_FMT ") has sent out %" PetscInt_FMT " messages, total %" PetscInt_FMT " bytes, with average length %" PetscInt_FMT " bytes\n",bs,tot_msg,tot_len,avg_len);
#endif

  ISDestroy(&isx);
  ISDestroy(&isy);
  VecScatterDestroy(&ctx);

  /*=======================================
     test VecScatter with bs = 4
    =======================================*/

  /* similar to the 3-point stencil above, except that this time a ghost is a block */
  bs    = 4;                            /* n needs to be a multiple of bs to make the following code work */
  ix[0] = rank? low/bs-1 : N/bs-1;      /* ix[] contains global indices of the two ghost blocks */
  ix[1] = (rank != nproc-1)? high/bs : 0;
  iy[0] = 0;
  iy[1] = 1;

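  /* with ISCreateBlock the indices above are block indices; the two ghost blocks of bs entries each land in y[0..bs-1] and y[bs..2*bs-1] */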
  ISCreateBlock(PETSC_COMM_SELF,bs,2,ix,PETSC_COPY_VALUES,&isx);
  ISCreateBlock(PETSC_COMM_SELF,bs,2,iy,PETSC_COPY_VALUES,&isy);

  VecScatterCreate(x,isx,y,isy,&ctx);
  /* Call SetUp explicitly, otherwise messages in implicit SetUp will be counted in events below */
  VecScatterSetUp(ctx);

  PetscLogStagePush(stage2);
  PetscLogEventBegin(event2,0,0,0,0);
  errors = 0;
  for (i=0; i<niter; i++) {
    /* set x = 0+i, 1+i, 2+i, ..., N-1+i */
    VecGetArray(x,&xval);
    for (j=0; j<n; j++) xval[j] = (PetscScalar)(low+j+i);
    VecRestoreArray(x,&xval);
    /* scatter the ghost blocks to y */
    VecScatterBegin(ctx,x,y,INSERT_VALUES,SCATTER_FORWARD);
    VecScatterEnd(ctx,x,y,INSERT_VALUES,SCATTER_FORWARD);
    /* check if y has correct values */
    VecGetArrayRead(y,&yval);
    if ((PetscInt)PetscRealPart(yval[0]) != ix[0]*bs+i) errors++;
    if ((PetscInt)PetscRealPart(yval[bs]) != ix[1]*bs+i) errors++;
    VecRestoreArrayRead(y,&yval);
  }
  PetscLogEventEnd(event2,0,0,0,0);
  PetscLogStagePop();

  /* check whether wrong values were found on any process */
  MPI_Allreduce(&errors,&tot_errors,1,MPIU_INT,MPI_SUM,PETSC_COMM_WORLD);
  if (tot_errors) PetscPrintf(PETSC_COMM_WORLD,"Error: wrong values were scattered in VecScatter with bs = %" PetscInt_FMT "\n",bs);

  /* print out event log of VecScatter(bs=4) */
#if defined(PETSC_USE_LOG)
  PetscLogEventGetPerfInfo(stage2,event2,&eventInfo);
  MPI_Allreduce(&eventInfo.numMessages, &numMessages, 1,MPIU_PETSCLOGDOUBLE,MPI_SUM,PETSC_COMM_WORLD);
  MPI_Allreduce(&eventInfo.messageLength,&messageLength,1,MPIU_PETSCLOGDOUBLE,MPI_SUM,PETSC_COMM_WORLD);
  tot_msg = (PetscInt)(numMessages*0.5); /* two MPI calls (Send & Recv) per message */
  tot_len = (PetscInt)(messageLength*0.5);
  avg_len = tot_msg? (PetscInt)(messageLength/numMessages) : 0;
  /* when nproc > 2, tot_msg = 2*nproc*niter, tot_len = tot_msg*sizeof(PetscScalar)*bs */
  PetscPrintf(PETSC_COMM_WORLD,"VecScatter(bs=%" PetscInt_FMT ") has sent out %" PetscInt_FMT " messages, total %" PetscInt_FMT " bytes, with average length %" PetscInt_FMT " bytes\n",bs,tot_msg,tot_len,avg_len);
#endif

  PetscPrintf(PETSC_COMM_WORLD,"Program finished\n");
  ISDestroy(&isx);
  ISDestroy(&isy);
  VecScatterDestroy(&ctx);
  VecDestroy(&x);
  VecDestroy(&y);
  PetscFinalize();
  return 0;
}

/*TEST

   test:
      nsize: 4
      args:
      requires: double defined(PETSC_USE_LOG)

TEST*/