/* Actual source code: ex3.c */
1: static char help[]= "Test PetscSFFetchAndOp on patterned SF graphs. PetscSFFetchAndOp internally uses PetscSFBcastAndOp \n\
2: and PetscSFReduce. So it is a good test to see if they all work for patterned graphs.\n\
3: Run with ./prog -op [replace | sum]\n\n";
5: #include <petscvec.h>
6: #include <petscsf.h>
7: int main(int argc,char **argv)
8: {
9: PetscInt i,N=10,low,high,nleaves;
10: PetscMPIInt size,rank;
11: Vec x,y,y2,gy2;
12: PetscScalar *rootdata,*leafdata,*leafupdate;
13: PetscLayout layout;
14: PetscSF gathersf,allgathersf,alltoallsf;
15: MPI_Op op=MPI_SUM;
16: char opname[64];
17: const char *mpiopname;
18: PetscBool flag,isreplace,issum;
20: PetscInitialize(&argc,&argv,(char*)0,help);
21: MPI_Comm_size(PETSC_COMM_WORLD,&size);
22: MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
24: PetscOptionsGetString(NULL,NULL,"-op",opname,sizeof(opname),&flag);
25: PetscStrcmp(opname,"replace",&isreplace);
26: PetscStrcmp(opname,"sum",&issum);
28: if (isreplace) {op = MPI_REPLACE; mpiopname = "MPI_REPLACE";}
29: else if (issum) {op = MPIU_SUM; mpiopname = "MPI_SUM";}
30: else SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_ARG_WRONG,"Unsupported argument (%s) to -op, which must be 'replace' or 'sum'",opname);
32: VecCreate(PETSC_COMM_WORLD,&x);
33: VecSetFromOptions(x);
34: VecSetSizes(x,PETSC_DECIDE,N);
36: /*-------------------------------------*/
37: /* PETSCSF_PATTERN_GATHER */
38: /*-------------------------------------*/
40: /* set MPI vec x to [1, 2, .., N] */
41: VecGetOwnershipRange(x,&low,&high);
42: for (i=low; i<high; i++) VecSetValue(x,i,(PetscScalar)i+1.0,INSERT_VALUES);
43: VecAssemblyBegin(x);
44: VecAssemblyEnd(x);
46: /* Create the gather SF */
47: PetscPrintf(PETSC_COMM_WORLD,"\nTesting PetscSFFetchAndOp on a PETSCSF_PATTERN_GATHER graph with op = %s\n",mpiopname);
48: VecGetLayout(x,&layout);
49: PetscSFCreate(PETSC_COMM_WORLD,&gathersf);
50: PetscSFSetGraphWithPattern(gathersf,layout,PETSCSF_PATTERN_GATHER);
52: /* Create the leaf vector y (seq vector) and its duplicate y2 working as leafupdate */
53: PetscSFGetGraph(gathersf,NULL,&nleaves,NULL,NULL);
54: VecCreateSeq(PETSC_COMM_SELF,nleaves,&y);
55: VecDuplicate(y,&y2);
57: VecGetArray(x,&rootdata);
58: VecGetArray(y,&leafdata);
59: VecGetArray(y2,&leafupdate);
61: /* Bcast x to y,to initialize y = [1,N], then scale y to make leafupdate = y = [2,2*N] */
62: PetscSFBcastBegin(gathersf,MPIU_SCALAR,rootdata,leafdata,MPI_REPLACE);
63: PetscSFBcastEnd(gathersf,MPIU_SCALAR,rootdata,leafdata,MPI_REPLACE);
64: VecRestoreArray(y,&leafdata);
65: VecScale(y,2);
66: VecGetArray(y,&leafdata);
68: /* FetchAndOp x to y */
69: PetscSFFetchAndOpBegin(gathersf,MPIU_SCALAR,rootdata,leafdata,leafupdate,op);
70: PetscSFFetchAndOpEnd(gathersf,MPIU_SCALAR,rootdata,leafdata,leafupdate,op);
72: /* View roots (x) and leafupdate (y2). Since this is a gather graph, leafudpate = rootdata = [1,N], then rootdata += leafdata, i.e., [3,3*N] */
73: VecCreateMPIWithArray(PETSC_COMM_WORLD,1,nleaves,PETSC_DECIDE,leafupdate,&gy2);
74: PetscObjectSetName((PetscObject)x,"rootdata");
75: PetscObjectSetName((PetscObject)gy2,"leafupdate");
77: VecView(x,PETSC_VIEWER_STDOUT_WORLD);
78: VecView(gy2,PETSC_VIEWER_STDOUT_WORLD);
79: VecDestroy(&gy2);
81: VecRestoreArray(y2,&leafupdate);
82: VecDestroy(&y2);
84: VecRestoreArray(y,&leafdata);
85: VecDestroy(&y);
87: VecRestoreArray(x,&rootdata);
88: /* VecDestroy(&x); */ /* We will reuse x in ALLGATHER, so do not destroy it */
90: PetscSFDestroy(&gathersf);
92: /*-------------------------------------*/
93: /* PETSCSF_PATTERN_ALLGATHER */
94: /*-------------------------------------*/
96: /* set MPI vec x to [1, 2, .., N] */
97: for (i=low; i<high; i++) VecSetValue(x,i,(PetscScalar)i+1.0,INSERT_VALUES);
98: VecAssemblyBegin(x);
99: VecAssemblyEnd(x);
101: /* Create the allgather SF */
102: PetscPrintf(PETSC_COMM_WORLD,"\nTesting PetscSFFetchAndOp on a PETSCSF_PATTERN_ALLGATHER graph with op = %s\n",mpiopname);
103: VecGetLayout(x,&layout);
104: PetscSFCreate(PETSC_COMM_WORLD,&allgathersf);
105: PetscSFSetGraphWithPattern(allgathersf,layout,PETSCSF_PATTERN_ALLGATHER);
107: /* Create the leaf vector y (seq vector) and its duplicate y2 working as leafupdate */
108: PetscSFGetGraph(allgathersf,NULL,&nleaves,NULL,NULL);
109: VecCreateSeq(PETSC_COMM_SELF,nleaves,&y);
110: VecDuplicate(y,&y2);
112: VecGetArray(x,&rootdata);
113: VecGetArray(y,&leafdata);
114: VecGetArray(y2,&leafupdate);
116: /* Bcast x to y, to initialize y = [1,N], then scale y to make leafupdate = y = [2,2*N] */
117: PetscSFBcastBegin(allgathersf,MPIU_SCALAR,rootdata,leafdata,MPI_REPLACE);
118: PetscSFBcastEnd(allgathersf,MPIU_SCALAR,rootdata,leafdata,MPI_REPLACE);
119: VecRestoreArray(y,&leafdata);
120: VecScale(y,2);
121: VecGetArray(y,&leafdata);
123: /* FetchAndOp x to y */
124: PetscSFFetchAndOpBegin(allgathersf,MPIU_SCALAR,rootdata,leafdata,leafupdate,op);
125: PetscSFFetchAndOpEnd(allgathersf,MPIU_SCALAR,rootdata,leafdata,leafupdate,op);
127: /* View roots (x) and leafupdate (y2). Since this is an allgather graph, we have (suppose ranks get updates in ascending order)
128: rank 0: leafupdate = rootdata = [1,N], rootdata += leafdata = [3,3*N]
129: rank 1: leafupdate = rootdata = [3,3*N], rootdata += leafdata = [5,5*N]
130: rank 2: leafupdate = rootdata = [5,5*N], rootdata += leafdata = [7,7*N]
131: ...
132: */
133: VecCreateMPIWithArray(PETSC_COMM_WORLD,1,nleaves,PETSC_DECIDE,leafupdate,&gy2);
134: PetscObjectSetName((PetscObject)x,"rootdata");
135: PetscObjectSetName((PetscObject)gy2,"leafupdate");
137: VecView(x,PETSC_VIEWER_STDOUT_WORLD);
138: VecView(gy2,PETSC_VIEWER_STDOUT_WORLD);
139: VecDestroy(&gy2);
141: VecRestoreArray(y2,&leafupdate);
142: VecDestroy(&y2);
144: VecRestoreArray(y,&leafdata);
145: VecDestroy(&y);
147: VecRestoreArray(x,&rootdata);
148: VecDestroy(&x); /* We won't reuse x in ALLGATHER, so destroy it */
150: PetscSFDestroy(&allgathersf);
152: /*-------------------------------------*/
153: /* PETSCSF_PATTERN_ALLTOALL */
154: /*-------------------------------------*/
156: VecCreate(PETSC_COMM_WORLD,&x);
157: VecSetFromOptions(x);
158: VecSetSizes(x,size,PETSC_DECIDE);
160: /* set MPI vec x to [1, 2, .., size^2] */
161: VecGetOwnershipRange(x,&low,&high);
162: for (i=low; i<high; i++) VecSetValue(x,i,(PetscScalar)i+1.0,INSERT_VALUES);
163: VecAssemblyBegin(x);
164: VecAssemblyEnd(x);
166: /* Create the alltoall SF */
167: PetscPrintf(PETSC_COMM_WORLD,"\nTesting PetscSFFetchAndOp on a PETSCSF_PATTERN_ALLTOALL graph with op = %s\n",mpiopname);
168: PetscSFCreate(PETSC_COMM_WORLD,&alltoallsf);
169: PetscSFSetGraphWithPattern(alltoallsf,NULL/*insignificant*/,PETSCSF_PATTERN_ALLTOALL);
171: /* Create the leaf vector y (seq vector) and its duplicate y2 working as leafupdate */
172: PetscSFGetGraph(alltoallsf,NULL,&nleaves,NULL,NULL);
173: VecCreateSeq(PETSC_COMM_SELF,nleaves,&y);
174: VecDuplicate(y,&y2);
176: VecGetArray(x,&rootdata);
177: VecGetArray(y,&leafdata);
178: VecGetArray(y2,&leafupdate);
180: /* Bcast x to y, to initialize y = 1+rank+size*i, with i=0..size-1 */
181: PetscSFBcastBegin(alltoallsf,MPIU_SCALAR,rootdata,leafdata,MPI_REPLACE);
182: PetscSFBcastEnd(alltoallsf,MPIU_SCALAR,rootdata,leafdata,MPI_REPLACE);
184: /* FetchAndOp x to y */
185: PetscSFFetchAndOpBegin(alltoallsf,MPIU_SCALAR,rootdata,leafdata,leafupdate,op);
186: PetscSFFetchAndOpEnd(alltoallsf,MPIU_SCALAR,rootdata,leafdata,leafupdate,op);
188: /* View roots (x) and leafupdate (y2). Since this is an alltoall graph, each root has only one leaf.
189: So, leafupdate = rootdata = 1+rank+size*i, i=0..size-1; and rootdata += leafdata, i.e., rootdata = [2,2*N]
190: */
191: VecCreateMPIWithArray(PETSC_COMM_WORLD,1,nleaves,PETSC_DECIDE,leafupdate,&gy2);
192: PetscObjectSetName((PetscObject)x,"rootdata");
193: PetscObjectSetName((PetscObject)gy2,"leafupdate");
195: VecView(x,PETSC_VIEWER_STDOUT_WORLD);
196: VecView(gy2,PETSC_VIEWER_STDOUT_WORLD);
197: VecDestroy(&gy2);
199: VecRestoreArray(y2,&leafupdate);
200: VecDestroy(&y2);
202: VecRestoreArray(y,&leafdata);
203: VecDestroy(&y);
205: VecRestoreArray(x,&rootdata);
206: VecDestroy(&x);
208: PetscSFDestroy(&alltoallsf);
210: PetscFinalize();
211: return 0;
212: }
/*TEST

   test:
     # N=10 is divisible by nsize, to trigger Allgather/Gather in SF
     # MPI_Sendrecv_replace is broken with 20210400300
     requires: !defined(PETSC_HAVE_I_MPI_NUMVERSION)
     nsize: 2
     args: -op replace

   test:
     suffix: 2
     nsize: 2
     args: -op sum

   # N=10 is not divisible by nsize, to trigger Allgatherv/Gatherv in SF
   test:
     # MPI_Sendrecv_replace is broken with 20210400300
     requires: !defined(PETSC_HAVE_I_MPI_NUMVERSION)
     suffix: 3
     nsize: 3
     args: -op replace

   test:
     suffix: 4
     nsize: 3
     args: -op sum

TEST*/