/* plexcheckinterface.c */

  1: #include <petsc/private/dmpleximpl.h>

  3: /* TODO PetscArrayExchangeBegin/End */
  4: /* TODO blocksize */
  5: /* TODO move to API ? */
  6: static PetscErrorCode ExchangeArrayByRank_Private(PetscObject obj, MPI_Datatype dt, PetscInt nsranks, const PetscMPIInt sranks[], PetscInt ssize[], const void *sarr[], PetscInt nrranks, const PetscMPIInt rranks[], PetscInt *rsize_out[], void **rarr_out[])
  7: {
  8:   PetscInt r;
  9:   PetscInt *rsize;
 10:   void **rarr;
 11:   MPI_Request *sreq, *rreq;
 12:   PetscMPIInt tag, unitsize;
 13:   MPI_Comm comm;

 15:   MPI_Type_size(dt, &unitsize);
 16:   PetscObjectGetComm(obj, &comm);
 17:   PetscMalloc2(nrranks, &rsize, nrranks, &rarr);
 18:   PetscMalloc2(nrranks, &rreq, nsranks, &sreq);
 19:   /* exchange array size */
 20:   PetscObjectGetNewTag(obj,&tag);
 21:   for (r=0; r<nrranks; r++) {
 22:     MPI_Irecv(&rsize[r], 1, MPIU_INT, rranks[r], tag, comm, &rreq[r]);
 23:   }
 24:   for (r=0; r<nsranks; r++) {
 25:     MPI_Isend(&ssize[r], 1, MPIU_INT, sranks[r], tag, comm, &sreq[r]);
 26:   }
 27:   MPI_Waitall(nrranks, rreq, MPI_STATUSES_IGNORE);
 28:   MPI_Waitall(nsranks, sreq, MPI_STATUSES_IGNORE);
 29:   /* exchange array */
 30:   PetscObjectGetNewTag(obj,&tag);
 31:   for (r=0; r<nrranks; r++) {
 32:     PetscMalloc(rsize[r]*unitsize, &rarr[r]);
 33:     MPI_Irecv(rarr[r], rsize[r], dt, rranks[r], tag, comm, &rreq[r]);
 34:   }
 35:   for (r=0; r<nsranks; r++) {
 36:     MPI_Isend(sarr[r], ssize[r], dt, sranks[r], tag, comm, &sreq[r]);
 37:   }
 38:   MPI_Waitall(nrranks, rreq, MPI_STATUSES_IGNORE);
 39:   MPI_Waitall(nsranks, sreq, MPI_STATUSES_IGNORE);
 40:   PetscFree2(rreq, sreq);
 41:   *rsize_out = rsize;
 42:   *rarr_out = rarr;
 43:   return 0;
 44: }

 46: /* TODO VecExchangeBegin/End */
 47: /* TODO move to API ? */
 48: static PetscErrorCode ExchangeVecByRank_Private(PetscObject obj, PetscInt nsranks, const PetscMPIInt sranks[], Vec svecs[], PetscInt nrranks, const PetscMPIInt rranks[], Vec *rvecs[])
 49: {
 50:   PetscInt r;
 51:   PetscInt *ssize, *rsize;
 52:   PetscScalar **rarr;
 53:   const PetscScalar **sarr;
 54:   Vec *rvecs_;
 55:   MPI_Request *sreq, *rreq;

 57:   PetscMalloc4(nsranks, &ssize, nsranks, &sarr, nrranks, &rreq, nsranks, &sreq);
 58:   for (r=0; r<nsranks; r++) {
 59:     VecGetLocalSize(svecs[r], &ssize[r]);
 60:     VecGetArrayRead(svecs[r], &sarr[r]);
 61:   }
 62:   ExchangeArrayByRank_Private(obj, MPIU_SCALAR, nsranks, sranks, ssize, (const void**)sarr, nrranks, rranks, &rsize, (void***)&rarr);
 63:   PetscMalloc1(nrranks, &rvecs_);
 64:   for (r=0; r<nrranks; r++) {
 65:     /* set array in two steps to mimic PETSC_OWN_POINTER */
 66:     VecCreateSeqWithArray(PETSC_COMM_SELF, 1, rsize[r], NULL, &rvecs_[r]);
 67:     VecReplaceArray(rvecs_[r], rarr[r]);
 68:   }
 69:   for (r=0; r<nsranks; r++) {
 70:     VecRestoreArrayRead(svecs[r], &sarr[r]);
 71:   }
 72:   PetscFree2(rsize, rarr);
 73:   PetscFree4(ssize, sarr, rreq, sreq);
 74:   *rvecs = rvecs_;
 75:   return 0;
 76: }

 78: static PetscErrorCode SortByRemote_Private(PetscSF sf, PetscInt *rmine1[], PetscInt *rremote1[])
 79: {
 80:   PetscInt            nleaves;
 81:   PetscInt            nranks;
 82:   const PetscMPIInt   *ranks;
 83:   const PetscInt      *roffset, *rmine, *rremote;
 84:   PetscInt            n, o, r;

 86:   PetscSFGetRootRanks(sf, &nranks, &ranks, &roffset, &rmine, &rremote);
 87:   nleaves = roffset[nranks];
 88:   PetscMalloc2(nleaves, rmine1, nleaves, rremote1);
 89:   for (r=0; r<nranks; r++) {
 90:     /* simultaneously sort rank-wise portions of rmine & rremote by values in rremote
 91:        - to unify order with the other side */
 92:     o = roffset[r];
 93:     n = roffset[r+1] - o;
 94:     PetscArraycpy(&(*rmine1)[o], &rmine[o], n);
 95:     PetscArraycpy(&(*rremote1)[o], &rremote[o], n);
 96:     PetscSortIntWithArray(n, &(*rremote1)[o], &(*rmine1)[o]);
 97:   }
 98:   return 0;
 99: }

101: static PetscErrorCode GetRecursiveConeCoordinatesPerRank_Private(DM dm, PetscSF sf, PetscInt rmine[], Vec *coordinatesPerRank[])
102: {
103:   IS                  pointsPerRank, conesPerRank;
104:   PetscInt            nranks;
105:   const PetscMPIInt   *ranks;
106:   const PetscInt      *roffset;
107:   PetscInt            n, o, r;

109:   DMGetCoordinatesLocalSetUp(dm);
110:   PetscSFGetRootRanks(sf, &nranks, &ranks, &roffset, NULL, NULL);
111:   PetscMalloc1(nranks, coordinatesPerRank);
112:   for (r=0; r<nranks; r++) {
113:     o = roffset[r];
114:     n = roffset[r+1] - o;
115:     ISCreateGeneral(PETSC_COMM_SELF, n, &rmine[o], PETSC_USE_POINTER, &pointsPerRank);
116:     DMPlexGetConeRecursiveVertices(dm, pointsPerRank, &conesPerRank);
117:     DMGetCoordinatesLocalTuple(dm, conesPerRank, NULL, &(*coordinatesPerRank)[r]);
118:     ISDestroy(&pointsPerRank);
119:     ISDestroy(&conesPerRank);
120:   }
121:   return 0;
122: }

124: static PetscErrorCode PetscSFComputeMultiRootOriginalNumberingByRank_Private(PetscSF sf, PetscSF imsf, PetscInt *irmine1[])
125: {
126:   PetscInt            *mRootsOrigNumbering;
127:   PetscInt            nileaves, niranks;
128:   const PetscInt      *iroffset, *irmine, *degree;
129:   PetscInt            i, n, o, r;

131:   PetscSFGetGraph(imsf, NULL, &nileaves, NULL, NULL);
132:   PetscSFGetRootRanks(imsf, &niranks, NULL, &iroffset, &irmine, NULL);
134:   PetscSFComputeDegreeBegin(sf, &degree);
135:   PetscSFComputeDegreeEnd(sf, &degree);
136:   PetscSFComputeMultiRootOriginalNumbering(sf, degree, NULL, &mRootsOrigNumbering);
137:   PetscMalloc1(nileaves, irmine1);
138:   for (r=0; r<niranks; r++) {
139:     o = iroffset[r];
140:     n = iroffset[r+1] - o;
141:     for (i=0; i<n; i++) (*irmine1)[o+i] = mRootsOrigNumbering[irmine[o+i]];
142:   }
143:   PetscFree(mRootsOrigNumbering);
144:   return 0;
145: }

147: /*@
148:   DMPlexCheckInterfaceCones - Check that points on inter-partition interfaces have conforming order of cone points.

150:   Input Parameters:
151: . dm - The DMPlex object

153:   Notes:
154: For example, if there is an edge (rank,index)=(0,2) connecting points cone(0,2)=[(0,0),(0,1)] in this order, and the point SF contains connections 0 <- (1,0), 1 <- (1,1) and 2 <- (1,2),
155:   then this check would pass if the edge (1,2) has cone(1,2)=[(1,0),(1,1)]. By contrast, if cone(1,2)=[(1,1),(1,0)], then this check would fail.

157:   This is mainly intended for debugging/testing purposes. Does not check cone orientation, for this purpose use DMPlexCheckFaces().

159:   For the complete list of DMPlexCheck* functions, see DMSetFromOptions().

161:   Developer Note:
162:   Interface cones are expanded into vertices and then their coordinates are compared.

164:   Level: developer

166: .seealso: DMPlexGetCone(), DMPlexGetConeSize(), DMGetPointSF(), DMGetCoordinates(), DMSetFromOptions()
167: @*/
168: PetscErrorCode DMPlexCheckInterfaceCones(DM dm)
169: {
170:   PetscSF             sf;
171:   PetscInt            nleaves, nranks, nroots;
172:   const PetscInt      *mine, *roffset, *rmine, *rremote;
173:   const PetscSFNode   *remote;
174:   const PetscMPIInt   *ranks;
175:   PetscSF             msf, imsf;
176:   PetscInt            nileaves, niranks;
177:   const PetscMPIInt   *iranks;
178:   const PetscInt      *iroffset, *irmine, *irremote;
179:   PetscInt            *rmine1, *rremote1; /* rmine and rremote copies simultaneously sorted by rank and rremote */
180:   PetscInt            *mine_orig_numbering;
181:   Vec                 *sntCoordinatesPerRank;
182:   Vec                 *refCoordinatesPerRank;
183:   Vec                 *recCoordinatesPerRank=NULL;
184:   PetscInt            r;
185:   PetscMPIInt         commsize, myrank;
186:   PetscBool           same;
187:   PetscBool           verbose=PETSC_FALSE;
188:   MPI_Comm            comm;

191:   PetscObjectGetComm((PetscObject)dm, &comm);
192:   MPI_Comm_rank(comm, &myrank);
193:   MPI_Comm_size(comm, &commsize);
194:   if (commsize < 2) return 0;
195:   DMGetPointSF(dm, &sf);
196:   if (!sf) return 0;
197:   PetscSFGetGraph(sf, &nroots, &nleaves, &mine, &remote);
198:   if (nroots < 0) return 0;
200:   PetscSFSetUp(sf);
201:   PetscSFGetRootRanks(sf, &nranks, &ranks, &roffset, &rmine, &rremote);

203:   /* Expand sent cones per rank */
204:   SortByRemote_Private(sf, &rmine1, &rremote1);
205:   GetRecursiveConeCoordinatesPerRank_Private(dm, sf, rmine1, &sntCoordinatesPerRank);

207:   /* Create inverse SF */
208:   PetscSFGetMultiSF(sf,&msf);
209:   PetscSFCreateInverseSF(msf,&imsf);
210:   PetscSFSetUp(imsf);
211:   PetscSFGetGraph(imsf, NULL, &nileaves, NULL, NULL);
212:   PetscSFGetRootRanks(imsf, &niranks, &iranks, &iroffset, &irmine, &irremote);

214:   /* Compute original numbering of multi-roots (referenced points) */
215:   PetscSFComputeMultiRootOriginalNumberingByRank_Private(sf, imsf, &mine_orig_numbering);

217:   /* Expand coordinates of the referred cones per rank */
218:   GetRecursiveConeCoordinatesPerRank_Private(dm, imsf, mine_orig_numbering, &refCoordinatesPerRank);

220:   /* Send the coordinates */
221:   ExchangeVecByRank_Private((PetscObject)sf, nranks, ranks, sntCoordinatesPerRank, niranks, iranks, &recCoordinatesPerRank);

223:   /* verbose output */
224:   PetscOptionsGetBool(((PetscObject)dm)->options, ((PetscObject)dm)->prefix, "-dm_plex_check_cones_conform_on_interfaces_verbose", &verbose, NULL);
225:   if (verbose) {
226:     PetscViewer sv, v = PETSC_VIEWER_STDOUT_WORLD;
227:     PetscViewerASCIIPrintf(v, "============\nDMPlexCheckInterfaceCones output\n============\n");
228:     PetscViewerASCIIPushSynchronized(v);
229:     PetscViewerASCIISynchronizedPrintf(v, "[%d] --------\n", myrank);
230:     for (r=0; r<nranks; r++) {
231:       PetscViewerASCIISynchronizedPrintf(v, "  r=%D ranks[r]=%d sntCoordinatesPerRank[r]:\n", r, ranks[r]);
232:       PetscViewerASCIIPushTab(v);
233:       PetscViewerGetSubViewer(v,PETSC_COMM_SELF,&sv);
234:       VecView(sntCoordinatesPerRank[r], sv);
235:       PetscViewerRestoreSubViewer(v,PETSC_COMM_SELF,&sv);
236:       PetscViewerASCIIPopTab(v);
237:     }
238:     PetscViewerASCIISynchronizedPrintf(v, "  ----------\n");
239:     for (r=0; r<niranks; r++) {
240:       PetscViewerASCIISynchronizedPrintf(v, "  r=%D iranks[r]=%d refCoordinatesPerRank[r]:\n", r, iranks[r]);
241:       PetscViewerASCIIPushTab(v);
242:       PetscViewerGetSubViewer(v,PETSC_COMM_SELF,&sv);
243:       VecView(refCoordinatesPerRank[r], sv);
244:       PetscViewerRestoreSubViewer(v,PETSC_COMM_SELF,&sv);
245:       PetscViewerASCIIPopTab(v);
246:     }
247:     PetscViewerASCIISynchronizedPrintf(v, "  ----------\n");
248:     for (r=0; r<niranks; r++) {
249:       PetscViewerASCIISynchronizedPrintf(v, "  r=%D iranks[r]=%d recCoordinatesPerRank[r]:\n", r, iranks[r]);
250:       PetscViewerASCIIPushTab(v);
251:       PetscViewerGetSubViewer(v,PETSC_COMM_SELF,&sv);
252:       VecView(recCoordinatesPerRank[r], sv);
253:       PetscViewerRestoreSubViewer(v,PETSC_COMM_SELF,&sv);
254:       PetscViewerASCIIPopTab(v);
255:     }
256:     PetscViewerFlush(v);
257:     PetscViewerASCIIPopSynchronized(v);
258:   }

260:   /* Compare recCoordinatesPerRank with refCoordinatesPerRank */
261:   for (r=0; r<niranks; r++) {
262:     VecEqual(refCoordinatesPerRank[r], recCoordinatesPerRank[r], &same);
264:   }

266:   /* destroy sent stuff */
267:   for (r=0; r<nranks; r++) {
268:     VecDestroy(&sntCoordinatesPerRank[r]);
269:   }
270:   PetscFree(sntCoordinatesPerRank);
271:   PetscFree2(rmine1, rremote1);
272:   PetscSFDestroy(&imsf);

274:   /* destroy referenced stuff */
275:   for (r=0; r<niranks; r++) {
276:     VecDestroy(&refCoordinatesPerRank[r]);
277:   }
278:   PetscFree(refCoordinatesPerRank);
279:   PetscFree(mine_orig_numbering);

281:   /* destroy received stuff */
282:   for (r=0; r<niranks; r++) {
283:     VecDestroy(&recCoordinatesPerRank[r]);
284:   }
285:   PetscFree(recCoordinatesPerRank);
286:   return 0;
287: }