Actual source code: telescope_dmda.c
1: #include <petsc/private/matimpl.h>
2: #include <petsc/private/pcimpl.h>
3: #include <petsc/private/dmimpl.h>
4: #include <petscksp.h>
5: #include <petscdm.h>
6: #include <petscdmda.h>
8: #include "../src/ksp/pc/impls/telescope/telescope.h"
10: static PetscBool cited = PETSC_FALSE;
11: static const char citation[] = "@inproceedings{MaySananRuppKnepleySmith2016,\n"
12: " title = {Extreme-Scale Multigrid Components within PETSc},\n"
13: " author = {Dave A. May and Patrick Sanan and Karl Rupp and Matthew G. Knepley and Barry F. Smith},\n"
14: " booktitle = {Proceedings of the Platform for Advanced Scientific Computing Conference},\n"
15: " series = {PASC '16},\n"
16: " isbn = {978-1-4503-4126-4},\n"
17: " location = {Lausanne, Switzerland},\n"
18: " pages = {5:1--5:12},\n"
19: " articleno = {5},\n"
20: " numpages = {12},\n"
21: " url = {https://doi.acm.org/10.1145/2929908.2929913},\n"
22: " doi = {10.1145/2929908.2929913},\n"
23: " acmid = {2929913},\n"
24: " publisher = {ACM},\n"
25: " address = {New York, NY, USA},\n"
26: " keywords = {GPU, HPC, agglomeration, coarse-level solver, multigrid, parallel computing, preconditioning},\n"
27: " year = {2016}\n"
28: "}\n";
30: static PetscErrorCode _DMDADetermineRankFromGlobalIJK(PetscInt dim, PetscInt i, PetscInt j, PetscInt k, PetscInt Mp, PetscInt Np, PetscInt Pp, PetscInt start_i[], PetscInt start_j[], PetscInt start_k[], PetscInt span_i[], PetscInt span_j[], PetscInt span_k[], PetscMPIInt *_pi, PetscMPIInt *_pj, PetscMPIInt *_pk, PetscMPIInt *rank_re)
31: {
32: PetscInt pi, pj, pk, n;
34: PetscFunctionBegin;
35: *rank_re = -1;
36: if (_pi) *_pi = -1;
37: if (_pj) *_pj = -1;
38: if (_pk) *_pk = -1;
39: pi = pj = pk = -1;
40: if (_pi) {
41: for (n = 0; n < Mp; n++) {
42: if ((i >= start_i[n]) && (i < start_i[n] + span_i[n])) {
43: pi = n;
44: break;
45: }
46: }
47: PetscCheck(pi != -1, PETSC_COMM_SELF, PETSC_ERR_USER, "[dmda-ijk] pi cannot be determined : range %" PetscInt_FMT ", val %" PetscInt_FMT, Mp, i);
48: PetscCall(PetscMPIIntCast(pi, _pi));
49: }
51: if (_pj) {
52: for (n = 0; n < Np; n++) {
53: if ((j >= start_j[n]) && (j < start_j[n] + span_j[n])) {
54: pj = n;
55: break;
56: }
57: }
58: PetscCheck(pj != -1, PETSC_COMM_SELF, PETSC_ERR_USER, "[dmda-ijk] pj cannot be determined : range %" PetscInt_FMT ", val %" PetscInt_FMT, Np, j);
59: PetscCall(PetscMPIIntCast(pj, _pj));
60: }
62: if (_pk) {
63: for (n = 0; n < Pp; n++) {
64: if ((k >= start_k[n]) && (k < start_k[n] + span_k[n])) {
65: pk = n;
66: break;
67: }
68: }
69: PetscCheck(pk != -1, PETSC_COMM_SELF, PETSC_ERR_USER, "[dmda-ijk] pk cannot be determined : range %" PetscInt_FMT ", val %" PetscInt_FMT, Pp, k);
70: PetscCall(PetscMPIIntCast(pk, _pk));
71: }
73: switch (dim) {
74: case 1:
75: PetscCall(PetscMPIIntCast(pi, rank_re));
76: break;
77: case 2:
78: PetscCall(PetscMPIIntCast(pi + pj * Mp, rank_re));
79: break;
80: case 3:
81: PetscCall(PetscMPIIntCast(pi + pj * Mp + pk * (Mp * Np), rank_re));
82: break;
83: }
84: PetscFunctionReturn(PETSC_SUCCESS);
85: }
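/*
  Compute the global starting index s0 of rank_re on the repartitioned DMDA by summing
  the number of points owned by every rank with a smaller flattened rank index, using
  the per-direction ownership ranges range_{i,j,k}_re.
*/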
87: static PetscErrorCode _DMDADetermineGlobalS0(PetscInt dim, PetscMPIInt rank_re, PetscInt Mp_re, PetscInt Np_re, PetscInt Pp_re, PetscInt range_i_re[], PetscInt range_j_re[], PetscInt range_k_re[], PetscInt *s0)
88: {
89: PetscInt i, j, k, start_IJK = 0;
90: PetscInt rank_ijk;
92: PetscFunctionBegin;
93: switch (dim) {
94: case 1:
95: for (i = 0; i < Mp_re; i++) {
96: rank_ijk = i;
97: if (rank_ijk < rank_re) start_IJK += range_i_re[i];
98: }
99: break;
100: case 2:
101: for (j = 0; j < Np_re; j++) {
102: for (i = 0; i < Mp_re; i++) {
103: rank_ijk = i + j * Mp_re;
104: if (rank_ijk < rank_re) start_IJK += range_i_re[i] * range_j_re[j];
105: }
106: }
107: break;
108: case 3:
109: for (k = 0; k < Pp_re; k++) {
110: for (j = 0; j < Np_re; j++) {
111: for (i = 0; i < Mp_re; i++) {
112: rank_ijk = i + j * Mp_re + k * Mp_re * Np_re;
113: if (rank_ijk < rank_re) start_IJK += range_i_re[i] * range_j_re[j] * range_k_re[k];
114: }
115: }
116: }
117: break;
118: }
119: *s0 = start_IJK;
120: PetscFunctionReturn(PETSC_SUCCESS);
121: }
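/*
  Transfer the coordinates of the parent 2D DMDA onto the repartitioned sub-DMDA:
  the parent coordinates are converted to natural ordering, the entries covered by each
  active rank's corner of the sub-DMDA are gathered with a VecScatter, copied into the
  sub-DMDA's global coordinate vector, and finally pushed to its local coordinate vector.
*/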
123: static PetscErrorCode PCTelescopeSetUp_dmda_repart_coors2d(PC_Telescope sred, DM dm, DM subdm)
124: {
125: DM cdm;
126: Vec coor, coor_natural, perm_coors;
127: PetscInt i, j, si, sj, ni, nj, M, N, Ml, Nl, c, nidx;
128: PetscInt *fine_indices;
129: IS is_fine, is_local;
130: VecScatter sctx;
132: PetscFunctionBegin;
133: PetscCall(DMGetCoordinates(dm, &coor));
134: if (!coor) PetscFunctionReturn(PETSC_SUCCESS);
135: if (PCTelescope_isActiveRank(sred)) PetscCall(DMDASetUniformCoordinates(subdm, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0));
136: /* Get the coordinate vector from the distributed array */
137: PetscCall(DMGetCoordinateDM(dm, &cdm));
138: PetscCall(DMDACreateNaturalVector(cdm, &coor_natural));
140: PetscCall(DMDAGlobalToNaturalBegin(cdm, coor, INSERT_VALUES, coor_natural));
141: PetscCall(DMDAGlobalToNaturalEnd(cdm, coor, INSERT_VALUES, coor_natural));
143: /* get indices of the coordinate entries we want to grab */
144: PetscCall(DMDAGetInfo(dm, NULL, &M, &N, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL));
145: if (PCTelescope_isActiveRank(sred)) {
146: PetscCall(DMDAGetCorners(subdm, &si, &sj, NULL, &ni, &nj, NULL));
147: Ml = ni;
148: Nl = nj;
149: } else {
150: si = sj = 0;
151: ni = nj = 0;
152: Ml = Nl = 0;
153: }
155: PetscCall(PetscMalloc1(Ml * Nl * 2, &fine_indices));
156: c = 0;
157: if (PCTelescope_isActiveRank(sred)) {
158: for (j = sj; j < sj + nj; j++) {
159: for (i = si; i < si + ni; i++) {
160: nidx = (i) + (j)*M;
161: fine_indices[c] = 2 * nidx;
162: fine_indices[c + 1] = 2 * nidx + 1;
163: c = c + 2;
164: }
165: }
166: PetscCheck(c == Ml * Nl * 2, PETSC_COMM_SELF, PETSC_ERR_PLIB, "c %" PetscInt_FMT " should equal 2 * Ml %" PetscInt_FMT " * Nl %" PetscInt_FMT, c, Ml, Nl);
167: }
169: /* generate scatter */
170: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)dm), Ml * Nl * 2, fine_indices, PETSC_USE_POINTER, &is_fine));
171: PetscCall(ISCreateStride(PETSC_COMM_SELF, Ml * Nl * 2, 0, 1, &is_local));
173: /* scatter */
174: PetscCall(VecCreate(PETSC_COMM_SELF, &perm_coors));
175: PetscCall(VecSetSizes(perm_coors, PETSC_DECIDE, Ml * Nl * 2));
176: PetscCall(VecSetType(perm_coors, VECSEQ));
178: PetscCall(VecScatterCreate(coor_natural, is_fine, perm_coors, is_local, &sctx));
179: PetscCall(VecScatterBegin(sctx, coor_natural, perm_coors, INSERT_VALUES, SCATTER_FORWARD));
180: PetscCall(VecScatterEnd(sctx, coor_natural, perm_coors, INSERT_VALUES, SCATTER_FORWARD));
181: /* access */
182: if (PCTelescope_isActiveRank(sred)) {
183: Vec _coors;
184: const PetscScalar *LA_perm;
185: PetscScalar *LA_coors;
187: PetscCall(DMGetCoordinates(subdm, &_coors));
188: PetscCall(VecGetArrayRead(perm_coors, &LA_perm));
189: PetscCall(VecGetArray(_coors, &LA_coors));
190: for (i = 0; i < Ml * Nl * 2; i++) LA_coors[i] = LA_perm[i];
191: PetscCall(VecRestoreArray(_coors, &LA_coors));
192: PetscCall(VecRestoreArrayRead(perm_coors, &LA_perm));
193: }
195: /* update local coords */
196: if (PCTelescope_isActiveRank(sred)) {
197: DM _dmc;
198: Vec _coors, _coors_local;
199: PetscCall(DMGetCoordinateDM(subdm, &_dmc));
200: PetscCall(DMGetCoordinates(subdm, &_coors));
201: PetscCall(DMGetCoordinatesLocal(subdm, &_coors_local));
202: PetscCall(DMGlobalToLocalBegin(_dmc, _coors, INSERT_VALUES, _coors_local));
203: PetscCall(DMGlobalToLocalEnd(_dmc, _coors, INSERT_VALUES, _coors_local));
204: }
205: PetscCall(VecScatterDestroy(&sctx));
206: PetscCall(ISDestroy(&is_fine));
207: PetscCall(PetscFree(fine_indices));
208: PetscCall(ISDestroy(&is_local));
209: PetscCall(VecDestroy(&perm_coors));
210: PetscCall(VecDestroy(&coor_natural));
211: PetscFunctionReturn(PETSC_SUCCESS);
212: }
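/* Same as the 2D routine above, but for a 3D parent DMDA (three coordinate dofs per node) */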
214: static PetscErrorCode PCTelescopeSetUp_dmda_repart_coors3d(PC_Telescope sred, DM dm, DM subdm)
215: {
216: DM cdm;
217: Vec coor, coor_natural, perm_coors;
218: PetscInt i, j, k, si, sj, sk, ni, nj, nk, M, N, P, Ml, Nl, Pl, c, nidx;
219: PetscInt *fine_indices;
220: IS is_fine, is_local;
221: VecScatter sctx;
223: PetscFunctionBegin;
224: PetscCall(DMGetCoordinates(dm, &coor));
225: if (!coor) PetscFunctionReturn(PETSC_SUCCESS);
227: if (PCTelescope_isActiveRank(sred)) PetscCall(DMDASetUniformCoordinates(subdm, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0));
229: /* Get the coordinate vector from the distributed array */
230: PetscCall(DMGetCoordinateDM(dm, &cdm));
231: PetscCall(DMDACreateNaturalVector(cdm, &coor_natural));
232: PetscCall(DMDAGlobalToNaturalBegin(cdm, coor, INSERT_VALUES, coor_natural));
233: PetscCall(DMDAGlobalToNaturalEnd(cdm, coor, INSERT_VALUES, coor_natural));
235: /* get indices of the coordinate entries we want to grab */
236: PetscCall(DMDAGetInfo(dm, NULL, &M, &N, &P, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL));
238: if (PCTelescope_isActiveRank(sred)) {
239: PetscCall(DMDAGetCorners(subdm, &si, &sj, &sk, &ni, &nj, &nk));
240: Ml = ni;
241: Nl = nj;
242: Pl = nk;
243: } else {
244: si = sj = sk = 0;
245: ni = nj = nk = 0;
246: Ml = Nl = Pl = 0;
247: }
249: PetscCall(PetscMalloc1(Ml * Nl * Pl * 3, &fine_indices));
251: c = 0;
252: if (PCTelescope_isActiveRank(sred)) {
253: for (k = sk; k < sk + nk; k++) {
254: for (j = sj; j < sj + nj; j++) {
255: for (i = si; i < si + ni; i++) {
256: nidx = (i) + (j)*M + (k)*M * N;
257: fine_indices[c] = 3 * nidx;
258: fine_indices[c + 1] = 3 * nidx + 1;
259: fine_indices[c + 2] = 3 * nidx + 2;
260: c = c + 3;
261: }
262: }
263: }
264: }
266: /* generate scatter */
267: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)dm), Ml * Nl * Pl * 3, fine_indices, PETSC_USE_POINTER, &is_fine));
268: PetscCall(ISCreateStride(PETSC_COMM_SELF, Ml * Nl * Pl * 3, 0, 1, &is_local));
270: /* scatter */
271: PetscCall(VecCreate(PETSC_COMM_SELF, &perm_coors));
272: PetscCall(VecSetSizes(perm_coors, PETSC_DECIDE, Ml * Nl * Pl * 3));
273: PetscCall(VecSetType(perm_coors, VECSEQ));
274: PetscCall(VecScatterCreate(coor_natural, is_fine, perm_coors, is_local, &sctx));
275: PetscCall(VecScatterBegin(sctx, coor_natural, perm_coors, INSERT_VALUES, SCATTER_FORWARD));
276: PetscCall(VecScatterEnd(sctx, coor_natural, perm_coors, INSERT_VALUES, SCATTER_FORWARD));
278: /* access */
279: if (PCTelescope_isActiveRank(sred)) {
280: Vec _coors;
281: const PetscScalar *LA_perm;
282: PetscScalar *LA_coors;
284: PetscCall(DMGetCoordinates(subdm, &_coors));
285: PetscCall(VecGetArrayRead(perm_coors, &LA_perm));
286: PetscCall(VecGetArray(_coors, &LA_coors));
287: for (i = 0; i < Ml * Nl * Pl * 3; i++) LA_coors[i] = LA_perm[i];
288: PetscCall(VecRestoreArray(_coors, &LA_coors));
289: PetscCall(VecRestoreArrayRead(perm_coors, &LA_perm));
290: }
292: /* update local coords */
293: if (PCTelescope_isActiveRank(sred)) {
294: DM _dmc;
295: Vec _coors, _coors_local;
297: PetscCall(DMGetCoordinateDM(subdm, &_dmc));
298: PetscCall(DMGetCoordinates(subdm, &_coors));
299: PetscCall(DMGetCoordinatesLocal(subdm, &_coors_local));
300: PetscCall(DMGlobalToLocalBegin(_dmc, _coors, INSERT_VALUES, _coors_local));
301: PetscCall(DMGlobalToLocalEnd(_dmc, _coors, INSERT_VALUES, _coors_local));
302: }
304: PetscCall(VecScatterDestroy(&sctx));
305: PetscCall(ISDestroy(&is_fine));
306: PetscCall(PetscFree(fine_indices));
307: PetscCall(ISDestroy(&is_local));
308: PetscCall(VecDestroy(&perm_coors));
309: PetscCall(VecDestroy(&coor_natural));
310: PetscFunctionReturn(PETSC_SUCCESS);
311: }
313: static PetscErrorCode PCTelescopeSetUp_dmda_repart_coors(PC pc, PC_Telescope sred, PC_Telescope_DMDACtx *ctx)
314: {
315: PetscInt dim;
316: DM dm, subdm;
317: PetscSubcomm psubcomm;
318: MPI_Comm comm;
319: Vec coor;
321: PetscFunctionBegin;
322: PetscCall(PCGetDM(pc, &dm));
323: PetscCall(DMGetCoordinates(dm, &coor));
324: if (!coor) PetscFunctionReturn(PETSC_SUCCESS);
325: psubcomm = sred->psubcomm;
326: comm = PetscSubcommParent(psubcomm);
327: subdm = ctx->dmrepart;
329: PetscCall(PetscInfo(pc, "PCTelescope: setting up the coordinates (DMDA)\n"));
330: PetscCall(DMDAGetInfo(dm, &dim, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL));
331: switch (dim) {
332: case 1:
333: SETERRQ(comm, PETSC_ERR_SUP, "Telescope: DMDA (1D) repartitioning not provided");
334: case 2:
335: PetscCall(PCTelescopeSetUp_dmda_repart_coors2d(sred, dm, subdm));
336: break;
337: case 3:
338: PetscCall(PCTelescopeSetUp_dmda_repart_coors3d(sred, dm, subdm));
339: break;
340: }
341: PetscFunctionReturn(PETSC_SUCCESS);
342: }
344: /* setup repartitioned dm */
345: static PetscErrorCode PCTelescopeSetUp_dmda_repart(PC pc, PC_Telescope sred, PC_Telescope_DMDACtx *ctx)
346: {
347: DM dm;
348: PetscInt dim, nx, ny, nz, ndof, nsw, sum, k;
349: DMBoundaryType bx, by, bz;
350: DMDAStencilType stencil;
351: const PetscInt *_range_i_re;
352: const PetscInt *_range_j_re;
353: const PetscInt *_range_k_re;
354: DMDAInterpolationType itype;
355: PetscInt refine_x, refine_y, refine_z;
356: MPI_Comm comm, subcomm;
357: const char *prefix;
358: PetscMPIInt ni;
360: PetscFunctionBegin;
361: comm = PetscSubcommParent(sred->psubcomm);
362: subcomm = PetscSubcommChild(sred->psubcomm);
363: PetscCall(PCGetDM(pc, &dm));
365: PetscCall(DMDAGetInfo(dm, &dim, &nx, &ny, &nz, NULL, NULL, NULL, &ndof, &nsw, &bx, &by, &bz, &stencil));
366: PetscCall(DMDAGetInterpolationType(dm, &itype));
367: PetscCall(DMDAGetRefinementFactor(dm, &refine_x, &refine_y, &refine_z));
369: ctx->dmrepart = NULL;
370: _range_i_re = _range_j_re = _range_k_re = NULL;
371: /* Create DMDA on the child communicator */
372: if (PCTelescope_isActiveRank(sred)) {
373: switch (dim) {
374: case 1:
375: PetscCall(PetscInfo(pc, "PCTelescope: setting up the DMDA on comm subset (1D)\n"));
376: /* PetscCall(DMDACreate1d(subcomm,bx,nx,ndof,nsw,NULL,&ctx->dmrepart)); */
377: ny = nz = 1;
378: by = bz = DM_BOUNDARY_NONE;
379: break;
380: case 2:
381: PetscCall(PetscInfo(pc, "PCTelescope: setting up the DMDA on comm subset (2D)\n"));
382: /* PetscCall(DMDACreate2d(subcomm,bx,by,stencil,nx,ny, PETSC_DECIDE,PETSC_DECIDE,
383: ndof,nsw, NULL,NULL,&ctx->dmrepart)); */
384: nz = 1;
385: bz = DM_BOUNDARY_NONE;
386: break;
387: case 3:
388: PetscCall(PetscInfo(pc, "PCTelescope: setting up the DMDA on comm subset (3D)\n"));
389: /* PetscCall(DMDACreate3d(subcomm,bx,by,bz,stencil,nx,ny,nz,
390: PETSC_DECIDE,PETSC_DECIDE,PETSC_DECIDE, ndof,nsw, NULL,NULL,NULL,&ctx->dmrepart)); */
391: break;
392: }
393: /*
394: The APIs DMDACreate1d(), DMDACreate2d(), DMDACreate3d() do not allow us to set/append
395: a unique option prefix for the DM, thus we prefer to expose the contents of these APIs here.
396: This allows users to control the partitioning of the subDM.
397: */
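    /*
      For example (illustrative only; the exact prefix depends on the options prefix of the
      outer KSP/PC), the process grid of the repartitioned DMDA can be selected with options
      of the form
        -telescope_repart_da_processors_x <px> -telescope_repart_da_processors_y <py>
      which are read by the DMSetFromOptions() call below.
    */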
398: PetscCall(DMDACreate(subcomm, &ctx->dmrepart));
399: /* Set unique option prefix name */
400: PetscCall(KSPGetOptionsPrefix(sred->ksp, &prefix));
401: PetscCall(DMSetOptionsPrefix(ctx->dmrepart, prefix));
402: PetscCall(DMAppendOptionsPrefix(ctx->dmrepart, "repart_"));
403: /* standard setup from DMDACreate{1,2,3}d() */
404: PetscCall(DMSetDimension(ctx->dmrepart, dim));
405: PetscCall(DMDASetSizes(ctx->dmrepart, nx, ny, nz));
406: PetscCall(DMDASetNumProcs(ctx->dmrepart, PETSC_DECIDE, PETSC_DECIDE, PETSC_DECIDE));
407: PetscCall(DMDASetBoundaryType(ctx->dmrepart, bx, by, bz));
408: PetscCall(DMDASetDof(ctx->dmrepart, ndof));
409: PetscCall(DMDASetStencilType(ctx->dmrepart, stencil));
410: PetscCall(DMDASetStencilWidth(ctx->dmrepart, nsw));
411: PetscCall(DMDASetOwnershipRanges(ctx->dmrepart, NULL, NULL, NULL));
412: PetscCall(DMSetFromOptions(ctx->dmrepart));
413: PetscCall(DMSetUp(ctx->dmrepart));
414: /* Set refinement factors and interpolation type from the parent */
415: PetscCall(DMDASetRefinementFactor(ctx->dmrepart, refine_x, refine_y, refine_z));
416: PetscCall(DMDASetInterpolationType(ctx->dmrepart, itype));
418: PetscCall(DMDAGetInfo(ctx->dmrepart, NULL, NULL, NULL, NULL, &ctx->Mp_re, &ctx->Np_re, &ctx->Pp_re, NULL, NULL, NULL, NULL, NULL, NULL));
419: PetscCall(DMDAGetOwnershipRanges(ctx->dmrepart, &_range_i_re, &_range_j_re, &_range_k_re));
421: ctx->dmrepart->ops->creatematrix = dm->ops->creatematrix;
422: ctx->dmrepart->ops->createdomaindecomposition = dm->ops->createdomaindecomposition;
423: }
425: /* generate ranges for repartitioned dm */
426: /* note - assume rank 0 always participates */
427: /* TODO: use a single MPI call */
428: PetscCallMPI(MPI_Bcast(&ctx->Mp_re, 1, MPIU_INT, 0, comm));
429: PetscCallMPI(MPI_Bcast(&ctx->Np_re, 1, MPIU_INT, 0, comm));
430: PetscCallMPI(MPI_Bcast(&ctx->Pp_re, 1, MPIU_INT, 0, comm));
432: PetscCall(PetscCalloc3(ctx->Mp_re, &ctx->range_i_re, ctx->Np_re, &ctx->range_j_re, ctx->Pp_re, &ctx->range_k_re));
434: if (_range_i_re) PetscCall(PetscArraycpy(ctx->range_i_re, _range_i_re, ctx->Mp_re));
435: if (_range_j_re) PetscCall(PetscArraycpy(ctx->range_j_re, _range_j_re, ctx->Np_re));
436: if (_range_k_re) PetscCall(PetscArraycpy(ctx->range_k_re, _range_k_re, ctx->Pp_re));
438: /* TODO: use a single MPI call */
439: PetscCall(PetscMPIIntCast(ctx->Mp_re, &ni));
440: PetscCallMPI(MPI_Bcast(ctx->range_i_re, ni, MPIU_INT, 0, comm));
441: PetscCall(PetscMPIIntCast(ctx->Np_re, &ni));
442: PetscCallMPI(MPI_Bcast(ctx->range_j_re, ni, MPIU_INT, 0, comm));
443: PetscCall(PetscMPIIntCast(ctx->Pp_re, &ni));
444: PetscCallMPI(MPI_Bcast(ctx->range_k_re, ni, MPIU_INT, 0, comm));
446: PetscCall(PetscMalloc3(ctx->Mp_re, &ctx->start_i_re, ctx->Np_re, &ctx->start_j_re, ctx->Pp_re, &ctx->start_k_re));
448: sum = 0;
449: for (k = 0; k < ctx->Mp_re; k++) {
450: ctx->start_i_re[k] = sum;
451: sum += ctx->range_i_re[k];
452: }
454: sum = 0;
455: for (k = 0; k < ctx->Np_re; k++) {
456: ctx->start_j_re[k] = sum;
457: sum += ctx->range_j_re[k];
458: }
460: sum = 0;
461: for (k = 0; k < ctx->Pp_re; k++) {
462: ctx->start_k_re[k] = sum;
463: sum += ctx->range_k_re[k];
464: }
466: /* attach repartitioned dm to child ksp */
467: {
468: PetscErrorCode (*dmksp_func)(KSP, Mat, Mat, void *);
469: void *dmksp_ctx;
471: PetscCall(DMKSPGetComputeOperators(dm, &dmksp_func, &dmksp_ctx));
473: /* attach dm to ksp on sub communicator */
474: if (PCTelescope_isActiveRank(sred)) {
475: PetscCall(KSPSetDM(sred->ksp, ctx->dmrepart));
477: if (!dmksp_func || sred->ignore_kspcomputeoperators) {
478: PetscCall(KSPSetDMActive(sred->ksp, PETSC_FALSE));
479: } else {
480: /* sub ksp inherits dmksp_func and context provided by user */
481: PetscCall(KSPSetComputeOperators(sred->ksp, dmksp_func, dmksp_ctx));
482: PetscCall(KSPSetDMActive(sred->ksp, PETSC_TRUE));
483: }
484: }
485: }
486: PetscFunctionReturn(PETSC_SUCCESS);
487: }
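/*
  Build the scalar permutation matrix mapping the parent DMDA's global (PETSc) ordering to
  the global ordering of the repartitioned DMDA (3D case; the 2D routine below is analogous).
  Each locally owned point (i,j,k) contributes a single 1.0: the row is the parent index,
  the column is the index of the same point in the repartitioned layout. The final per-dof
  operator is obtained by wrapping the scalar matrix with MatCreateMAIJ().
*/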
489: static PetscErrorCode PCTelescopeSetUp_dmda_permutation_3d(PC pc, PC_Telescope sred, PC_Telescope_DMDACtx *ctx)
490: {
491: DM dm;
492: MPI_Comm comm;
493: Mat Pscalar, P;
494: PetscInt ndof;
495: PetscInt i, j, k, location, startI[3], endI[3], lenI[3], nx, ny, nz;
496: PetscInt sr, er, Mr;
497: Vec V;
499: PetscFunctionBegin;
500: PetscCall(PetscInfo(pc, "PCTelescope: setting up the permutation matrix (DMDA-3D)\n"));
501: PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
503: PetscCall(PCGetDM(pc, &dm));
504: PetscCall(DMDAGetInfo(dm, NULL, &nx, &ny, &nz, NULL, NULL, NULL, &ndof, NULL, NULL, NULL, NULL, NULL));
506: PetscCall(DMGetGlobalVector(dm, &V));
507: PetscCall(VecGetSize(V, &Mr));
508: PetscCall(VecGetOwnershipRange(V, &sr, &er));
509: PetscCall(DMRestoreGlobalVector(dm, &V));
510: sr = sr / ndof;
511: er = er / ndof;
512: Mr = Mr / ndof;
514: PetscCall(MatCreate(comm, &Pscalar));
515: PetscCall(MatSetSizes(Pscalar, er - sr, er - sr, Mr, Mr));
516: PetscCall(MatSetType(Pscalar, MATAIJ));
517: PetscCall(MatSeqAIJSetPreallocation(Pscalar, 1, NULL));
518: PetscCall(MatMPIAIJSetPreallocation(Pscalar, 1, NULL, 1, NULL));
520: PetscCall(DMDAGetCorners(dm, NULL, NULL, NULL, &lenI[0], &lenI[1], &lenI[2]));
521: PetscCall(DMDAGetCorners(dm, &startI[0], &startI[1], &startI[2], &endI[0], &endI[1], &endI[2]));
522: endI[0] += startI[0];
523: endI[1] += startI[1];
524: endI[2] += startI[2];
526: for (k = startI[2]; k < endI[2]; k++) {
527: for (j = startI[1]; j < endI[1]; j++) {
528: for (i = startI[0]; i < endI[0]; i++) {
529: PetscMPIInt rank_ijk_re, rank_reI[3];
530: PetscInt s0_re;
531: PetscInt ii, jj, kk, local_ijk_re, mapped_ijk;
532: PetscInt lenI_re[3];
534: location = (i - startI[0]) + (j - startI[1]) * lenI[0] + (k - startI[2]) * lenI[0] * lenI[1];
535: PetscCall(_DMDADetermineRankFromGlobalIJK(3, i, j, k, ctx->Mp_re, ctx->Np_re, ctx->Pp_re, ctx->start_i_re, ctx->start_j_re, ctx->start_k_re, ctx->range_i_re, ctx->range_j_re, ctx->range_k_re, &rank_reI[0], &rank_reI[1], &rank_reI[2], &rank_ijk_re));
536: PetscCall(_DMDADetermineGlobalS0(3, rank_ijk_re, ctx->Mp_re, ctx->Np_re, ctx->Pp_re, ctx->range_i_re, ctx->range_j_re, ctx->range_k_re, &s0_re));
537: ii = i - ctx->start_i_re[rank_reI[0]];
538: PetscCheck(ii >= 0, PETSC_COMM_SELF, PETSC_ERR_USER, "[dmdarepart-perm3d] index error ii");
539: jj = j - ctx->start_j_re[rank_reI[1]];
540: PetscCheck(jj >= 0, PETSC_COMM_SELF, PETSC_ERR_USER, "[dmdarepart-perm3d] index error jj");
541: kk = k - ctx->start_k_re[rank_reI[2]];
542: PetscCheck(kk >= 0, PETSC_COMM_SELF, PETSC_ERR_USER, "[dmdarepart-perm3d] index error kk");
543: lenI_re[0] = ctx->range_i_re[rank_reI[0]];
544: lenI_re[1] = ctx->range_j_re[rank_reI[1]];
545: lenI_re[2] = ctx->range_k_re[rank_reI[2]];
546: local_ijk_re = ii + jj * lenI_re[0] + kk * lenI_re[0] * lenI_re[1];
547: mapped_ijk = s0_re + local_ijk_re;
548: PetscCall(MatSetValue(Pscalar, sr + location, mapped_ijk, 1.0, INSERT_VALUES));
549: }
550: }
551: }
552: PetscCall(MatAssemblyBegin(Pscalar, MAT_FINAL_ASSEMBLY));
553: PetscCall(MatAssemblyEnd(Pscalar, MAT_FINAL_ASSEMBLY));
554: PetscCall(MatCreateMAIJ(Pscalar, ndof, &P));
555: PetscCall(MatDestroy(&Pscalar));
556: ctx->permutation = P;
557: PetscFunctionReturn(PETSC_SUCCESS);
558: }
560: static PetscErrorCode PCTelescopeSetUp_dmda_permutation_2d(PC pc, PC_Telescope sred, PC_Telescope_DMDACtx *ctx)
561: {
562: DM dm;
563: MPI_Comm comm;
564: Mat Pscalar, P;
565: PetscInt ndof;
566: PetscInt i, j, location, startI[2], endI[2], lenI[2], nx, ny, nz;
567: PetscInt sr, er, Mr;
568: Vec V;
570: PetscFunctionBegin;
571: PetscCall(PetscInfo(pc, "PCTelescope: setting up the permutation matrix (DMDA-2D)\n"));
572: PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
573: PetscCall(PCGetDM(pc, &dm));
574: PetscCall(DMDAGetInfo(dm, NULL, &nx, &ny, &nz, NULL, NULL, NULL, &ndof, NULL, NULL, NULL, NULL, NULL));
575: PetscCall(DMGetGlobalVector(dm, &V));
576: PetscCall(VecGetSize(V, &Mr));
577: PetscCall(VecGetOwnershipRange(V, &sr, &er));
578: PetscCall(DMRestoreGlobalVector(dm, &V));
579: sr = sr / ndof;
580: er = er / ndof;
581: Mr = Mr / ndof;
583: PetscCall(MatCreate(comm, &Pscalar));
584: PetscCall(MatSetSizes(Pscalar, er - sr, er - sr, Mr, Mr));
585: PetscCall(MatSetType(Pscalar, MATAIJ));
586: PetscCall(MatSeqAIJSetPreallocation(Pscalar, 1, NULL));
587: PetscCall(MatMPIAIJSetPreallocation(Pscalar, 1, NULL, 1, NULL));
589: PetscCall(DMDAGetCorners(dm, NULL, NULL, NULL, &lenI[0], &lenI[1], NULL));
590: PetscCall(DMDAGetCorners(dm, &startI[0], &startI[1], NULL, &endI[0], &endI[1], NULL));
591: endI[0] += startI[0];
592: endI[1] += startI[1];
594: for (j = startI[1]; j < endI[1]; j++) {
595: for (i = startI[0]; i < endI[0]; i++) {
596: PetscMPIInt rank_ijk_re, rank_reI[3];
597: PetscInt s0_re;
598: PetscInt ii, jj, local_ijk_re, mapped_ijk;
599: PetscInt lenI_re[3];
601: location = (i - startI[0]) + (j - startI[1]) * lenI[0];
602: PetscCall(_DMDADetermineRankFromGlobalIJK(2, i, j, 0, ctx->Mp_re, ctx->Np_re, ctx->Pp_re, ctx->start_i_re, ctx->start_j_re, ctx->start_k_re, ctx->range_i_re, ctx->range_j_re, ctx->range_k_re, &rank_reI[0], &rank_reI[1], NULL, &rank_ijk_re));
604: PetscCall(_DMDADetermineGlobalS0(2, rank_ijk_re, ctx->Mp_re, ctx->Np_re, ctx->Pp_re, ctx->range_i_re, ctx->range_j_re, ctx->range_k_re, &s0_re));
606: ii = i - ctx->start_i_re[rank_reI[0]];
607: PetscCheck(ii >= 0, PETSC_COMM_SELF, PETSC_ERR_USER, "[dmdarepart-perm2d] index error ii");
608: jj = j - ctx->start_j_re[rank_reI[1]];
609: PetscCheck(jj >= 0, PETSC_COMM_SELF, PETSC_ERR_USER, "[dmdarepart-perm2d] index error jj");
611: lenI_re[0] = ctx->range_i_re[rank_reI[0]];
612: lenI_re[1] = ctx->range_j_re[rank_reI[1]];
613: local_ijk_re = ii + jj * lenI_re[0];
614: mapped_ijk = s0_re + local_ijk_re;
615: PetscCall(MatSetValue(Pscalar, sr + location, mapped_ijk, 1.0, INSERT_VALUES));
616: }
617: }
618: PetscCall(MatAssemblyBegin(Pscalar, MAT_FINAL_ASSEMBLY));
619: PetscCall(MatAssemblyEnd(Pscalar, MAT_FINAL_ASSEMBLY));
620: PetscCall(MatCreateMAIJ(Pscalar, ndof, &P));
621: PetscCall(MatDestroy(&Pscalar));
622: ctx->permutation = P;
623: PetscFunctionReturn(PETSC_SUCCESS);
624: }
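/*
  Create the vector scatter used when applying the preconditioner: active ranks receive the
  contiguous block of the permuted parent vector corresponding to their portion of the
  repartitioned vector (a stride IS over [st,ed)), while inactive ranks contribute an empty
  IS. xtmp is the local receive buffer and xp a work vector of parent size.
*/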
626: static PetscErrorCode PCTelescopeSetUp_dmda_scatters(PC pc, PC_Telescope sred, PC_Telescope_DMDACtx *ctx)
627: {
628: Vec xred, yred, xtmp, x, xp;
629: VecScatter scatter;
630: IS isin;
631: Mat B;
632: PetscInt m, bs, st, ed;
633: MPI_Comm comm;
635: PetscFunctionBegin;
636: PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
637: PetscCall(PCGetOperators(pc, NULL, &B));
638: PetscCall(MatCreateVecs(B, &x, NULL));
639: PetscCall(MatGetBlockSize(B, &bs));
640: PetscCall(VecDuplicate(x, &xp));
641: m = 0;
642: xred = NULL;
643: yred = NULL;
644: if (PCTelescope_isActiveRank(sred)) {
645: PetscCall(DMCreateGlobalVector(ctx->dmrepart, &xred));
646: PetscCall(VecDuplicate(xred, &yred));
647: PetscCall(VecGetOwnershipRange(xred, &st, &ed));
648: PetscCall(ISCreateStride(comm, ed - st, st, 1, &isin));
649: PetscCall(VecGetLocalSize(xred, &m));
650: } else {
651: PetscCall(VecGetOwnershipRange(x, &st, &ed));
652: PetscCall(ISCreateStride(comm, 0, st, 1, &isin));
653: }
654: PetscCall(ISSetBlockSize(isin, bs));
655: PetscCall(VecCreate(comm, &xtmp));
656: PetscCall(VecSetSizes(xtmp, m, PETSC_DECIDE));
657: PetscCall(VecSetBlockSize(xtmp, bs));
658: PetscCall(VecSetType(xtmp, ((PetscObject)x)->type_name));
659: PetscCall(VecScatterCreate(x, isin, xtmp, NULL, &scatter));
660: sred->xred = xred;
661: sred->yred = yred;
662: sred->isin = isin;
663: sred->scatter = scatter;
664: sred->xtmp = xtmp;
666: ctx->xp = xp;
667: PetscCall(VecDestroy(&x));
668: PetscFunctionReturn(PETSC_SUCCESS);
669: }
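/*
  Top-level DMDA setup for PCTelescope: create the repartitioned DMDA on the
  sub-communicator, transfer the coordinates, build the permutation matrix (2D and 3D only),
  and create the scatters used by PCApply.
*/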
671: PetscErrorCode PCTelescopeSetUp_dmda(PC pc, PC_Telescope sred)
672: {
673: PC_Telescope_DMDACtx *ctx;
674: PetscInt dim;
675: DM dm;
676: MPI_Comm comm;
678: PetscFunctionBegin;
679: PetscCall(PetscInfo(pc, "PCTelescope: setup (DMDA)\n"));
680: PetscCall(PetscNew(&ctx));
681: sred->dm_ctx = (void *)ctx;
683: PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
684: PetscCall(PCGetDM(pc, &dm));
685: PetscCall(DMDAGetInfo(dm, &dim, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL));
687: PetscCall(PCTelescopeSetUp_dmda_repart(pc, sred, ctx));
688: PetscCall(PCTelescopeSetUp_dmda_repart_coors(pc, sred, ctx));
689: switch (dim) {
690: case 1:
691: SETERRQ(comm, PETSC_ERR_SUP, "Telescope: DMDA (1D) repartitioning not provided");
692: case 2:
693: PetscCall(PCTelescopeSetUp_dmda_permutation_2d(pc, sred, ctx));
694: break;
695: case 3:
696: PetscCall(PCTelescopeSetUp_dmda_permutation_3d(pc, sred, ctx));
697: break;
698: }
699: PetscCall(PCTelescopeSetUp_dmda_scatters(pc, sred, ctx));
700: PetscFunctionReturn(PETSC_SUCCESS);
701: }
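/*
  Assemble the operator for the sub-KSP explicitly when no KSPComputeOperators callback is
  available (or it is ignored): form Bperm = P^T B P with the permutation matrix, extract
  the rows owned by the active ranks, and concatenate the resulting sequential matrices into
  a parallel matrix on the sub-communicator.
*/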
703: static PetscErrorCode PCTelescopeMatCreate_dmda_dmactivefalse(PC pc, PC_Telescope sred, MatReuse reuse, Mat *A)
704: {
705: PC_Telescope_DMDACtx *ctx;
706: MPI_Comm comm, subcomm;
707: Mat Bperm, Bred, B, P;
708: PetscInt nr, nc;
709: IS isrow, iscol;
710: Mat Blocal, *_Blocal;
712: PetscFunctionBegin;
713: PetscCall(PetscInfo(pc, "PCTelescope: updating the redundant preconditioned operator (DMDA)\n"));
714: PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
715: subcomm = PetscSubcommChild(sred->psubcomm);
716: ctx = (PC_Telescope_DMDACtx *)sred->dm_ctx;
718: PetscCall(PCGetOperators(pc, NULL, &B));
719: PetscCall(MatGetSize(B, &nr, &nc));
721: P = ctx->permutation;
722: PetscCall(MatPtAP(B, P, MAT_INITIAL_MATRIX, 1.1, &Bperm));
724: /* Get submatrices */
725: isrow = sred->isin;
726: PetscCall(ISCreateStride(comm, nc, 0, 1, &iscol));
728: PetscCall(MatCreateSubMatrices(Bperm, 1, &isrow, &iscol, MAT_INITIAL_MATRIX, &_Blocal));
729: Blocal = *_Blocal;
730: Bred = NULL;
731: if (PCTelescope_isActiveRank(sred)) {
732: PetscInt mm;
734: if (reuse != MAT_INITIAL_MATRIX) Bred = *A;
735: PetscCall(MatGetSize(Blocal, &mm, NULL));
736: /* PetscCall(MatCreateMPIMatConcatenateSeqMat(subcomm,Blocal,PETSC_DECIDE,reuse,&Bred)); */
737: PetscCall(MatCreateMPIMatConcatenateSeqMat(subcomm, Blocal, mm, reuse, &Bred));
738: }
739: *A = Bred;
741: PetscCall(ISDestroy(&iscol));
742: PetscCall(MatDestroy(&Bperm));
743: PetscCall(MatDestroyMatrices(1, &_Blocal));
744: PetscFunctionReturn(PETSC_SUCCESS);
745: }
747: PetscErrorCode PCTelescopeMatCreate_dmda(PC pc, PC_Telescope sred, MatReuse reuse, Mat *A)
748: {
749: DM dm;
750: PetscErrorCode (*dmksp_func)(KSP, Mat, Mat, void *);
751: void *dmksp_ctx;
753: PetscFunctionBegin;
754: PetscCall(PCGetDM(pc, &dm));
755: PetscCall(DMKSPGetComputeOperators(dm, &dmksp_func, &dmksp_ctx));
756: /* We assume that dmksp_func = NULL is equivalent to dmActive = PETSC_FALSE */
757: if (dmksp_func && !sred->ignore_kspcomputeoperators) {
758: DM dmrepart;
759: Mat Ak;
761: *A = NULL;
762: if (PCTelescope_isActiveRank(sred)) {
763: PetscCall(KSPGetDM(sred->ksp, &dmrepart));
764: if (reuse == MAT_INITIAL_MATRIX) {
765: PetscCall(DMCreateMatrix(dmrepart, &Ak));
766: *A = Ak;
767: } else if (reuse == MAT_REUSE_MATRIX) {
768: Ak = *A;
769: }
770: /*
771: There is no need to explicitly assemble the operator now,
772: the sub-KSP will call the method provided to KSPSetComputeOperators() during KSPSetUp()
773: */
774: }
775: } else {
776: PetscCall(PCTelescopeMatCreate_dmda_dmactivefalse(pc, sred, reuse, A));
777: }
778: PetscFunctionReturn(PETSC_SUCCESS);
779: }
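/*
  Restrict a (near) nullspace of the parent operator to the sub-communicator: each basis
  vector is permuted and scattered exactly as in PCApply, copied into a duplicate of xred,
  and the copies are gathered into a new MatNullSpace on the child communicator.
*/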
781: static PetscErrorCode PCTelescopeSubNullSpaceCreate_dmda_Telescope(PC pc, PC_Telescope sred, MatNullSpace nullspace, MatNullSpace *sub_nullspace)
782: {
783: PetscBool has_const;
784: PetscInt i, k, n = 0;
785: const Vec *vecs;
786: Vec *sub_vecs = NULL;
787: MPI_Comm subcomm;
788: PC_Telescope_DMDACtx *ctx;
790: PetscFunctionBegin;
791: ctx = (PC_Telescope_DMDACtx *)sred->dm_ctx;
792: subcomm = PetscSubcommChild(sred->psubcomm);
793: PetscCall(MatNullSpaceGetVecs(nullspace, &has_const, &n, &vecs));
795: if (PCTelescope_isActiveRank(sred)) {
796: /* create new vectors */
797: if (n) PetscCall(VecDuplicateVecs(sred->xred, n, &sub_vecs));
798: }
800: /* copy entries */
801: for (k = 0; k < n; k++) {
802: const PetscScalar *x_array;
803: PetscScalar *LA_sub_vec;
804: PetscInt st, ed;
806: /* permute vector into ordering associated with re-partitioned dmda */
807: PetscCall(MatMultTranspose(ctx->permutation, vecs[k], ctx->xp));
809: /* pull in vector x->xtmp */
810: PetscCall(VecScatterBegin(sred->scatter, ctx->xp, sred->xtmp, INSERT_VALUES, SCATTER_FORWARD));
811: PetscCall(VecScatterEnd(sred->scatter, ctx->xp, sred->xtmp, INSERT_VALUES, SCATTER_FORWARD));
813: /* copy vector entries into xred */
814: PetscCall(VecGetArrayRead(sred->xtmp, &x_array));
815: if (sub_vecs) {
816: if (sub_vecs[k]) {
817: PetscCall(VecGetOwnershipRange(sub_vecs[k], &st, &ed));
818: PetscCall(VecGetArray(sub_vecs[k], &LA_sub_vec));
819: for (i = 0; i < ed - st; i++) LA_sub_vec[i] = x_array[i];
820: PetscCall(VecRestoreArray(sub_vecs[k], &LA_sub_vec));
821: }
822: }
823: PetscCall(VecRestoreArrayRead(sred->xtmp, &x_array));
824: }
826: if (PCTelescope_isActiveRank(sred)) {
827: /* create new (near) nullspace for redundant object */
828: PetscCall(MatNullSpaceCreate(subcomm, has_const, n, sub_vecs, sub_nullspace));
829: PetscCall(VecDestroyVecs(n, &sub_vecs));
830: PetscCheck(!nullspace->remove, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Propagation of custom remove callbacks not supported when propagating (near) nullspaces with PCTelescope");
831: PetscCheck(!nullspace->rmctx, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Propagation of custom remove callback context not supported when propagating (near) nullspaces with PCTelescope");
832: }
833: PetscFunctionReturn(PETSC_SUCCESS);
834: }
836: PetscErrorCode PCTelescopeMatNullSpaceCreate_dmda(PC pc, PC_Telescope sred, Mat sub_mat)
837: {
838: Mat B;
840: PetscFunctionBegin;
841: PetscCall(PCGetOperators(pc, NULL, &B));
842: {
843: MatNullSpace nullspace, sub_nullspace;
844: PetscCall(MatGetNullSpace(B, &nullspace));
845: if (nullspace) {
846: PetscCall(PetscInfo(pc, "PCTelescope: generating nullspace (DMDA)\n"));
847: PetscCall(PCTelescopeSubNullSpaceCreate_dmda_Telescope(pc, sred, nullspace, &sub_nullspace));
848: if (PCTelescope_isActiveRank(sred)) {
849: PetscCall(MatSetNullSpace(sub_mat, sub_nullspace));
850: PetscCall(MatNullSpaceDestroy(&sub_nullspace));
851: }
852: }
853: }
854: {
855: MatNullSpace nearnullspace, sub_nearnullspace;
856: PetscCall(MatGetNearNullSpace(B, &nearnullspace));
857: if (nearnullspace) {
858: PetscCall(PetscInfo(pc, "PCTelescope: generating near nullspace (DMDA)\n"));
859: PetscCall(PCTelescopeSubNullSpaceCreate_dmda_Telescope(pc, sred, nearnullspace, &sub_nearnullspace));
860: if (PCTelescope_isActiveRank(sred)) {
861: PetscCall(MatSetNearNullSpace(sub_mat, sub_nearnullspace));
862: PetscCall(MatNullSpaceDestroy(&sub_nearnullspace));
863: }
864: }
865: }
866: PetscFunctionReturn(PETSC_SUCCESS);
867: }
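/*
  Apply the preconditioner: permute x into the repartitioned ordering (xp = P^T x), scatter
  it onto the active ranks, solve with the sub-KSP there, then scatter the result back and
  permute it into the parent ordering (y = P xp).
*/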
869: PetscErrorCode PCApply_Telescope_dmda(PC pc, Vec x, Vec y)
870: {
871: PC_Telescope sred = (PC_Telescope)pc->data;
872: Mat perm;
873: Vec xtmp, xp, xred, yred;
874: PetscInt i, st, ed;
875: VecScatter scatter;
876: PetscScalar *array;
877: const PetscScalar *x_array;
878: PC_Telescope_DMDACtx *ctx;
880: ctx = (PC_Telescope_DMDACtx *)sred->dm_ctx;
881: xtmp = sred->xtmp;
882: scatter = sred->scatter;
883: xred = sred->xred;
884: yred = sred->yred;
885: perm = ctx->permutation;
886: xp = ctx->xp;
888: PetscFunctionBegin;
889: PetscCall(PetscCitationsRegister(citation, &cited));
891: /* permute vector into ordering associated with re-partitioned dmda */
892: PetscCall(MatMultTranspose(perm, x, xp));
894: /* pull in vector x->xtmp */
895: PetscCall(VecScatterBegin(scatter, xp, xtmp, INSERT_VALUES, SCATTER_FORWARD));
896: PetscCall(VecScatterEnd(scatter, xp, xtmp, INSERT_VALUES, SCATTER_FORWARD));
898: /* copy vector entries into xred */
899: PetscCall(VecGetArrayRead(xtmp, &x_array));
900: if (xred) {
901: PetscScalar *LA_xred;
902: PetscCall(VecGetOwnershipRange(xred, &st, &ed));
904: PetscCall(VecGetArray(xred, &LA_xred));
905: for (i = 0; i < ed - st; i++) LA_xred[i] = x_array[i];
906: PetscCall(VecRestoreArray(xred, &LA_xred));
907: }
908: PetscCall(VecRestoreArrayRead(xtmp, &x_array));
910: /* solve */
911: if (PCTelescope_isActiveRank(sred)) {
912: PetscCall(KSPSolve(sred->ksp, xred, yred));
913: PetscCall(KSPCheckSolve(sred->ksp, pc, yred));
914: }
916: /* return vector */
917: PetscCall(VecGetArray(xtmp, &array));
918: if (yred) {
919: const PetscScalar *LA_yred;
920: PetscCall(VecGetOwnershipRange(yred, &st, &ed));
921: PetscCall(VecGetArrayRead(yred, &LA_yred));
922: for (i = 0; i < ed - st; i++) array[i] = LA_yred[i];
923: PetscCall(VecRestoreArrayRead(yred, &LA_yred));
924: }
925: PetscCall(VecRestoreArray(xtmp, &array));
926: PetscCall(VecScatterBegin(scatter, xtmp, xp, INSERT_VALUES, SCATTER_REVERSE));
927: PetscCall(VecScatterEnd(scatter, xtmp, xp, INSERT_VALUES, SCATTER_REVERSE));
928: PetscCall(MatMult(perm, xp, y));
929: PetscFunctionReturn(PETSC_SUCCESS);
930: }
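/*
  Single Richardson iteration: when a non-zero initial guess is supplied, y is permuted and
  scattered into the sub-KSP's solution vector and the sub-KSP is flagged to use a non-zero
  initial guess before PCApply_Telescope_dmda() is called; the previous initial-guess
  setting is restored afterwards.
*/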
932: PetscErrorCode PCApplyRichardson_Telescope_dmda(PC pc, Vec x, Vec y, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool zeroguess, PetscInt *outits, PCRichardsonConvergedReason *reason)
933: {
934: PC_Telescope sred = (PC_Telescope)pc->data;
935: Mat perm;
936: Vec xtmp, xp, yred;
937: PetscInt i, st, ed;
938: VecScatter scatter;
939: const PetscScalar *x_array;
940: PetscBool default_init_guess_value = PETSC_FALSE;
941: PC_Telescope_DMDACtx *ctx;
943: PetscFunctionBegin;
944: ctx = (PC_Telescope_DMDACtx *)sred->dm_ctx;
945: xtmp = sred->xtmp;
946: scatter = sred->scatter;
947: yred = sred->yred;
948: perm = ctx->permutation;
949: xp = ctx->xp;
951: PetscCheck(its <= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "PCApplyRichardson_Telescope_dmda only supports max_it = 1");
952: *reason = (PCRichardsonConvergedReason)0;
954: if (!zeroguess) {
955: PetscCall(PetscInfo(pc, "PCTelescopeDMDA: Scattering y for non-zero-initial guess\n"));
956: /* permute vector into ordering associated with re-partitioned dmda */
957: PetscCall(MatMultTranspose(perm, y, xp));
959: /* pull in vector x->xtmp */
960: PetscCall(VecScatterBegin(scatter, xp, xtmp, INSERT_VALUES, SCATTER_FORWARD));
961: PetscCall(VecScatterEnd(scatter, xp, xtmp, INSERT_VALUES, SCATTER_FORWARD));
963: /* copy vector entries into xred */
964: PetscCall(VecGetArrayRead(xtmp, &x_array));
965: if (yred) {
966: PetscScalar *LA_yred;
967: PetscCall(VecGetOwnershipRange(yred, &st, &ed));
968: PetscCall(VecGetArray(yred, &LA_yred));
969: for (i = 0; i < ed - st; i++) LA_yred[i] = x_array[i];
970: PetscCall(VecRestoreArray(yred, &LA_yred));
971: }
972: PetscCall(VecRestoreArrayRead(xtmp, &x_array));
973: }
975: if (PCTelescope_isActiveRank(sred)) {
976: PetscCall(KSPGetInitialGuessNonzero(sred->ksp, &default_init_guess_value));
977: if (!zeroguess) PetscCall(KSPSetInitialGuessNonzero(sred->ksp, PETSC_TRUE));
978: }
980: PetscCall(PCApply_Telescope_dmda(pc, x, y));
982: if (PCTelescope_isActiveRank(sred)) PetscCall(KSPSetInitialGuessNonzero(sred->ksp, default_init_guess_value));
984: if (!*reason) *reason = PCRICHARDSON_CONVERGED_ITS;
985: *outits = 1;
986: PetscFunctionReturn(PETSC_SUCCESS);
987: }
989: PetscErrorCode PCReset_Telescope_dmda(PC pc)
990: {
991: PC_Telescope sred = (PC_Telescope)pc->data;
992: PC_Telescope_DMDACtx *ctx;
994: PetscFunctionBegin;
995: ctx = (PC_Telescope_DMDACtx *)sred->dm_ctx;
996: PetscCall(VecDestroy(&ctx->xp));
997: PetscCall(MatDestroy(&ctx->permutation));
998: PetscCall(DMDestroy(&ctx->dmrepart));
999: PetscCall(PetscFree3(ctx->range_i_re, ctx->range_j_re, ctx->range_k_re));
1000: PetscCall(PetscFree3(ctx->start_i_re, ctx->start_j_re, ctx->start_k_re));
1001: PetscFunctionReturn(PETSC_SUCCESS);
1002: }
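/* Compact viewers printing a short summary of a 3D or 2D DMDA (global sizes, process grid, dof, stencil width) */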
1004: static PetscErrorCode DMView_DA_Short_3d(DM dm, PetscViewer v)
1005: {
1006: PetscInt M, N, P, m, n, p, ndof, nsw;
1007: MPI_Comm comm;
1008: PetscMPIInt size;
1009: const char *prefix;
1011: PetscFunctionBegin;
1012: PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
1013: PetscCallMPI(MPI_Comm_size(comm, &size));
1014: PetscCall(DMGetOptionsPrefix(dm, &prefix));
1015: PetscCall(DMDAGetInfo(dm, NULL, &M, &N, &P, &m, &n, &p, &ndof, &nsw, NULL, NULL, NULL, NULL));
1016: if (prefix) PetscCall(PetscViewerASCIIPrintf(v, "DMDA Object: (%s) %d MPI processes\n", prefix, size));
1017: else PetscCall(PetscViewerASCIIPrintf(v, "DMDA Object: %d MPI processes\n", size));
1018: PetscCall(PetscViewerASCIIPrintf(v, " M %" PetscInt_FMT " N %" PetscInt_FMT " P %" PetscInt_FMT " m %" PetscInt_FMT " n %" PetscInt_FMT " p %" PetscInt_FMT " dof %" PetscInt_FMT " overlap %" PetscInt_FMT "\n", M, N, P, m, n, p, ndof, nsw));
1019: PetscFunctionReturn(PETSC_SUCCESS);
1020: }
1022: static PetscErrorCode DMView_DA_Short_2d(DM dm, PetscViewer v)
1023: {
1024: PetscInt M, N, m, n, ndof, nsw;
1025: MPI_Comm comm;
1026: PetscMPIInt size;
1027: const char *prefix;
1029: PetscFunctionBegin;
1030: PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
1031: PetscCallMPI(MPI_Comm_size(comm, &size));
1032: PetscCall(DMGetOptionsPrefix(dm, &prefix));
1033: PetscCall(DMDAGetInfo(dm, NULL, &M, &N, NULL, &m, &n, NULL, &ndof, &nsw, NULL, NULL, NULL, NULL));
1034: if (prefix) PetscCall(PetscViewerASCIIPrintf(v, "DMDA Object: (%s) %d MPI processes\n", prefix, size));
1035: else PetscCall(PetscViewerASCIIPrintf(v, "DMDA Object: %d MPI processes\n", size));
1036: PetscCall(PetscViewerASCIIPrintf(v, " M %" PetscInt_FMT " N %" PetscInt_FMT " m %" PetscInt_FMT " n %" PetscInt_FMT " dof %" PetscInt_FMT " overlap %" PetscInt_FMT "\n", M, N, m, n, ndof, nsw));
1037: PetscFunctionReturn(PETSC_SUCCESS);
1038: }
1040: PetscErrorCode DMView_DA_Short(DM dm, PetscViewer v)
1041: {
1042: PetscInt dim;
1044: PetscFunctionBegin;
1045: PetscCall(DMDAGetInfo(dm, &dim, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL));
1046: switch (dim) {
1047: case 2:
1048: PetscCall(DMView_DA_Short_2d(dm, v));
1049: break;
1050: case 3:
1051: PetscCall(DMView_DA_Short_3d(dm, v));
1052: break;
1053: }
1054: PetscFunctionReturn(PETSC_SUCCESS);
1055: }