Actual source code: asm.c
petsc-3.13.0 2020-03-29
1: /*
2: This file defines an additive Schwarz preconditioner for any Mat implementation.
4: Note that each processor may have any number of subdomains. But in order to
5: deal easily with the VecScatter(), we treat each processor as if it has the
6: same number of subdomains.
8: n - total number of true subdomains on all processors
9: n_local_true - actual number of subdomains on this processor
10: n_local = maximum over all processors of n_local_true
11: */
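/*
   For example, with 3 processes owning 1, 2, and 3 true subdomains respectively:
   n = 6, n_local = 3, and n_local_true is 1, 2, or 3 depending on the rank.
   Ranks with fewer than n_local true subdomains still participate in the
   collective calls for the remaining "virtual" subdomains (see, e.g.,
   PCASMPrintSubdomains() below).
*/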
12: #include <petsc/private/pcimpl.h>
13: #include <petscdm.h>
15: typedef struct {
16: PetscInt n, n_local, n_local_true;
17: PetscInt overlap; /* overlap requested by user */
18: KSP *ksp; /* linear solvers for each block */
19: VecScatter restriction; /* mapping from global to overlapping (process) subdomain*/
20: VecScatter *lrestriction; /* mapping from subregion to overlapping (process) subdomain */
21: VecScatter *lprolongation; /* mapping from non-overlapping subregion to overlapping (process) subdomain; used for restrict additive version of algorithms */
22: Vec lx, ly; /* work vectors */
23: Vec *x,*y; /* work vectors */
24: IS lis; /* index set that defines the entire overlapping (process) subdomain, i.e., the union of all is[]; also used by the multiplicative local type */
25: IS *is; /* index set that defines each overlapping subdomain */
26: IS *is_local; /* index set that defines each non-overlapping subdomain, may be NULL */
27: Mat *mat,*pmat; /* mat is not currently used */
28: PCASMType type; /* use reduced interpolation, restriction or both */
29: PetscBool type_set; /* if user set this value (so won't change it for symmetric problems) */
30: PetscBool same_local_solves; /* flag indicating whether all local solvers are the same */
31: PetscBool sort_indices; /* flag to sort subdomain indices */
32: PetscBool dm_subdomains; /* whether DM is allowed to define subdomains */
33: PCCompositeType loctype; /* the type of composition for local solves */
34: MatType sub_mat_type; /* the type of Mat used for subdomain solves (can be MATSAME or NULL) */
35: /* For multiplicative solve */
36: Mat *lmats; /* submatrices for overlapping multiplicative (process) subdomain */
37: } PC_ASM;
39: static PetscErrorCode PCView_ASM(PC pc,PetscViewer viewer)
40: {
41: PC_ASM *osm = (PC_ASM*)pc->data;
43: PetscMPIInt rank;
44: PetscInt i,bsz;
45: PetscBool iascii,isstring;
46: PetscViewer sviewer;
49: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
50: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);
51: if (iascii) {
52: char overlaps[256] = "user-defined overlap",blocks[256] = "total subdomain blocks not yet set";
53: if (osm->overlap >= 0) {PetscSNPrintf(overlaps,sizeof(overlaps),"amount of overlap = %D",osm->overlap);}
54: if (osm->n > 0) {PetscSNPrintf(blocks,sizeof(blocks),"total subdomain blocks = %D",osm->n);}
55: PetscViewerASCIIPrintf(viewer," %s, %s\n",blocks,overlaps);
56: PetscViewerASCIIPrintf(viewer," restriction/interpolation type - %s\n",PCASMTypes[osm->type]);
57: if (osm->dm_subdomains) {PetscViewerASCIIPrintf(viewer," Additive Schwarz: using DM to define subdomains\n");}
58: if (osm->loctype != PC_COMPOSITE_ADDITIVE) {PetscViewerASCIIPrintf(viewer," Additive Schwarz: local solve composition type - %s\n",PCCompositeTypes[osm->loctype]);}
59: MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);
60: if (osm->same_local_solves) {
61: if (osm->ksp) {
62: PetscViewerASCIIPrintf(viewer," Local solve is the same for all blocks, in the following KSP and PC objects:\n");
63: PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);
64: if (!rank) {
65: PetscViewerASCIIPushTab(viewer);
66: KSPView(osm->ksp[0],sviewer);
67: PetscViewerASCIIPopTab(viewer);
68: }
69: PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);
70: }
71: } else {
72: PetscViewerASCIIPushSynchronized(viewer);
73: PetscViewerASCIISynchronizedPrintf(viewer," [%d] number of local blocks = %D\n",(int)rank,osm->n_local_true);
74: PetscViewerFlush(viewer);
75: PetscViewerASCIIPrintf(viewer," Local solve info for each block is in the following KSP and PC objects:\n");
76: PetscViewerASCIIPushTab(viewer);
77: PetscViewerASCIIPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");
78: PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);
79: for (i=0; i<osm->n_local_true; i++) {
80: ISGetLocalSize(osm->is[i],&bsz);
81: PetscViewerASCIISynchronizedPrintf(sviewer,"[%d] local block number %D, size = %D\n",(int)rank,i,bsz);
82: KSPView(osm->ksp[i],sviewer);
83: PetscViewerASCIISynchronizedPrintf(sviewer,"- - - - - - - - - - - - - - - - - -\n");
84: }
85: PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);
86: PetscViewerASCIIPopTab(viewer);
87: PetscViewerFlush(viewer);
88: PetscViewerASCIIPopSynchronized(viewer);
89: }
90: } else if (isstring) {
91: PetscViewerStringSPrintf(viewer," blocks=%D, overlap=%D, type=%s",osm->n,osm->overlap,PCASMTypes[osm->type]);
92: PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);
93: if (osm->ksp) {KSPView(osm->ksp[0],sviewer);}
94: PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);
95: }
96: return(0);
97: }
99: static PetscErrorCode PCASMPrintSubdomains(PC pc)
100: {
101: PC_ASM *osm = (PC_ASM*)pc->data;
102: const char *prefix;
103: char fname[PETSC_MAX_PATH_LEN+1];
104: PetscViewer viewer, sviewer;
105: char *s;
106: PetscInt i,j,nidx;
107: const PetscInt *idx;
108: PetscMPIInt rank, size;
112: MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size);
113: MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank);
114: PCGetOptionsPrefix(pc,&prefix);
115: PetscOptionsGetString(NULL,prefix,"-pc_asm_print_subdomains",fname,PETSC_MAX_PATH_LEN,NULL);
116: if (fname[0] == 0) { PetscStrcpy(fname,"stdout"); }
117: PetscViewerASCIIOpen(PetscObjectComm((PetscObject)pc),fname,&viewer);
118: for (i=0; i<osm->n_local; i++) {
119: if (i < osm->n_local_true) {
120: ISGetLocalSize(osm->is[i],&nidx);
121: ISGetIndices(osm->is[i],&idx);
122: /* Print to a string viewer; allow up to 16 characters per index (15 digits plus a separator) plus 512 for the header. */
123: #define len (16*(nidx+1)+512)
124: PetscMalloc1(len,&s);
125: PetscViewerStringOpen(PETSC_COMM_SELF, s, len, &sviewer);
126: #undef len
127: PetscViewerStringSPrintf(sviewer, "[%d:%d] Subdomain %D with overlap:\n", (int)rank, (int)size, i);
128: for (j=0; j<nidx; j++) {
129: PetscViewerStringSPrintf(sviewer,"%D ",idx[j]);
130: }
131: ISRestoreIndices(osm->is[i],&idx);
132: PetscViewerStringSPrintf(sviewer,"\n");
133: PetscViewerDestroy(&sviewer);
134: PetscViewerASCIIPushSynchronized(viewer);
135: PetscViewerASCIISynchronizedPrintf(viewer, "%s", s);
136: PetscViewerFlush(viewer);
137: PetscViewerASCIIPopSynchronized(viewer);
138: PetscFree(s);
139: if (osm->is_local) {
140: /* Print to a string viewer; allow up to 16 characters per index (15 digits plus a separator) plus 512 for the header. */
141: #define len (16*(nidx+1)+512)
142: PetscMalloc1(len, &s);
143: PetscViewerStringOpen(PETSC_COMM_SELF, s, len, &sviewer);
144: #undef len
145: PetscViewerStringSPrintf(sviewer, "[%d:%d] Subdomain %D without overlap:\n", (int)rank, (int)size, i);
146: ISGetLocalSize(osm->is_local[i],&nidx);
147: ISGetIndices(osm->is_local[i],&idx);
148: for (j=0; j<nidx; j++) {
149: PetscViewerStringSPrintf(sviewer,"%D ",idx[j]);
150: }
151: ISRestoreIndices(osm->is_local[i],&idx);
152: PetscViewerStringSPrintf(sviewer,"\n");
153: PetscViewerDestroy(&sviewer);
154: PetscViewerASCIIPushSynchronized(viewer);
155: PetscViewerASCIISynchronizedPrintf(viewer, "%s", s);
156: PetscViewerFlush(viewer);
157: PetscViewerASCIIPopSynchronized(viewer);
158: PetscFree(s);
159: }
160: } else {
161: /* Participate in collective viewer calls. */
162: PetscViewerASCIIPushSynchronized(viewer);
163: PetscViewerFlush(viewer);
164: PetscViewerASCIIPopSynchronized(viewer);
165: /* Assume either all ranks have is_local or none do. */
166: if (osm->is_local) {
167: PetscViewerASCIIPushSynchronized(viewer);
168: PetscViewerFlush(viewer);
169: PetscViewerASCIIPopSynchronized(viewer);
170: }
171: }
172: }
173: PetscViewerFlush(viewer);
174: PetscViewerDestroy(&viewer);
175: return(0);
176: }
178: static PetscErrorCode PCSetUp_ASM(PC pc)
179: {
180: PC_ASM *osm = (PC_ASM*)pc->data;
182: PetscBool flg;
183: PetscInt i,m,m_local;
184: MatReuse scall = MAT_REUSE_MATRIX;
185: IS isl;
186: KSP ksp;
187: PC subpc;
188: const char *prefix,*pprefix;
189: Vec vec;
190: DM *domain_dm = NULL;
193: if (!pc->setupcalled) {
194: PetscInt m;
196: /* Note: if subdomains have been set either via PCASMSetTotalSubdomains() or via PCASMSetLocalSubdomains(), osm->n_local_true will not be PETSC_DECIDE */
197: if (osm->n_local_true == PETSC_DECIDE) {
198: /* no subdomains given */
199: /* try pc->dm first, if allowed */
200: if (osm->dm_subdomains && pc->dm) {
201: PetscInt num_domains, d;
202: char **domain_names;
203: IS *inner_domain_is, *outer_domain_is;
204: DMCreateDomainDecomposition(pc->dm, &num_domains, &domain_names, &inner_domain_is, &outer_domain_is, &domain_dm);
205: osm->overlap = -1; /* We do not want to increase the overlap of the IS.
206: A future improvement of this code might allow one to use
207: DM-defined subdomains and also increase the overlap,
208: but that is not currently supported */
209: if (num_domains) {
210: PCASMSetLocalSubdomains(pc, num_domains, outer_domain_is, inner_domain_is);
211: }
212: for (d = 0; d < num_domains; ++d) {
213: if (domain_names) {PetscFree(domain_names[d]);}
214: if (inner_domain_is) {ISDestroy(&inner_domain_is[d]);}
215: if (outer_domain_is) {ISDestroy(&outer_domain_is[d]);}
216: }
217: PetscFree(domain_names);
218: PetscFree(inner_domain_is);
219: PetscFree(outer_domain_is);
220: }
221: if (osm->n_local_true == PETSC_DECIDE) {
222: /* still no subdomains; use one subdomain per processor */
223: osm->n_local_true = 1;
224: }
225: }
226: { /* determine the global and max number of subdomains */
227: struct {PetscInt max,sum;} inwork,outwork;
228: PetscMPIInt size;
230: inwork.max = osm->n_local_true;
231: inwork.sum = osm->n_local_true;
232: MPIU_Allreduce(&inwork,&outwork,1,MPIU_2INT,MPIU_MAXSUM_OP,PetscObjectComm((PetscObject)pc));
233: osm->n_local = outwork.max;
234: osm->n = outwork.sum;
236: MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);
237: if (outwork.max == 1 && outwork.sum == size) {
238: /* osm->n_local_true = 1 on all processes; setting this option may enable an optimized MatCreateSubMatrices() implementation */
239: MatSetOption(pc->pmat,MAT_SUBMAT_SINGLEIS,PETSC_TRUE);
240: }
241: }
242: if (!osm->is) { /* create the index sets */
243: PCASMCreateSubdomains(pc->pmat,osm->n_local_true,&osm->is);
244: }
245: if (osm->n_local_true > 1 && !osm->is_local) {
246: PetscMalloc1(osm->n_local_true,&osm->is_local);
247: for (i=0; i<osm->n_local_true; i++) {
248: if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
249: ISDuplicate(osm->is[i],&osm->is_local[i]);
250: ISCopy(osm->is[i],osm->is_local[i]);
251: } else {
252: PetscObjectReference((PetscObject)osm->is[i]);
253: osm->is_local[i] = osm->is[i];
254: }
255: }
256: }
257: PCGetOptionsPrefix(pc,&prefix);
258: flg = PETSC_FALSE;
259: PetscOptionsGetBool(NULL,prefix,"-pc_asm_print_subdomains",&flg,NULL);
260: if (flg) { PCASMPrintSubdomains(pc); }
262: if (osm->overlap > 0) {
263: /* Extend the "overlapping" regions by a number of steps */
264: MatIncreaseOverlap(pc->pmat,osm->n_local_true,osm->is,osm->overlap);
265: }
266: if (osm->sort_indices) {
267: for (i=0; i<osm->n_local_true; i++) {
268: ISSort(osm->is[i]);
269: if (osm->is_local) {
270: ISSort(osm->is_local[i]);
271: }
272: }
273: }
275: if (!osm->ksp) {
276: /* Create the local solvers */
277: PetscMalloc1(osm->n_local_true,&osm->ksp);
278: if (domain_dm) {
279: PetscInfo(pc,"Setting up ASM subproblems using the embedded DM\n");
280: }
281: for (i=0; i<osm->n_local_true; i++) {
282: KSPCreate(PETSC_COMM_SELF,&ksp);
283: KSPSetErrorIfNotConverged(ksp,pc->erroriffailure);
284: PetscLogObjectParent((PetscObject)pc,(PetscObject)ksp);
285: PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);
286: KSPSetType(ksp,KSPPREONLY);
287: KSPGetPC(ksp,&subpc);
288: PCGetOptionsPrefix(pc,&prefix);
289: KSPSetOptionsPrefix(ksp,prefix);
290: KSPAppendOptionsPrefix(ksp,"sub_");
291: if (domain_dm) {
292: KSPSetDM(ksp, domain_dm[i]);
293: KSPSetDMActive(ksp, PETSC_FALSE);
294: DMDestroy(&domain_dm[i]);
295: }
296: osm->ksp[i] = ksp;
297: }
298: if (domain_dm) {
299: PetscFree(domain_dm);
300: }
301: }
303: ISConcatenate(PETSC_COMM_SELF, osm->n_local_true, osm->is, &osm->lis);
304: ISSortRemoveDups(osm->lis);
305: ISGetLocalSize(osm->lis, &m);
306: VecCreateSeq(PETSC_COMM_SELF, m, &osm->lx);
307: VecDuplicate(osm->lx, &osm->ly);
309: scall = MAT_INITIAL_MATRIX;
310: } else {
311: /*
312: Destroy the blocks from the previous iteration
313: */
314: if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
315: MatDestroyMatrices(osm->n_local_true,&osm->pmat);
316: scall = MAT_INITIAL_MATRIX;
317: }
318: }
320: /*
321: Extract out the submatrices
322: */
323: MatCreateSubMatrices(pc->pmat,osm->n_local_true,osm->is,osm->is,scall,&osm->pmat);
324: if (scall == MAT_INITIAL_MATRIX) {
325: PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);
326: for (i=0; i<osm->n_local_true; i++) {
327: PetscLogObjectParent((PetscObject)pc,(PetscObject)osm->pmat[i]);
328: PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);
329: }
330: }
332: /* Convert the types of the submatrices (if need be) */
333: if (osm->sub_mat_type) {
334: for (i=0; i<osm->n_local_true; i++) {
335: MatConvert(osm->pmat[i],osm->sub_mat_type,MAT_INPLACE_MATRIX,&(osm->pmat[i]));
336: }
337: }
339: if (!pc->setupcalled) {
340: /* Create the local work vectors (from the local matrices) and scatter contexts */
341: MatCreateVecs(pc->pmat,&vec,0);
343: if (osm->is_local && (osm->type == PC_ASM_INTERPOLATE || osm->type == PC_ASM_NONE )) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot use interpolate or none PCASMType if is_local was provided to PCASMSetLocalSubdomains()");
344: if (osm->is_local && osm->type == PC_ASM_RESTRICT && osm->loctype == PC_COMPOSITE_ADDITIVE) {
345: PetscMalloc1(osm->n_local_true,&osm->lprolongation);
346: }
347: PetscMalloc1(osm->n_local_true,&osm->lrestriction);
348: PetscMalloc1(osm->n_local_true,&osm->x);
349: PetscMalloc1(osm->n_local_true,&osm->y);
351: ISGetLocalSize(osm->lis,&m);
352: ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);
353: VecScatterCreate(vec,osm->lis,osm->lx,isl,&osm->restriction);
354: ISDestroy(&isl);
357: for (i=0; i<osm->n_local_true; ++i) {
358: ISLocalToGlobalMapping ltog;
359: IS isll;
360: const PetscInt *idx_is;
361: PetscInt *idx_lis,nout;
363: ISGetLocalSize(osm->is[i],&m);
364: MatCreateVecs(osm->pmat[i],&osm->x[i],NULL);
365: VecDuplicate(osm->x[i],&osm->y[i]);
367: /* generate a scatter from ly to y[i] picking all the overlapping is[i] entries */
368: ISLocalToGlobalMappingCreateIS(osm->lis,&ltog);
369: ISGetLocalSize(osm->is[i],&m);
370: ISGetIndices(osm->is[i], &idx_is);
371: PetscMalloc1(m,&idx_lis);
372: ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m,idx_is,&nout,idx_lis);
373: if (nout != m) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"is not a subset of lis");
374: ISRestoreIndices(osm->is[i], &idx_is);
375: ISCreateGeneral(PETSC_COMM_SELF,m,idx_lis,PETSC_OWN_POINTER,&isll);
376: ISLocalToGlobalMappingDestroy(&ltog);
377: ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);
378: VecScatterCreate(osm->ly,isll,osm->y[i],isl,&osm->lrestriction[i]);
379: ISDestroy(&isll);
380: ISDestroy(&isl);
381: if (osm->lprolongation) { /* generate a scatter from y[i] to ly picking only the non-overlapping is_local[i] entries */
382: ISLocalToGlobalMapping ltog;
383: IS isll,isll_local;
384: const PetscInt *idx_local;
385: PetscInt *idx1, *idx2, nout;
387: ISGetLocalSize(osm->is_local[i],&m_local);
388: ISGetIndices(osm->is_local[i], &idx_local);
390: ISLocalToGlobalMappingCreateIS(osm->is[i],&ltog);
391: PetscMalloc1(m_local,&idx1);
392: ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m_local,idx_local,&nout,idx1);
393: ISLocalToGlobalMappingDestroy(&ltog);
394: if (nout != m_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"is_local not a subset of is");
395: ISCreateGeneral(PETSC_COMM_SELF,m_local,idx1,PETSC_OWN_POINTER,&isll);
397: ISLocalToGlobalMappingCreateIS(osm->lis,&ltog);
398: PetscMalloc1(m_local,&idx2);
399: ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m_local,idx_local,&nout,idx2);
400: ISLocalToGlobalMappingDestroy(&ltog);
401: if (nout != m_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"is_local not a subset of lis");
402: ISCreateGeneral(PETSC_COMM_SELF,m_local,idx2,PETSC_OWN_POINTER,&isll_local);
404: ISRestoreIndices(osm->is_local[i], &idx_local);
405: VecScatterCreate(osm->y[i],isll,osm->ly,isll_local,&osm->lprolongation[i]);
407: ISDestroy(&isll);
408: ISDestroy(&isll_local);
409: }
410: }
411: VecDestroy(&vec);
412: }
414: if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
415: IS *cis;
416: PetscInt c;
418: PetscMalloc1(osm->n_local_true, &cis);
419: for (c = 0; c < osm->n_local_true; ++c) cis[c] = osm->lis;
420: MatCreateSubMatrices(pc->pmat, osm->n_local_true, osm->is, cis, scall, &osm->lmats);
421: PetscFree(cis);
422: }
424: /* Return control to the user so that the submatrices can be modified (e.g., to apply
425: different boundary conditions for the submatrices than for the global problem) */
426: PCModifySubMatrices(pc,osm->n_local_true,osm->is,osm->is,osm->pmat,pc->modifysubmatricesP);
428: /*
429: Loop over subdomains putting them into local ksp
430: */
431: for (i=0; i<osm->n_local_true; i++) {
432: KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i]);
433: if (!pc->setupcalled) {
434: KSPSetFromOptions(osm->ksp[i]);
435: }
436: }
437: return(0);
438: }
440: static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc)
441: {
442: PC_ASM *osm = (PC_ASM*)pc->data;
443: PetscErrorCode ierr;
444: PetscInt i;
445: KSPConvergedReason reason;
448: for (i=0; i<osm->n_local_true; i++) {
449: KSPSetUp(osm->ksp[i]);
450: KSPGetConvergedReason(osm->ksp[i],&reason);
451: if (reason == KSP_DIVERGED_PC_FAILED) {
452: pc->failedreason = PC_SUBPC_ERROR;
453: }
454: }
455: return(0);
456: }
458: static PetscErrorCode PCApply_ASM(PC pc,Vec x,Vec y)
459: {
460: PC_ASM *osm = (PC_ASM*)pc->data;
462: PetscInt i,n_local_true = osm->n_local_true;
463: ScatterMode forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;
466: /*
467: Support for limiting the restriction or interpolation to only local
468: subdomain values (leaving the other values 0).
469: */
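/*
   For the additive composition the loop below applies y = sum_i R_i^T A_i^{-1} R_i x,
   where R_i restricts to the i-th overlapping subdomain and A_i = R_i A R_i^T; for the
   restricted variant the prolongation uses only the nonoverlapping is_local[i] part,
   and the multiplicative composition updates each block RHS with the solution of the
   previous blocks before solving.
*/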
470: if (!(osm->type & PC_ASM_RESTRICT)) {
471: forward = SCATTER_FORWARD_LOCAL;
472: /* have to zero the work RHS since scatter may leave some slots empty */
473: VecSet(osm->lx, 0.0);
474: }
475: if (!(osm->type & PC_ASM_INTERPOLATE)) {
476: reverse = SCATTER_REVERSE_LOCAL;
477: }
479: if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE || osm->loctype == PC_COMPOSITE_ADDITIVE) {
480: /* zero the global and the local solutions */
481: VecZeroEntries(y);
482: VecSet(osm->ly, 0.0);
484: /* Copy the global RHS to local RHS including the ghost nodes */
485: VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward);
486: VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward);
488: /* Restrict local RHS to the overlapping 0-block RHS */
489: VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward);
490: VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward);
492: /* do the local solves */
493: for (i = 0; i < n_local_true; ++i) {
495: /* solve the overlapping i-block */
496: PetscLogEventBegin(PC_ApplyOnBlocks,osm->ksp[i],osm->x[i],osm->y[i],0);
497: KSPSolve(osm->ksp[i], osm->x[i], osm->y[i]);
498: KSPCheckSolve(osm->ksp[i],pc,osm->y[i]);
499: PetscLogEventEnd(PC_ApplyOnBlocks,osm->ksp[i],osm->x[i],osm->y[i],0);
501: if (osm->lprolongation) { /* interpolate the non-overlapping i-block solution to the local solution (only for restrictive additive) */
502: VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward);
503: VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward);
504: }
505: else { /* interpolate the overlapping i-block solution to the local solution */
506: VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse);
507: VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse);
508: }
510: if (i < n_local_true-1) {
511: /* Restrict local RHS to the overlapping (i+1)-block RHS */
512: VecScatterBegin(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward);
513: VecScatterEnd(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward);
515: if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
516: /* update the overlapping (i+1)-block RHS using the current local solution */
517: MatMult(osm->lmats[i+1], osm->ly, osm->y[i+1]);
518: VecAXPBY(osm->x[i+1],-1.,1., osm->y[i+1]);
519: }
520: }
521: }
522: /* Add the local solution to the global solution including the ghost nodes */
523: VecScatterBegin(osm->restriction, osm->ly, y, ADD_VALUES, reverse);
524: VecScatterEnd(osm->restriction, osm->ly, y, ADD_VALUES, reverse);
525: } else {
526: SETERRQ1(PetscObjectComm((PetscObject) pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
527: }
528: return(0);
529: }
531: static PetscErrorCode PCApplyTranspose_ASM(PC pc,Vec x,Vec y)
532: {
533: PC_ASM *osm = (PC_ASM*)pc->data;
535: PetscInt i,n_local_true = osm->n_local_true;
536: ScatterMode forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;
539: /*
540: Support for limiting the restriction or interpolation to only local
541: subdomain values (leaving the other values 0).
543: Note: the scatter modes are reversed from those in PCApply_ASM() because we
544: are applying the transpose of each of the three terms
545: */
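/* In operator form this computes y = sum_i R_i^T A_i^{-T} R_i x, the transpose of the additive PCApply_ASM() */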
547: if (!(osm->type & PC_ASM_INTERPOLATE)) {
548: forward = SCATTER_FORWARD_LOCAL;
549: /* have to zero the work RHS since scatter may leave some slots empty */
550: VecSet(osm->lx, 0.0);
551: }
552: if (!(osm->type & PC_ASM_RESTRICT)) reverse = SCATTER_REVERSE_LOCAL;
554: /* zero the global and the local solutions */
555: VecZeroEntries(y);
556: VecSet(osm->ly, 0.0);
558: /* Copy the global RHS to local RHS including the ghost nodes */
559: VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward);
560: VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward);
562: /* Restrict local RHS to the overlapping 0-block RHS */
563: VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward);
564: VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward);
566: /* do the local solves */
567: for (i = 0; i < n_local_true; ++i) {
569: /* solve the overlapping i-block */
570: PetscLogEventBegin(PC_ApplyOnBlocks,osm->ksp[i],osm->x[i],osm->y[i],0);
571: KSPSolveTranspose(osm->ksp[i], osm->x[i], osm->y[i]);
572: KSPCheckSolve(osm->ksp[i],pc,osm->y[i]);
573: PetscLogEventEnd(PC_ApplyOnBlocks,osm->ksp[i],osm->x[i],osm->y[i],0);
575: if (osm->lprolongation) { /* interpolate the non-overlapping i-block solution to the local solution */
576: VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward);
577: VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward);
578: }
579: else { /* interpolate the overlapping i-block solution to the local solution */
580: VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse);
581: VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse);
582: }
584: if (i < n_local_true-1) {
585: /* Restrict local RHS to the overlapping (i+1)-block RHS */
586: VecScatterBegin(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward);
587: VecScatterEnd(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward);
588: }
589: }
590: /* Add the local solution to the global solution including the ghost nodes */
591: VecScatterBegin(osm->restriction, osm->ly, y, ADD_VALUES, reverse);
592: VecScatterEnd(osm->restriction, osm->ly, y, ADD_VALUES, reverse);
594: return(0);
596: }
598: static PetscErrorCode PCReset_ASM(PC pc)
599: {
600: PC_ASM *osm = (PC_ASM*)pc->data;
602: PetscInt i;
605: if (osm->ksp) {
606: for (i=0; i<osm->n_local_true; i++) {
607: KSPReset(osm->ksp[i]);
608: }
609: }
610: if (osm->pmat) {
611: if (osm->n_local_true > 0) {
612: MatDestroySubMatrices(osm->n_local_true,&osm->pmat);
613: }
614: }
615: if (osm->lrestriction) {
616: VecScatterDestroy(&osm->restriction);
617: for (i=0; i<osm->n_local_true; i++) {
618: VecScatterDestroy(&osm->lrestriction[i]);
619: if (osm->lprolongation) {VecScatterDestroy(&osm->lprolongation[i]);}
620: VecDestroy(&osm->x[i]);
621: VecDestroy(&osm->y[i]);
622: }
623: PetscFree(osm->lrestriction);
624: if (osm->lprolongation) {PetscFree(osm->lprolongation);}
625: PetscFree(osm->x);
626: PetscFree(osm->y);
628: }
629: PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);
630: ISDestroy(&osm->lis);
631: VecDestroy(&osm->lx);
632: VecDestroy(&osm->ly);
633: if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
634: MatDestroyMatrices(osm->n_local_true, &osm->lmats);
635: }
637: PetscFree(osm->sub_mat_type);
639: osm->is = 0;
640: osm->is_local = 0;
641: return(0);
642: }
644: static PetscErrorCode PCDestroy_ASM(PC pc)
645: {
646: PC_ASM *osm = (PC_ASM*)pc->data;
648: PetscInt i;
651: PCReset_ASM(pc);
652: if (osm->ksp) {
653: for (i=0; i<osm->n_local_true; i++) {
654: KSPDestroy(&osm->ksp[i]);
655: }
656: PetscFree(osm->ksp);
657: }
658: PetscFree(pc->data);
659: return(0);
660: }
662: static PetscErrorCode PCSetFromOptions_ASM(PetscOptionItems *PetscOptionsObject,PC pc)
663: {
664: PC_ASM *osm = (PC_ASM*)pc->data;
666: PetscInt blocks,ovl;
667: PetscBool flg;
668: PCASMType asmtype;
669: PCCompositeType loctype;
670: char sub_mat_type[256];
673: PetscOptionsHead(PetscOptionsObject,"Additive Schwarz options");
674: PetscOptionsBool("-pc_asm_dm_subdomains","Use DMCreateDomainDecomposition() to define subdomains","PCASMSetDMSubdomains",osm->dm_subdomains,&osm->dm_subdomains,&flg);
675: PetscOptionsInt("-pc_asm_blocks","Number of subdomains","PCASMSetTotalSubdomains",osm->n,&blocks,&flg);
676: if (flg) {
677: PCASMSetTotalSubdomains(pc,blocks,NULL,NULL);
678: osm->dm_subdomains = PETSC_FALSE;
679: }
680: PetscOptionsInt("-pc_asm_local_blocks","Number of local subdomains","PCASMSetLocalSubdomains",osm->n_local_true,&blocks,&flg);
681: if (flg) {
682: PCASMSetLocalSubdomains(pc,blocks,NULL,NULL);
683: osm->dm_subdomains = PETSC_FALSE;
684: }
685: PetscOptionsInt("-pc_asm_overlap","Number of grid points overlap","PCASMSetOverlap",osm->overlap,&ovl,&flg);
686: if (flg) {
687: PCASMSetOverlap(pc,ovl);
688: osm->dm_subdomains = PETSC_FALSE;
689: }
690: flg = PETSC_FALSE;
691: PetscOptionsEnum("-pc_asm_type","Type of restriction/extension","PCASMSetType",PCASMTypes,(PetscEnum)osm->type,(PetscEnum*)&asmtype,&flg);
692: if (flg) {PCASMSetType(pc,asmtype); }
693: flg = PETSC_FALSE;
694: PetscOptionsEnum("-pc_asm_local_type","Type of local solver composition","PCASMSetLocalType",PCCompositeTypes,(PetscEnum)osm->loctype,(PetscEnum*)&loctype,&flg);
695: if (flg) {PCASMSetLocalType(pc,loctype); }
696: PetscOptionsFList("-pc_asm_sub_mat_type","Subsolve Matrix Type","PCASMSetSubMatType",MatList,NULL,sub_mat_type,256,&flg);
697: if (flg) {
698: PCASMSetSubMatType(pc,sub_mat_type);
699: }
700: PetscOptionsTail();
701: return(0);
702: }
704: /*------------------------------------------------------------------------------------*/
706: static PetscErrorCode PCASMSetLocalSubdomains_ASM(PC pc,PetscInt n,IS is[],IS is_local[])
707: {
708: PC_ASM *osm = (PC_ASM*)pc->data;
710: PetscInt i;
713: if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 1 or more blocks, n = %D",n);
714: if (pc->setupcalled && (n != osm->n_local_true || is)) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetLocalSubdomains() should be called before calling PCSetUp().");
716: if (!pc->setupcalled) {
717: if (is) {
718: for (i=0; i<n; i++) {PetscObjectReference((PetscObject)is[i]);}
719: }
720: if (is_local) {
721: for (i=0; i<n; i++) {PetscObjectReference((PetscObject)is_local[i]);}
722: }
723: PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);
725: osm->n_local_true = n;
726: osm->is = 0;
727: osm->is_local = 0;
728: if (is) {
729: PetscMalloc1(n,&osm->is);
730: for (i=0; i<n; i++) osm->is[i] = is[i];
731: /* Flag indicating that the user has set overlapping subdomains so PCASM should not increase their size. */
732: osm->overlap = -1;
733: }
734: if (is_local) {
735: PetscMalloc1(n,&osm->is_local);
736: for (i=0; i<n; i++) osm->is_local[i] = is_local[i];
737: if (!is) {
738: PetscMalloc1(osm->n_local_true,&osm->is);
739: for (i=0; i<osm->n_local_true; i++) {
740: if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
741: ISDuplicate(osm->is_local[i],&osm->is[i]);
742: ISCopy(osm->is_local[i],osm->is[i]);
743: } else {
744: PetscObjectReference((PetscObject)osm->is_local[i]);
745: osm->is[i] = osm->is_local[i];
746: }
747: }
748: }
749: }
750: }
751: return(0);
752: }
754: static PetscErrorCode PCASMSetTotalSubdomains_ASM(PC pc,PetscInt N,IS *is,IS *is_local)
755: {
756: PC_ASM *osm = (PC_ASM*)pc->data;
758: PetscMPIInt rank,size;
759: PetscInt n;
762: if (N < 1) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Number of total blocks must be > 0, N = %D",N);
763: if (is || is_local) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Use PCASMSetLocalSubdomains() to set specific index sets\n\tthey cannot be set globally yet.");
765: /*
766: Split the subdomains equally among all processors
767: */
768: MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);
769: MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);
770: n = N/size + ((N % size) > rank);
771: if (!n) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Process %d must have at least one block: total processors %d total blocks %D",(int)rank,(int)size,N);
772: if (pc->setupcalled && n != osm->n_local_true) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"PCASMSetTotalSubdomains() should be called before PCSetUp().");
773: if (!pc->setupcalled) {
774: PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);
776: osm->n_local_true = n;
777: osm->is = 0;
778: osm->is_local = 0;
779: }
780: return(0);
781: }
783: static PetscErrorCode PCASMSetOverlap_ASM(PC pc,PetscInt ovl)
784: {
785: PC_ASM *osm = (PC_ASM*)pc->data;
788: if (ovl < 0) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
789: if (pc->setupcalled && ovl != osm->overlap) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetOverlap() should be called before PCSetUp().");
790: if (!pc->setupcalled) osm->overlap = ovl;
791: return(0);
792: }
794: static PetscErrorCode PCASMSetType_ASM(PC pc,PCASMType type)
795: {
796: PC_ASM *osm = (PC_ASM*)pc->data;
799: osm->type = type;
800: osm->type_set = PETSC_TRUE;
801: return(0);
802: }
804: static PetscErrorCode PCASMGetType_ASM(PC pc,PCASMType *type)
805: {
806: PC_ASM *osm = (PC_ASM*)pc->data;
809: *type = osm->type;
810: return(0);
811: }
813: static PetscErrorCode PCASMSetLocalType_ASM(PC pc, PCCompositeType type)
814: {
815: PC_ASM *osm = (PC_ASM *) pc->data;
818: if (type != PC_COMPOSITE_ADDITIVE && type != PC_COMPOSITE_MULTIPLICATIVE) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Only supports additive or multiplicative as the local type");
819: osm->loctype = type;
820: return(0);
821: }
823: static PetscErrorCode PCASMGetLocalType_ASM(PC pc, PCCompositeType *type)
824: {
825: PC_ASM *osm = (PC_ASM *) pc->data;
828: *type = osm->loctype;
829: return(0);
830: }
832: static PetscErrorCode PCASMSetSortIndices_ASM(PC pc,PetscBool doSort)
833: {
834: PC_ASM *osm = (PC_ASM*)pc->data;
837: osm->sort_indices = doSort;
838: return(0);
839: }
841: static PetscErrorCode PCASMGetSubKSP_ASM(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
842: {
843: PC_ASM *osm = (PC_ASM*)pc->data;
847: if (osm->n_local_true < 1) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ORDER,"Need to call PCSetUp() on PC (or KSPSetUp() on the outer KSP object) before calling here");
849: if (n_local) *n_local = osm->n_local_true;
850: if (first_local) {
851: MPI_Scan(&osm->n_local_true,first_local,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)pc));
852: *first_local -= osm->n_local_true;
853: }
854: if (ksp) {
855: /* Assume that local solves are now different; not necessarily
856: true though! This flag is used only for PCView_ASM() */
857: *ksp = osm->ksp;
858: osm->same_local_solves = PETSC_FALSE;
859: }
860: return(0);
861: }
863: static PetscErrorCode PCASMGetSubMatType_ASM(PC pc,MatType *sub_mat_type)
864: {
865: PC_ASM *osm = (PC_ASM*)pc->data;
870: *sub_mat_type = osm->sub_mat_type;
871: return(0);
872: }
874: static PetscErrorCode PCASMSetSubMatType_ASM(PC pc,MatType sub_mat_type)
875: {
876: PetscErrorCode ierr;
877: PC_ASM *osm = (PC_ASM*)pc->data;
881: PetscFree(osm->sub_mat_type);
882: PetscStrallocpy(sub_mat_type,(char**)&osm->sub_mat_type);
883: return(0);
884: }
886: /*@C
887: PCASMSetLocalSubdomains - Sets the local subdomains (for this processor only) for the additive Schwarz preconditioner.
889: Collective on pc
891: Input Parameters:
892: + pc - the preconditioner context
893: . n - the number of subdomains for this processor (default value = 1)
894: . is - the index set that defines the subdomains for this processor
895: (or NULL for PETSc to determine subdomains)
896: - is_local - the index sets that define the local part of the subdomains for this processor, not used unless PCASMType is PC_ASM_RESTRICT
897: (or NULL to not provide these)
899: Options Database Key:
900: To set the total number of subdomain blocks rather than specify the
901: index sets, use the option
902: . -pc_asm_local_blocks <blks> - Sets local blocks
904: Notes:
905: The IS numbering is in the parallel, global numbering of the vector for both is and is_local
907: By default the ASM preconditioner uses 1 block per processor.
909: Use PCASMSetTotalSubdomains() to set the subdomains for all processors.
911: If is_local is provided and PCASMType is PC_ASM_RESTRICT then the solution only over the is_local region is interpolated
912: back to form the global solution (this is the standard restricted additive Schwarz method)
914: If is_local is provided and PCASMType is PC_ASM_INTERPOLATE or PC_ASM_NONE then an error is generated since there is
915: no code to handle that case.
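   Example (a minimal sketch; assumes the caller has already created an overlapping
   index set is0 and its nonoverlapping counterpart is0_local for this process):
.vb
      PCASMSetLocalSubdomains(pc,1,&is0,&is0_local);
.ve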
917: Level: advanced
919: .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
920: PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains(), PCASMType, PCASMSetType()
921: @*/
922: PetscErrorCode PCASMSetLocalSubdomains(PC pc,PetscInt n,IS is[],IS is_local[])
923: {
928: PetscTryMethod(pc,"PCASMSetLocalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,n,is,is_local));
929: return(0);
930: }
932: /*@C
933: PCASMSetTotalSubdomains - Sets the subdomains for all processors for the
934: additive Schwarz preconditioner. Either all or no processors in the
935: PC communicator must call this routine, with the same index sets.
937: Collective on pc
939: Input Parameters:
940: + pc - the preconditioner context
941: . N - the number of subdomains for all processors
942: . is - the index sets that define the subdomains for all processors
943: (or NULL to ask PETSc to determine the subdomains)
944: - is_local - the index sets that define the local part of the subdomains for this processor
945: (or NULL to not provide this information)
947: Options Database Key:
948: To set the total number of subdomain blocks rather than specify the
949: index sets, use the option
950: . -pc_asm_blocks <blks> - Sets total blocks
952: Notes:
953: Currently you cannot use this to set the actual subdomains with the argument is or is_local.
955: By default the ASM preconditioner uses 1 block per processor.
957: These index sets cannot be destroyed until after completion of the
958: linear solves for which the ASM preconditioner is being used.
960: Use PCASMSetLocalSubdomains() to set local subdomains.
962: The IS numbering is in the parallel, global numbering of the vector for both is and is_local
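   Example (a sketch; PETSc splits the 16 blocks among the processes sharing the PC):
.vb
      PCASMSetTotalSubdomains(pc,16,NULL,NULL);
.ve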
964: Level: advanced
966: .seealso: PCASMSetLocalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
967: PCASMCreateSubdomains2D()
968: @*/
969: PetscErrorCode PCASMSetTotalSubdomains(PC pc,PetscInt N,IS is[],IS is_local[])
970: {
975: PetscTryMethod(pc,"PCASMSetTotalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,N,is,is_local));
976: return(0);
977: }
979: /*@
980: PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
981: additive Schwarz preconditioner. Either all or no processors in the
982: PC communicator must call this routine.
984: Logically Collective on pc
986: Input Parameters:
987: + pc - the preconditioner context
988: - ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)
990: Options Database Key:
991: . -pc_asm_overlap <ovl> - Sets overlap
993: Notes:
994: By default the ASM preconditioner uses 1 block per processor. To use
995: multiple blocks per processor, see PCASMSetTotalSubdomains() and
996: PCASMSetLocalSubdomains() (and the option -pc_asm_blocks <blks>).
998: The overlap defaults to 1, so if one desires that no additional
999: overlap be computed beyond what may have been set with a call to
1000: PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(), then ovl
1001: must be set to 0. In particular, if one does not explicitly set
1002: the subdomains in an application code, then all overlap would be computed
1003: internally by PETSc, and using an overlap of 0 would result in an ASM
1004: variant that is equivalent to the block Jacobi preconditioner.
1006: The default algorithm used by PETSc to increase overlap is fast but not scalable;
1007: use the option -mat_increase_overlap_scalable when the problem and the number of processes are large.
1009: Note that one can define initial index sets with any overlap via
1010: PCASMSetLocalSubdomains(); the routine
1011: PCASMSetOverlap() merely allows PETSc to extend that overlap further
1012: if desired.
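   Example (a sketch of the block Jacobi limit described above, with PETSc-generated
   subdomains):
.vb
      PCASMSetOverlap(pc,0);
.ve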
1014: Level: intermediate
1016: .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
1017: PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains(), MatIncreaseOverlap()
1018: @*/
1019: PetscErrorCode PCASMSetOverlap(PC pc,PetscInt ovl)
1020: {
1026: PetscTryMethod(pc,"PCASMSetOverlap_C",(PC,PetscInt),(pc,ovl));
1027: return(0);
1028: }
1030: /*@
1031: PCASMSetType - Sets the type of restriction and interpolation used
1032: for local problems in the additive Schwarz method.
1034: Logically Collective on pc
1036: Input Parameters:
1037: + pc - the preconditioner context
1038: - type - variant of ASM, one of
1039: .vb
1040: PC_ASM_BASIC - full interpolation and restriction
1041: PC_ASM_RESTRICT - full restriction, local processor interpolation
1042: PC_ASM_INTERPOLATE - full interpolation, local processor restriction
1043: PC_ASM_NONE - local processor restriction and interpolation
1044: .ve
1046: Options Database Key:
1047: . -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
1049: Notes:
1050: if the is_local arguments are passed to PCASMSetLocalSubdomains() then they are used when PC_ASM_RESTRICT has been selected
1051: to limit the local processor interpolation
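   Example (a sketch selecting the standard, non-restricted ASM variant):
.vb
      PCASMSetType(pc,PC_ASM_BASIC);
.ve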
1053: Level: intermediate
919: .seealso: PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
1056: PCASMCreateSubdomains2D(), PCASMType, PCASMSetLocalType(), PCASMGetLocalType()
1057: @*/
1058: PetscErrorCode PCASMSetType(PC pc,PCASMType type)
1059: {
1065: PetscTryMethod(pc,"PCASMSetType_C",(PC,PCASMType),(pc,type));
1066: return(0);
1067: }
1069: /*@
1070: PCASMGetType - Gets the type of restriction and interpolation used
1071: for local problems in the additive Schwarz method.
1073: Logically Collective on pc
1075: Input Parameter:
1076: . pc - the preconditioner context
1078: Output Parameter:
1079: . type - variant of ASM, one of
1081: .vb
1082: PC_ASM_BASIC - full interpolation and restriction
1083: PC_ASM_RESTRICT - full restriction, local processor interpolation
1084: PC_ASM_INTERPOLATE - full interpolation, local processor restriction
1085: PC_ASM_NONE - local processor restriction and interpolation
1086: .ve
1088: Options Database Key:
1089: . -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
1091: Level: intermediate
1093: .seealso: PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
1094: PCASMCreateSubdomains2D(), PCASMType, PCASMSetType(), PCASMSetLocalType(), PCASMGetLocalType()
1095: @*/
1096: PetscErrorCode PCASMGetType(PC pc,PCASMType *type)
1097: {
1102: PetscUseMethod(pc,"PCASMGetType_C",(PC,PCASMType*),(pc,type));
1103: return(0);
1104: }
1106: /*@
1107: PCASMSetLocalType - Sets the type of composition used for local problems in the additive Schwarz method.
1109: Logically Collective on pc
1111: Input Parameters:
1112: + pc - the preconditioner context
1113: - type - type of composition, one of
1114: .vb
1115: PC_COMPOSITE_ADDITIVE - local additive combination
1116: PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1117: .ve
1119: Options Database Key:
1120: . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type
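   Example (a sketch; equivalent to the option -pc_asm_local_type multiplicative):
.vb
      PCASMSetLocalType(pc,PC_COMPOSITE_MULTIPLICATIVE);
.ve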
1122: Level: intermediate
1124: .seealso: PCASMSetType(), PCASMGetType(), PCASMGetLocalType(), PCASM, PCASMType, PCCompositeType
1125: @*/
1126: PetscErrorCode PCASMSetLocalType(PC pc, PCCompositeType type)
1127: {
1133: PetscTryMethod(pc, "PCASMSetLocalType_C", (PC, PCCompositeType), (pc, type));
1134: return(0);
1135: }
1137: /*@
1138: PCASMGetLocalType - Gets the type of composition used for local problems in the additive Schwarz method.
1140: Logically Collective on pc
1142: Input Parameter:
1143: . pc - the preconditioner context
1145: Output Parameter:
1146: . type - type of composition, one of
1147: .vb
1148: PC_COMPOSITE_ADDITIVE - local additive combination
1149: PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1150: .ve
1152: Options Database Key:
1153: . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type
1155: Level: intermediate
1157: .seealso: PCASMSetType(), PCASMGetType(), PCASMSetLocalType(), PCASMCreate(), PCASMType, PCCompositeType
1158: @*/
1159: PetscErrorCode PCASMGetLocalType(PC pc, PCCompositeType *type)
1160: {
1166: PetscUseMethod(pc, "PCASMGetLocalType_C", (PC, PCCompositeType *), (pc, type));
1167: return(0);
1168: }
1170: /*@
1171: PCASMSetSortIndices - Determines whether subdomain indices are sorted.
1173: Logically Collective on pc
1175: Input Parameters:
1176: + pc - the preconditioner context
1177: - doSort - sort the subdomain indices
1179: Level: intermediate
1181: .seealso: PCASMSetLocalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
1182: PCASMCreateSubdomains2D()
1183: @*/
1184: PetscErrorCode PCASMSetSortIndices(PC pc,PetscBool doSort)
1185: {
1191: PetscTryMethod(pc,"PCASMSetSortIndices_C",(PC,PetscBool),(pc,doSort));
1192: return(0);
1193: }
1195: /*@C
1196: PCASMGetSubKSP - Gets the local KSP contexts for all blocks on
1197: this processor.
1199: Collective on pc iff first_local is requested
1201: Input Parameter:
1202: . pc - the preconditioner context
1204: Output Parameters:
1205: + n_local - the number of blocks on this processor or NULL
1206: . first_local - the global number of the first block on this processor or NULL,
1207: all processors must request or all must pass NULL
1208: - ksp - the array of KSP contexts
1210: Note:
1211: After PCASMGetSubKSP() the array of KSP objects must not be freed.
1213: You must call KSPSetUp() before calling PCASMGetSubKSP().
1215: Fortran note:
1216: The output argument 'ksp' must be an array of sufficient length or PETSC_NULL_KSP. The latter can be used to learn the necessary length.
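   Example (a sketch; assumes ksp is the outer KSP with a PCASM preconditioner pc, and
   the declarations PetscInt i,nlocal; KSP *subksp; PC subpc;):
.vb
      KSPSetUp(ksp);
      PCASMGetSubKSP(pc,&nlocal,NULL,&subksp);
      for (i=0; i<nlocal; i++) {
        KSPGetPC(subksp[i],&subpc);
        PCSetType(subpc,PCILU);       /* use ILU on every block */
      }
.ve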
1218: Level: advanced
1220: .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(),
1221: PCASMCreateSubdomains2D()
1222: @*/
1223: PetscErrorCode PCASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
1224: {
1229: PetscUseMethod(pc,"PCASMGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));
1230: return(0);
1231: }
1233: /* -------------------------------------------------------------------------------------*/
1234: /*MC
1235: PCASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
1236: its own KSP object.
1238: Options Database Keys:
1239: + -pc_asm_blocks <blks> - Sets total blocks
1240: . -pc_asm_overlap <ovl> - Sets overlap
1241: . -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type, default is restrict
1242: - -pc_asm_local_type [additive, multiplicative] - Sets local solver composition type, default is additive
1244: IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
1245: will get a different convergence rate due to the default option of -pc_asm_type restrict. Use
1246: -pc_asm_type basic to use the standard ASM.
1248: Notes:
1249: Each processor can have one or more blocks, but a block cannot be shared by more
1250: than one processor. Use PCGASM for subdomains shared by multiple processes. Defaults to one block per processor.
1252: To set options on the solvers for each block append -sub_ to all the KSP, and PC
1253: options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
1255: To set the options on the solvers separate for each block call PCASMGetSubKSP()
1256: and set the options directly on the resulting KSP object (you can access its PC
1257: with KSPGetPC())
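   Example (an illustrative command line; ./app stands for any PETSc-based executable):
.vb
      mpiexec -n 4 ./app -pc_type asm -pc_asm_overlap 2 -sub_ksp_type preonly -sub_pc_type ilu -sub_pc_factor_levels 1
.ve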
1259: Level: beginner
1261: References:
1262: + 1. - M Dryja, OB Widlund, An additive variant of the Schwarz alternating method for the case of many subregions
1263: Courant Institute, New York University Technical report
1264: - 2. - Barry Smith, Petter Bjorstad, and William Gropp, Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
1265: Cambridge University Press.
1267: .seealso: PCCreate(), PCSetType(), PCType (for list of available types), PC,
1268: PCBJACOBI, PCASMGetSubKSP(), PCASMSetLocalSubdomains(), PCASMType, PCASMGetType(), PCASMSetLocalType(), PCASMGetLocalType()
1269: PCASMSetTotalSubdomains(), PCSetModifySubMatrices(), PCASMSetOverlap(), PCASMSetType(), PCCompositeType
1271: M*/
1273: PETSC_EXTERN PetscErrorCode PCCreate_ASM(PC pc)
1274: {
1276: PC_ASM *osm;
1279: PetscNewLog(pc,&osm);
1281: osm->n = PETSC_DECIDE;
1282: osm->n_local = 0;
1283: osm->n_local_true = PETSC_DECIDE;
1284: osm->overlap = 1;
1285: osm->ksp = 0;
1286: osm->restriction = 0;
1287: osm->lprolongation = 0;
1288: osm->lrestriction = 0;
1289: osm->x = 0;
1290: osm->y = 0;
1291: osm->is = 0;
1292: osm->is_local = 0;
1293: osm->mat = 0;
1294: osm->pmat = 0;
1295: osm->type = PC_ASM_RESTRICT;
1296: osm->loctype = PC_COMPOSITE_ADDITIVE;
1297: osm->same_local_solves = PETSC_TRUE;
1298: osm->sort_indices = PETSC_TRUE;
1299: osm->dm_subdomains = PETSC_FALSE;
1300: osm->sub_mat_type = NULL;
1302: pc->data = (void*)osm;
1303: pc->ops->apply = PCApply_ASM;
1304: pc->ops->applytranspose = PCApplyTranspose_ASM;
1305: pc->ops->setup = PCSetUp_ASM;
1306: pc->ops->reset = PCReset_ASM;
1307: pc->ops->destroy = PCDestroy_ASM;
1308: pc->ops->setfromoptions = PCSetFromOptions_ASM;
1309: pc->ops->setuponblocks = PCSetUpOnBlocks_ASM;
1310: pc->ops->view = PCView_ASM;
1311: pc->ops->applyrichardson = 0;
1313: PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalSubdomains_C",PCASMSetLocalSubdomains_ASM);
1314: PetscObjectComposeFunction((PetscObject)pc,"PCASMSetTotalSubdomains_C",PCASMSetTotalSubdomains_ASM);
1315: PetscObjectComposeFunction((PetscObject)pc,"PCASMSetOverlap_C",PCASMSetOverlap_ASM);
1316: PetscObjectComposeFunction((PetscObject)pc,"PCASMSetType_C",PCASMSetType_ASM);
1317: PetscObjectComposeFunction((PetscObject)pc,"PCASMGetType_C",PCASMGetType_ASM);
1318: PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalType_C",PCASMSetLocalType_ASM);
1319: PetscObjectComposeFunction((PetscObject)pc,"PCASMGetLocalType_C",PCASMGetLocalType_ASM);
1320: PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSortIndices_C",PCASMSetSortIndices_ASM);
1321: PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubKSP_C",PCASMGetSubKSP_ASM);
1322: PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubMatType_C",PCASMGetSubMatType_ASM);
1323: PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSubMatType_C",PCASMSetSubMatType_ASM);
1324: return(0);
1325: }
1327: /*@C
1328: PCASMCreateSubdomains - Creates the index sets for the overlapping Schwarz
1329: preconditioner for any problem on a general grid.
1331: Collective
1333: Input Parameters:
1334: + A - The global matrix operator
1335: - n - the number of local blocks
1337: Output Parameters:
1338: . outis - the array of index sets defining the subdomains
1340: Level: advanced
1342: Note: this generates nonoverlapping subdomains; PCASM will generate the overlap
1343: from these if you use PCASMSetLocalSubdomains()
1345: In the Fortran version you must provide the array outis[] already allocated of length n.
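   Example (a sketch; assumes a Mat A, a PC pc of type PCASM, and PetscInt n blocks):
.vb
      IS *is;
      PCASMCreateSubdomains(A,n,&is);
      PCASMSetLocalSubdomains(pc,n,is,NULL);
      /* ... solve ... */
      PCASMDestroySubdomains(n,is,NULL);
.ve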
1347: .seealso: PCASMSetLocalSubdomains(), PCASMDestroySubdomains()
1348: @*/
1349: PetscErrorCode PCASMCreateSubdomains(Mat A, PetscInt n, IS* outis[])
1350: {
1351: MatPartitioning mpart;
1352: const char *prefix;
1353: PetscInt i,j,rstart,rend,bs;
1354: PetscBool hasop, isbaij = PETSC_FALSE,foundpart = PETSC_FALSE;
1355: Mat Ad = NULL, adj;
1356: IS ispart,isnumb,*is;
1357: PetscErrorCode ierr;
1362: if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of local blocks must be > 0, n = %D",n);
1364: /* Get prefix, row distribution, and block size */
1365: MatGetOptionsPrefix(A,&prefix);
1366: MatGetOwnershipRange(A,&rstart,&rend);
1367: MatGetBlockSize(A,&bs);
1368: if (rstart/bs*bs != rstart || rend/bs*bs != rend) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"bad row distribution [%D,%D) for matrix block size %D",rstart,rend,bs);
1370: /* Get diagonal block from matrix if possible */
1371: MatHasOperation(A,MATOP_GET_DIAGONAL_BLOCK,&hasop);
1372: if (hasop) {
1373: MatGetDiagonalBlock(A,&Ad);
1374: }
1375: if (Ad) {
1376: PetscObjectBaseTypeCompare((PetscObject)Ad,MATSEQBAIJ,&isbaij);
1377: if (!isbaij) {PetscObjectBaseTypeCompare((PetscObject)Ad,MATSEQSBAIJ,&isbaij);}
1378: }
1379: if (Ad && n > 1) {
1380: PetscBool match,done;
1381: /* Try to setup a good matrix partitioning if available */
1382: MatPartitioningCreate(PETSC_COMM_SELF,&mpart);
1383: PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);
1384: MatPartitioningSetFromOptions(mpart);
1385: PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGCURRENT,&match);
1386: if (!match) {
1387: PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGSQUARE,&match);
1388: }
1389: if (!match) { /* assume a "good" partitioner is available */
1390: PetscInt na;
1391: const PetscInt *ia,*ja;
1392: MatGetRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);
1393: if (done) {
1394: /* Build adjacency matrix by hand. Unfortunately a call to
1395: MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
1396: remove the block-aij structure and we cannot expect
1397: MatPartitioning to split vertices as we need */
1398: PetscInt i,j,len,nnz,cnt,*iia=0,*jja=0;
1399: const PetscInt *row;
1400: nnz = 0;
1401: for (i=0; i<na; i++) { /* count number of nonzeros */
1402: len = ia[i+1] - ia[i];
1403: row = ja + ia[i];
1404: for (j=0; j<len; j++) {
1405: if (row[j] == i) { /* don't count diagonal */
1406: len--; break;
1407: }
1408: }
1409: nnz += len;
1410: }
1411: PetscMalloc1(na+1,&iia);
1412: PetscMalloc1(nnz,&jja);
1413: nnz = 0;
1414: iia[0] = 0;
1415: for (i=0; i<na; i++) { /* fill adjacency */
1416: cnt = 0;
1417: len = ia[i+1] - ia[i];
1418: row = ja + ia[i];
1419: for (j=0; j<len; j++) {
1420: if (row[j] != i) { /* if not diagonal */
1421: jja[nnz+cnt++] = row[j];
1422: }
1423: }
1424: nnz += cnt;
1425: iia[i+1] = nnz;
1426: }
1427: /* Partitioning of the adjacency matrix */
1428: MatCreateMPIAdj(PETSC_COMM_SELF,na,na,iia,jja,NULL,&adj);
1429: MatPartitioningSetAdjacency(mpart,adj);
1430: MatPartitioningSetNParts(mpart,n);
1431: MatPartitioningApply(mpart,&ispart);
1432: ISPartitioningToNumbering(ispart,&isnumb);
1433: MatDestroy(&adj);
1434: foundpart = PETSC_TRUE;
1435: }
1436: MatRestoreRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);
1437: }
1438: MatPartitioningDestroy(&mpart);
1439: }
1441: PetscMalloc1(n,&is);
1442: *outis = is;
1444: if (!foundpart) {
1446: /* Partitioning by contiguous chunks of rows */
1448: PetscInt mbs = (rend-rstart)/bs;
1449: PetscInt start = rstart;
1450: for (i=0; i<n; i++) {
1451: PetscInt count = (mbs/n + ((mbs % n) > i)) * bs;
1452: ISCreateStride(PETSC_COMM_SELF,count,start,1,&is[i]);
1453: start += count;
1454: }
1456: } else {
1458: /* Partitioning by adjacency of diagonal block */
1460: const PetscInt *numbering;
1461: PetscInt *count,nidx,*indices,*newidx,start=0;
1462: /* Get node count in each partition */
1463: PetscMalloc1(n,&count);
1464: ISPartitioningCount(ispart,n,count);
1465: if (isbaij && bs > 1) { /* adjust for the block-aij case */
1466: for (i=0; i<n; i++) count[i] *= bs;
1467: }
1468: /* Build indices from node numbering */
1469: ISGetLocalSize(isnumb,&nidx);
1470: PetscMalloc1(nidx,&indices);
1471: for (i=0; i<nidx; i++) indices[i] = i; /* needs to be initialized */
1472: ISGetIndices(isnumb,&numbering);
1473: PetscSortIntWithPermutation(nidx,numbering,indices);
1474: ISRestoreIndices(isnumb,&numbering);
1475: if (isbaij && bs > 1) { /* adjust for the block-aij case */
1476: PetscMalloc1(nidx*bs,&newidx);
1477: for (i=0; i<nidx; i++) {
1478: for (j=0; j<bs; j++) newidx[i*bs+j] = indices[i]*bs + j;
1479: }
1480: PetscFree(indices);
1481: nidx *= bs;
1482: indices = newidx;
1483: }
1484: /* Shift to get global indices */
1485: for (i=0; i<nidx; i++) indices[i] += rstart;
1487: /* Build the index sets for each block */
1488: for (i=0; i<n; i++) {
1489: ISCreateGeneral(PETSC_COMM_SELF,count[i],&indices[start],PETSC_COPY_VALUES,&is[i]);
1490: ISSort(is[i]);
1491: start += count[i];
1492: }
1494: PetscFree(count);
1495: PetscFree(indices);
1496: ISDestroy(&isnumb);
1497: ISDestroy(&ispart);
1499: }
1500: return(0);
1501: }
1503: /*@C
1504: PCASMDestroySubdomains - Destroys the index sets created with
1505: PCASMCreateSubdomains(). Should be called after setting subdomains
1506: with PCASMSetLocalSubdomains().
1508: Collective
1510: Input Parameters:
1511: + n - the number of index sets
1512: . is - the array of index sets
1513: - is_local - the array of local index sets, can be NULL
1515: Level: advanced
1517: .seealso: PCASMCreateSubdomains(), PCASMSetLocalSubdomains()
1518: @*/
1519: PetscErrorCode PCASMDestroySubdomains(PetscInt n, IS is[], IS is_local[])
1520: {
1521: PetscInt i;
1525: if (n <= 0) return(0);
1526: if (is) {
1528: for (i=0; i<n; i++) { ISDestroy(&is[i]); }
1529: PetscFree(is);
1530: }
1531: if (is_local) {
1533: for (i=0; i<n; i++) { ISDestroy(&is_local[i]); }
1534: PetscFree(is_local);
1535: }
1536: return(0);
1537: }
1539: /*@
1540: PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
1541: preconditioner for a two-dimensional problem on a regular grid.
1543: Not Collective
1545: Input Parameters:
1546: + m, n - the number of mesh points in the x and y directions
1547: . M, N - the number of subdomains in the x and y directions
1548: . dof - degrees of freedom per node
1549: - overlap - overlap in mesh lines
1551: Output Parameters:
1552: + Nsub - the number of subdomains created
1553: . is - array of index sets defining overlapping (if overlap > 0) subdomains
1554: - is_local - array of index sets defining non-overlapping subdomains
1556: Note:
1557: Presently PCASMCreateSubdomains2D() is valid only for sequential
1558: preconditioners. More general related routines are
1559: PCASMSetTotalSubdomains() and PCASMSetLocalSubdomains().
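   Example (a sketch for a 64 x 64 grid split into a 4 x 4 array of subdomains with one
   mesh line of overlap; assumes PetscInt Nsub; IS *is,*is_local;):
.vb
      PCASMCreateSubdomains2D(64,64,4,4,1,1,&Nsub,&is,&is_local);
      PCASMSetLocalSubdomains(pc,Nsub,is,is_local);
.ve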
1561: Level: advanced
1563: .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
1564: PCASMSetOverlap()
1565: @*/
1566: PetscErrorCode PCASMCreateSubdomains2D(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt dof,PetscInt overlap,PetscInt *Nsub,IS **is,IS **is_local)
1567: {
1568: PetscInt i,j,height,width,ystart,xstart,yleft,yright,xleft,xright,loc_outer;
1570: PetscInt nidx,*idx,loc,ii,jj,count;
1573: if (dof != 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot handle dof != 1 yet");
1575: *Nsub = N*M;
1576: PetscMalloc1(*Nsub,is);
1577: PetscMalloc1(*Nsub,is_local);
1578: ystart = 0;
1579: loc_outer = 0;
1580: for (i=0; i<N; i++) {
1581: height = n/N + ((n % N) > i); /* height of subdomain */
1582: if (height < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many N subdomains for mesh dimension n");
1583: yleft = ystart - overlap; if (yleft < 0) yleft = 0;
1584: yright = ystart + height + overlap; if (yright > n) yright = n;
1585: xstart = 0;
1586: for (j=0; j<M; j++) {
1587: width = m/M + ((m % M) > j); /* width of subdomain */
1588: if (width < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many M subdomains for mesh dimension m");
1589: xleft = xstart - overlap; if (xleft < 0) xleft = 0;
1590: xright = xstart + width + overlap; if (xright > m) xright = m;
1591: nidx = (xright - xleft)*(yright - yleft);
1592: PetscMalloc1(nidx,&idx);
1593: loc = 0;
1594: for (ii=yleft; ii<yright; ii++) {
1595: count = m*ii + xleft;
1596: for (jj=xleft; jj<xright; jj++) idx[loc++] = count++;
1597: }
1598: ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,PETSC_COPY_VALUES,(*is)+loc_outer);
1599: if (overlap == 0) {
1600: PetscObjectReference((PetscObject)(*is)[loc_outer]);
1602: (*is_local)[loc_outer] = (*is)[loc_outer];
1603: } else {
1604: for (loc=0,ii=ystart; ii<ystart+height; ii++) {
1605: for (jj=xstart; jj<xstart+width; jj++) {
1606: idx[loc++] = m*ii + jj;
1607: }
1608: }
1609: ISCreateGeneral(PETSC_COMM_SELF,loc,idx,PETSC_COPY_VALUES,*is_local+loc_outer);
1610: }
1611: PetscFree(idx);
1612: xstart += width;
1613: loc_outer++;
1614: }
1615: ystart += height;
1616: }
1617: for (i=0; i<*Nsub; i++) { ISSort((*is)[i]); }
1618: return(0);
1619: }
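/* Editor's sketch (not part of the original asm.c): tiling an 8x8 sequential grid into a
   2x2 array of subdomains with one mesh line of overlap. The name pc is illustrative and
   assumed to be a PC of type PCASM on a single process.

     PetscInt Nsub;
     IS       *is,*is_local;
     PCASMCreateSubdomains2D(8,8,2,2,1,1,&Nsub,&is,&is_local);  // Nsub becomes 2*2 = 4
     PCASMSetLocalSubdomains(pc,Nsub,is,is_local);
     PCASMDestroySubdomains(Nsub,is,is_local);                  // release the caller's copies
*/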
1621: /*@C
1622: PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
1623: only) for the additive Schwarz preconditioner.
1625: Not Collective
1627: Input Parameter:
1628: . pc - the preconditioner context
1630: Output Parameters:
1631: + n - the number of subdomains for this processor (default value = 1)
1632: . is - the index sets that define the subdomains for this processor
1633: - is_local - the index sets that define the local part of the subdomains for this processor (can be NULL)
1636: Notes:
1637: The IS numbering is in the parallel, global numbering of the vector.
1639: Level: advanced
1641: .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1642: PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubmatrices()
1643: @*/
1644: PetscErrorCode PCASMGetLocalSubdomains(PC pc,PetscInt *n,IS *is[],IS *is_local[])
1645: {
1646: PC_ASM *osm = (PC_ASM*)pc->data;
1648: PetscBool match;
1654: PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);
1655: if (!match) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"PC is not a PCASM");
1656: if (n) *n = osm->n_local_true;
1657: if (is) *is = osm->is;
1658: if (is_local) *is_local = osm->is_local;
1659: return(0);
1660: }
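/* Editor's sketch (not part of the original asm.c): inspecting the subdomains of an
   existing PCASM; pc is illustrative. The returned arrays point at the preconditioner's
   internal data, so the caller must not destroy or free them.

     PetscInt n;
     IS       *is,*is_local;
     PCASMGetLocalSubdomains(pc,&n,&is,&is_local);  // is_local may come back NULL
*/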
1662: /*@C
1663: PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1664: only) for the additive Schwarz preconditioner.
1666: Not Collective
1668: Input Parameter:
1669: . pc - the preconditioner context
1671: Output Parameters:
1672: + n - the number of matrices for this processor (default value = 1)
1673: - mat - the matrices
1675: Level: advanced
1677: Notes:
1678: Call after PCSetUp() (or KSPSetUp()) but before PCApply() and before PCSetUpOnBlocks().
1680: Usually one would use PCSetModifySubMatrices() to change the submatrices when building the preconditioner.
1682: .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1683: PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains(), PCSetModifySubMatrices()
1684: @*/
1685: PetscErrorCode PCASMGetLocalSubmatrices(PC pc,PetscInt *n,Mat *mat[])
1686: {
1687: PC_ASM *osm;
1689: PetscBool match;
1695: if (!pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUp() or PCSetUp().");
1696: PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);
1697: if (!match) {
1698: if (n) *n = 0;
1699: if (mat) *mat = NULL;
1700: } else {
1701: osm = (PC_ASM*)pc->data;
1702: if (n) *n = osm->n_local_true;
1703: if (mat) *mat = osm->pmat;
1704: }
1705: return(0);
1706: }
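/* Editor's sketch (not part of the original asm.c): retrieving the subdomain matrices once
   the preconditioner has been set up, per the note above. The names ksp and pc are
   illustrative (KSPSetUp() additionally requires petscksp.h); the matrices are owned by
   the PC and must not be destroyed by the caller.

     PetscInt n;
     Mat      *mats;
     KSPSetUp(ksp);                          // or PCSetUp(pc); required before the call below
     PCASMGetLocalSubmatrices(pc,&n,&mats);  // mats[0..n-1] are the per-subdomain matrices
*/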
1708: /*@
1709: PCASMSetDMSubdomains - Indicates whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.
1711: Logically Collective
1713: Input Parameters:
1714: + pc - the preconditioner
1715: - flg - boolean indicating whether to use subdomains defined by the DM
1717: Options Database Key:
1718: . -pc_asm_dm_subdomains <bool> - use subdomains defined by the DM, if possible
1720: Level: intermediate
1722: Notes:
1723: PCASMSetTotalSubdomains() and PCASMSetOverlap() take precedence over PCASMSetDMSubdomains(),
1724: so setting either of the first two effectively turns the latter off.
1726: .seealso: PCASMGetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap()
1727: PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1728: @*/
1729: PetscErrorCode PCASMSetDMSubdomains(PC pc,PetscBool flg)
1730: {
1731: PC_ASM *osm = (PC_ASM*)pc->data;
1733: PetscBool match;
1738: if (pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Cannot be changed after the PC has been set up");
1739: PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);
1740: if (match) {
1741: osm->dm_subdomains = flg;
1742: }
1743: return(0);
1744: }
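/* Editor's sketch (not part of the original asm.c): enabling DM-defined subdomains before
   the preconditioner is set up; pc is illustrative. The same effect is available from the
   options database.

     PCASMSetDMSubdomains(pc,PETSC_TRUE);  // must precede PCSetUp()/KSPSetUp()
     // command-line equivalent: -pc_asm_dm_subdomains
     // note: PCASMSetTotalSubdomains() or PCASMSetOverlap() takes precedence over this
*/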
1746: /*@
1747: PCASMGetDMSubdomains - Returns a flag indicating whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.

1748: Not Collective
1750: Input Parameter:
1751: . pc - the preconditioner
1753: Output Parameter:
1754: . flg - boolean indicating whether to use subdomains defined by the DM
1756: Level: intermediate
1758: .seealso: PCASMSetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap()
1759: PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1760: @*/
1761: PetscErrorCode PCASMGetDMSubdomains(PC pc,PetscBool* flg)
1762: {
1763: PC_ASM *osm = (PC_ASM*)pc->data;
1765: PetscBool match;
1770: PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);
1771: if (match) {
1772: if (flg) *flg = osm->dm_subdomains;
1773: }
1774: return(0);
1775: }
1777: /*@
1778: PCASMGetSubMatType - Gets the matrix type used for ASM subsolves, as a string.
1780: Not Collective
1782: Input Parameter:
1783: . pc - the PC
1785: Output Parameter:
1786: . sub_mat_type - the name of the matrix type used for the subdomain solves
1788: Level: advanced
1790: .seealso: PCASMSetSubMatType(), PCASM, PCSetType(), VecSetType(), MatType, Mat
1791: @*/
1792: PetscErrorCode PCASMGetSubMatType(PC pc,MatType *sub_mat_type)
{
1795: PetscTryMethod(pc,"PCASMGetSubMatType_C",(PC,MatType*),(pc,sub_mat_type));
1796: return(0);
1797: }
1799: /*@
1800: PCASMSetSubMatType - Set the type of matrix used for ASM subsolves
1802: Logically Collective on PC
1804: Input Parameters:
1805: + pc - the PC object
1806: - sub_mat_type - matrix type
1808: Options Database Key:
1809: . -pc_asm_sub_mat_type <sub_mat_type> - Sets the matrix type used for subsolves, for example, seqaijviennacl. If you specify a base name like aijviennacl, the corresponding sequential type is assumed.
1811: Notes:
1812: See "${PETSC_DIR}/include/petscmat.h" for available types
1814: Level: advanced
1816: .seealso: PCASMGetSubMatType(), PCASM, PCSetType(), VecSetType(), MatType, Mat
1817: @*/
1818: PetscErrorCode PCASMSetSubMatType(PC pc,MatType sub_mat_type)
1819: {
1822: PetscTryMethod(pc,"PCASMSetSubMatType_C",(PC,MatType),(pc,sub_mat_type));
1823: return(0);
1824: }
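/* Editor's sketch (not part of the original asm.c): choosing a matrix type for the
   subdomain solves and reading it back; pc is illustrative, and MATSEQAIJVIENNACL is
   available only when PETSc was configured with ViennaCL support.

     MatType t;
     PCASMSetSubMatType(pc,MATSEQAIJVIENNACL);  // equivalent: -pc_asm_sub_mat_type seqaijviennacl
     PCASMGetSubMatType(pc,&t);                 // t now names the subdomain matrix type
*/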