Actual source code: precon.c

  1: #define PETSCKSP_DLL

  3: /*
  4:     The PC (preconditioner) interface routines, callable by users.
  5: */
  6:  #include "private/pcimpl.h"

  8: /* Logging support */
  9: PetscCookie    PC_COOKIE;
 10: PetscLogEvent  PC_SetUp, PC_SetUpOnBlocks, PC_Apply, PC_ApplyCoarse, PC_ApplyMultiple, PC_ApplySymmetricLeft;
 11: PetscLogEvent  PC_ApplySymmetricRight, PC_ModifySubMatrices, PC_ApplyOnBlocks, PC_ApplyTransposeOnBlocks;

 15: PetscErrorCode PCGetDefaultType_Private(PC pc,const char* type[])
 16: {
 18:   PetscMPIInt    size;
 19:   PetscTruth     flg1,flg2,set,flg3;

 22:   MPI_Comm_size(((PetscObject)pc)->comm,&size);
 23:   if (pc->pmat) {
 24:     PetscErrorCode (*f)(Mat,PetscTruth*,MatReuse,Mat*);
 25:     PetscObjectQueryFunction((PetscObject)pc->pmat,"MatGetDiagonalBlock_C",(void (**)(void))&f);
 26:     if (size == 1) {
 27:       MatGetFactorAvailable(pc->pmat,"petsc",MAT_FACTOR_ICC,&flg1);
 28:       MatGetFactorAvailable(pc->pmat,"petsc",MAT_FACTOR_ILU,&flg2);
 29:       MatIsSymmetricKnown(pc->pmat,&set,&flg3);
 30:       if (flg1 && (!flg2 || (set && flg3))) {
 31:         *type = PCICC;
 32:       } else if (flg2) {
 33:         *type = PCILU;
 34:       } else if (f) { /* likely is a parallel matrix run on one processor */
 35:         *type = PCBJACOBI;
 36:       } else {
 37:         *type = PCNONE;
 38:       }
 39:     } else {
 40:        if (f) {
 41:         *type = PCBJACOBI;
 42:       } else {
 43:         *type = PCNONE;
 44:       }
 45:     }
 46:   } else {
 47:     if (size == 1) {
 48:       *type = PCILU;
 49:     } else {
 50:       *type = PCBJACOBI;
 51:     }
 52:   }
 53:   return(0);
 54: }

 58: /*@
 59:    PCDestroy - Destroys PC context that was created with PCCreate().

 61:    Collective on PC

 63:    Input Parameter:
 64: .  pc - the preconditioner context

 66:    Level: developer

 68: .keywords: PC, destroy

 70: .seealso: PCCreate(), PCSetUp()
 71: @*/
 72: PetscErrorCode  PCDestroy(PC pc)
 73: {

 78:   if (--((PetscObject)pc)->refct > 0) return(0);

 80:   /* if memory was published with AMS then destroy it */
 81:   PetscObjectDepublish(pc);

 83:   if (pc->ops->destroy)       { (*pc->ops->destroy)(pc);}
 84:   if (pc->diagonalscaleright) {VecDestroy(pc->diagonalscaleright);}
 85:   if (pc->diagonalscaleleft)  {VecDestroy(pc->diagonalscaleleft);}

 87:   if (pc->pmat) {MatDestroy(pc->pmat);}
 88:   if (pc->mat) {MatDestroy(pc->mat);}

 90:   PetscHeaderDestroy(pc);
 91:   return(0);
 92: }

 96: /*@C
 97:    PCDiagonalScale - Indicates if the preconditioner applies an additional left and right
 98:       scaling as needed by certain time-stepping codes.

100:    Collective on PC

102:    Input Parameter:
103: .  pc - the preconditioner context

105:    Output Parameter:
106: .  flag - PETSC_TRUE if it applies the scaling

108:    Level: developer

110:    Notes: If this returns PETSC_TRUE then the system solved via the Krylov method is
111: $           D M A D^{-1} y = D M b  for left preconditioning or
112: $           D A M D^{-1} z = D b for right preconditioning

114: .keywords: PC

116: .seealso: PCCreate(), PCSetUp(), PCDiagonalScaleLeft(), PCDiagonalScaleRight(), PCDiagonalScaleSet()
117: @*/
118: PetscErrorCode  PCDiagonalScale(PC pc,PetscTruth *flag)
119: {
123:   *flag = pc->diagonalscale;
124:   return(0);
125: }

129: /*@
 130:    PCDiagonalScaleSet - Sets the left scaling vector used to apply an additional left and right
 131:       scaling as needed by certain time-stepping codes.

133:    Collective on PC

135:    Input Parameters:
136: +  pc - the preconditioner context
137: -  s - scaling vector

139:    Level: intermediate

141:    Notes: The system solved via the Krylov method is
142: $           D M A D^{-1} y = D M b  for left preconditioning or
143: $           D A M D^{-1} z = D b for right preconditioning

145:    PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}.

147: .keywords: PC

149: .seealso: PCCreate(), PCSetUp(), PCDiagonalScaleLeft(), PCDiagonalScaleRight(), PCDiagonalScale()
150: @*/
151: PetscErrorCode  PCDiagonalScaleSet(PC pc,Vec s)
152: {

158:   pc->diagonalscale     = PETSC_TRUE;
159:   PetscObjectReference((PetscObject)s);
160:   if (pc->diagonalscaleleft) {
161:     VecDestroy(pc->diagonalscaleleft);
162:   }
163:   pc->diagonalscaleleft = s;
164:   if (!pc->diagonalscaleright) {
165:     VecDuplicate(s,&pc->diagonalscaleright);
166:   }
167:   VecCopy(s,pc->diagonalscaleright);
168:   VecReciprocal(pc->diagonalscaleright);
169:   return(0);
170: }
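
/*
   Usage sketch (illustrative only; the helper name is invented and not called elsewhere in this
   file): how a scaling vector set with PCDiagonalScaleSet() is typically used together with
   PCDiagonalScaleLeft() and PCDiagonalScaleRight().  The vectors d, x, and y are assumed to be
   created and compatible; error checking is omitted.
*/
static PetscErrorCode PCExampleDiagonalScale(PC pc,Vec d,Vec x,Vec y)
{
  PCDiagonalScaleSet(pc,d);        /* left scaling is D = diag(d); right scaling becomes D^{-1} */
  PCDiagonalScaleLeft(pc,x,y);     /* y = D x       (or y = x when scaling is turned off)       */
  PCDiagonalScaleRight(pc,x,y);    /* y = D^{-1} x  (or y = x when scaling is turned off)       */
  return(0);
}
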

174: /*@
 175:    PCDiagonalScaleLeft - Scales a vector by the left scaling D as needed by certain
 176:       time-stepping codes.

178:    Collective on PC

180:    Input Parameters:
181: +  pc - the preconditioner context
182: .  in - input vector
 183: -  out - scaled vector (may be the same as in)

185:    Level: intermediate

187:    Notes: The system solved via the Krylov method is
188: $           D M A D^{-1} y = D M b  for left preconditioning or
189: $           D A M D^{-1} z = D b for right preconditioning

191:    PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}.

 193:    If diagonal scaling is turned off and in is not out, then in is copied to out.

195: .keywords: PC

197: .seealso: PCCreate(), PCSetUp(), PCDiagonalScaleSet(), PCDiagonalScaleRight(), PCDiagonalScale()
198: @*/
199: PetscErrorCode  PCDiagonalScaleLeft(PC pc,Vec in,Vec out)
200: {

207:   if (pc->diagonalscale) {
208:     VecPointwiseMult(out,pc->diagonalscaleleft,in);
209:   } else if (in != out) {
210:     VecCopy(in,out);
211:   }
212:   return(0);
213: }

217: /*@
218:    PCDiagonalScaleRight - Scales a vector by the right scaling as needed by certain time-stepping codes.

220:    Collective on PC

222:    Input Parameters:
223: +  pc - the preconditioner context
224: .  in - input vector
 225: -  out - scaled vector (may be the same as in)

227:    Level: intermediate

229:    Notes: The system solved via the Krylov method is
230: $           D M A D^{-1} y = D M b  for left preconditioning or
231: $           D A M D^{-1} z = D b for right preconditioning

233:    PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}.

 235:    If diagonal scaling is turned off and in is not out, then in is copied to out.

237: .keywords: PC

239: .seealso: PCCreate(), PCSetUp(), PCDiagonalScaleLeft(), PCDiagonalScaleSet(), PCDiagonalScale()
240: @*/
241: PetscErrorCode  PCDiagonalScaleRight(PC pc,Vec in,Vec out)
242: {

249:   if (pc->diagonalscale) {
250:     VecPointwiseMult(out,pc->diagonalscaleright,in);
251:   } else if (in != out) {
252:     VecCopy(in,out);
253:   }
254:   return(0);
255: }

257: #if 0
260: static PetscErrorCode PCPublish_Petsc(PetscObject obj)
261: {
263:   return(0);
264: }
265: #endif

269: /*@
270:    PCCreate - Creates a preconditioner context.

272:    Collective on MPI_Comm

274:    Input Parameter:
275: .  comm - MPI communicator 

277:    Output Parameter:
278: .  pc - location to put the preconditioner context

280:    Notes:
 281:    The default preconditioner for sparse matrices is PCILU or PCICC with 0 fill on one process, and block Jacobi (PCBJACOBI) with PCILU or PCICC
 282:    on each block in parallel. For dense matrices it is always PCNONE.

284:    Level: developer

286: .keywords: PC, create, context

288: .seealso: PCSetUp(), PCApply(), PCDestroy()
289: @*/
290: PetscErrorCode  PCCreate(MPI_Comm comm,PC *newpc)
291: {
292:   PC             pc;

297:   *newpc = 0;
298: #ifndef PETSC_USE_DYNAMIC_LIBRARIES
299:   PCInitializePackage(PETSC_NULL);
300: #endif

302:   PetscHeaderCreate(pc,_p_PC,struct _PCOps,PC_COOKIE,-1,"PC",comm,PCDestroy,PCView);

304:   pc->mat                  = 0;
305:   pc->pmat                 = 0;
306:   pc->setupcalled          = 0;
307:   pc->setfromoptionscalled = 0;
308:   pc->data                 = 0;
309:   pc->diagonalscale        = PETSC_FALSE;
310:   pc->diagonalscaleleft    = 0;
311:   pc->diagonalscaleright   = 0;

313:   pc->modifysubmatrices   = 0;
314:   pc->modifysubmatricesP  = 0;
315:   PetscPublishAll(pc);
316:   *newpc = pc;
317:   return(0);

319: }
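
/*
   Usage sketch (illustrative; the hypothetical helper below is not part of this file): the basic
   PC life cycle using routines defined here.  Assumes A, b, and x are already assembled and
   compatible; the choice of PCJACOBI is arbitrary and error checking is omitted.
*/
static PetscErrorCode PCExampleLifeCycle(MPI_Comm comm,Mat A,Vec b,Vec x)
{
  PC pc;
  PCCreate(comm,&pc);                                /* empty context, no type chosen yet     */
  PCSetOperators(pc,A,A,DIFFERENT_NONZERO_PATTERN);  /* same matrix defines system and PC     */
  PCSetType(pc,PCJACOBI);                            /* otherwise PCSetUp() picks the default */
  PCSetUp(pc);                                       /* build the preconditioner              */
  PCApply(pc,b,x);                                   /* x = B b                               */
  PCDestroy(pc);                                     /* also drops the references to A        */
  return(0);
}
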

321: /* -------------------------------------------------------------------------------*/

325: /*@
326:    PCApply - Applies the preconditioner to a vector.

328:    Collective on PC and Vec

330:    Input Parameters:
331: +  pc - the preconditioner context
332: -  x - input vector

334:    Output Parameter:
335: .  y - output vector

337:    Level: developer

339: .keywords: PC, apply

341: .seealso: PCApplyTranspose(), PCApplyBAorAB()
342: @*/
343: PetscErrorCode  PCApply(PC pc,Vec x,Vec y)
344: {

351:   if (x == y) SETERRQ(PETSC_ERR_ARG_IDN,"x and y must be different vectors");
352:   if (pc->setupcalled < 2) {
353:     PCSetUp(pc);
354:   }
355:   if (!pc->ops->apply) SETERRQ(PETSC_ERR_SUP,"PC does not have apply");
356:   PetscLogEventBegin(PC_Apply,pc,x,y,0);
357:   (*pc->ops->apply)(pc,x,y);
358:   PetscLogEventEnd(PC_Apply,pc,x,y,0);
359:   return(0);
360: }
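
/*
   Usage sketch (illustrative; hypothetical helper): PCApply() used to form the preconditioned
   residual z = B (b - A x), the quantity a left-preconditioned Krylov method works with.
   Assumes the PC already has its operators set; error checking is omitted.
*/
static PetscErrorCode PCExamplePreconditionedResidual(PC pc,Vec b,Vec x,Vec z)
{
  Mat A;
  Vec r;
  PCGetOperators(pc,&A,PETSC_NULL,PETSC_NULL);
  VecDuplicate(b,&r);
  MatMult(A,x,r);       /* r = A x     */
  VecAYPX(r,-1.0,b);    /* r = b - A x */
  PCApply(pc,r,z);      /* z = B r     */
  VecDestroy(r);
  return(0);
}
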

364: /*@
365:    PCApplySymmetricLeft - Applies the left part of a symmetric preconditioner to a vector.

367:    Collective on PC and Vec

369:    Input Parameters:
370: +  pc - the preconditioner context
371: -  x - input vector

373:    Output Parameter:
374: .  y - output vector

376:    Notes:
377:    Currently, this routine is implemented only for PCICC and PCJACOBI preconditioners.

379:    Level: developer

381: .keywords: PC, apply, symmetric, left

383: .seealso: PCApply(), PCApplySymmetricRight()
384: @*/
385: PetscErrorCode  PCApplySymmetricLeft(PC pc,Vec x,Vec y)
386: {

393:   if (x == y) SETERRQ(PETSC_ERR_ARG_IDN,"x and y must be different vectors");
394:   if (pc->setupcalled < 2) {
395:     PCSetUp(pc);
396:   }
397:   if (!pc->ops->applysymmetricleft) SETERRQ(PETSC_ERR_SUP,"PC does not have left symmetric apply");
398:   PetscLogEventBegin(PC_ApplySymmetricLeft,pc,x,y,0);
399:   (*pc->ops->applysymmetricleft)(pc,x,y);
400:   PetscLogEventEnd(PC_ApplySymmetricLeft,pc,x,y,0);
401:   return(0);
402: }

406: /*@
407:    PCApplySymmetricRight - Applies the right part of a symmetric preconditioner to a vector.

409:    Collective on PC and Vec

411:    Input Parameters:
412: +  pc - the preconditioner context
413: -  x - input vector

415:    Output Parameter:
416: .  y - output vector

418:    Level: developer

420:    Notes:
421:    Currently, this routine is implemented only for PCICC and PCJACOBI preconditioners.

423: .keywords: PC, apply, symmetric, right

425: .seealso: PCApply(), PCApplySymmetricLeft()
426: @*/
427: PetscErrorCode  PCApplySymmetricRight(PC pc,Vec x,Vec y)
428: {

435:   if (x == y) SETERRQ(PETSC_ERR_ARG_IDN,"x and y must be different vectors");
436:   if (pc->setupcalled < 2) {
437:     PCSetUp(pc);
438:   }
 439:   if (!pc->ops->applysymmetricright) SETERRQ(PETSC_ERR_SUP,"PC does not have right symmetric apply");
440:   PetscLogEventBegin(PC_ApplySymmetricRight,pc,x,y,0);
441:   (*pc->ops->applysymmetricright)(pc,x,y);
442:   PetscLogEventEnd(PC_ApplySymmetricRight,pc,x,y,0);
443:   return(0);
444: }
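
/*
   Usage sketch (illustrative; hypothetical helper): applying a symmetrically split preconditioner
   B = B_L B_R around the operator by hand, mirroring what PCApplyBAorAB() does for PC_SYMMETRIC.
   Assumes a PC type (such as PCICC or PCJACOBI) that supports the symmetric applies; error
   checking is omitted.
*/
static PetscErrorCode PCExampleSymmetricApply(PC pc,Vec x,Vec y,Vec work)
{
  Mat A;
  PCGetOperators(pc,&A,PETSC_NULL,PETSC_NULL);
  PCApplySymmetricRight(pc,x,work);   /* work = B_R x       */
  MatMult(A,work,y);                  /* y    = A B_R x     */
  VecCopy(y,work);
  PCApplySymmetricLeft(pc,work,y);    /* y    = B_L A B_R x */
  return(0);
}
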

448: /*@
449:    PCApplyTranspose - Applies the transpose of preconditioner to a vector.

451:    Collective on PC and Vec

453:    Input Parameters:
454: +  pc - the preconditioner context
455: -  x - input vector

457:    Output Parameter:
458: .  y - output vector

460:    Level: developer

462: .keywords: PC, apply, transpose

464: .seealso: PCApply(), PCApplyBAorAB(), PCApplyBAorABTranspose(), PCApplyTransposeExists()
465: @*/
466: PetscErrorCode  PCApplyTranspose(PC pc,Vec x,Vec y)
467: {

474:   if (x == y) SETERRQ(PETSC_ERR_ARG_IDN,"x and y must be different vectors");
475:   if (pc->setupcalled < 2) {
476:     PCSetUp(pc);
477:   }
478:   if (!pc->ops->applytranspose) SETERRQ(PETSC_ERR_SUP,"PC does not have apply transpose");
479:   PetscLogEventBegin(PC_Apply,pc,x,y,0);
480:   (*pc->ops->applytranspose)(pc,x,y);
481:   PetscLogEventEnd(PC_Apply,pc,x,y,0);
482:   return(0);
483: }

487: /*@
488:    PCApplyTransposeExists - Test whether the preconditioner has a transpose apply operation

490:    Collective on PC and Vec

492:    Input Parameters:
493: .  pc - the preconditioner context

495:    Output Parameter:
496: .  flg - PETSC_TRUE if a transpose operation is defined

498:    Level: developer

500: .keywords: PC, apply, transpose

502: .seealso: PCApplyTranspose()
503: @*/
504: PetscErrorCode  PCApplyTransposeExists(PC pc,PetscTruth *flg)
505: {
509:   if (pc->ops->applytranspose) *flg = PETSC_TRUE;
510:   else                         *flg = PETSC_FALSE;
511:   return(0);
512: }
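
/*
   Usage sketch (illustrative; hypothetical helper): query for a transpose apply before using it.
   The fallback to PCApply() is only meaningful when the preconditioner is symmetric; error
   checking is omitted.
*/
static PetscErrorCode PCExampleApplyTransposeIfAvailable(PC pc,Vec x,Vec y)
{
  PetscTruth flg;
  PCApplyTransposeExists(pc,&flg);
  if (flg) {
    PCApplyTranspose(pc,x,y);   /* y = B' x */
  } else {
    PCApply(pc,x,y);            /* y = B x  */
  }
  return(0);
}
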

516: /*@
517:    PCApplyBAorAB - Applies the preconditioner and operator to a vector. y = B*A*x or y = A*B*x.

519:    Collective on PC and Vec

521:    Input Parameters:
522: +  pc - the preconditioner context
523: .  side - indicates the preconditioner side, one of PC_LEFT, PC_RIGHT, or PC_SYMMETRIC
524: .  x - input vector
525: -  work - work vector

527:    Output Parameter:
528: .  y - output vector

530:    Level: developer

532: .keywords: PC, apply, operator

534: .seealso: PCApply(), PCApplyTranspose(), PCApplyBAorABTranspose()
535: @*/
536: PetscErrorCode  PCApplyBAorAB(PC pc,PCSide side,Vec x,Vec y,Vec work)
537: {

545:   if (x == y) SETERRQ(PETSC_ERR_ARG_IDN,"x and y must be different vectors");
546:   if (side != PC_LEFT && side != PC_SYMMETRIC && side != PC_RIGHT) {
547:     SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Side must be right, left, or symmetric");
548:   }
549:   if (pc->diagonalscale && side == PC_SYMMETRIC) {
550:     SETERRQ(PETSC_ERR_SUP,"Cannot include diagonal scaling with symmetric preconditioner application");
551:   }

553:   if (pc->setupcalled < 2) {
554:     PCSetUp(pc);
555:   }

557:   if (pc->diagonalscale) {
558:     if (pc->ops->applyBA) {
559:       Vec work2; /* this is expensive, but to fix requires a second work vector argument to PCApplyBAorAB() */
560:       VecDuplicate(x,&work2);
561:       PCDiagonalScaleRight(pc,x,work2);
562:       (*pc->ops->applyBA)(pc,side,work2,y,work);
563:       PCDiagonalScaleLeft(pc,y,y);
564:       VecDestroy(work2);
565:     } else if (side == PC_RIGHT) {
566:       PCDiagonalScaleRight(pc,x,y);
567:       PCApply(pc,y,work);
568:       MatMult(pc->mat,work,y);
569:       PCDiagonalScaleLeft(pc,y,y);
570:     } else if (side == PC_LEFT) {
571:       PCDiagonalScaleRight(pc,x,y);
572:       MatMult(pc->mat,y,work);
573:       PCApply(pc,work,y);
574:       PCDiagonalScaleLeft(pc,y,y);
575:     } else if (side == PC_SYMMETRIC) {
576:       SETERRQ(PETSC_ERR_SUP,"Cannot provide diagonal scaling with symmetric application of preconditioner");
577:     }
578:   } else {
579:     if (pc->ops->applyBA) {
580:       (*pc->ops->applyBA)(pc,side,x,y,work);
581:     } else if (side == PC_RIGHT) {
582:       PCApply(pc,x,work);
583:       MatMult(pc->mat,work,y);
584:     } else if (side == PC_LEFT) {
585:       MatMult(pc->mat,x,work);
586:       PCApply(pc,work,y);
587:     } else if (side == PC_SYMMETRIC) {
588:       /* There's an extra copy here; maybe should provide 2 work vectors instead? */
589:       PCApplySymmetricRight(pc,x,work);
590:       MatMult(pc->mat,work,y);
591:       VecCopy(y,work);
592:       PCApplySymmetricLeft(pc,work,y);
593:     }
594:   }
595:   return(0);
596: }
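
/*
   Usage sketch (illustrative; hypothetical helper): the right-preconditioned operator product
   y = A B x computed with PCApplyBAorAB(), which is what a right-preconditioned Krylov method
   applies at each iteration.  Error checking is omitted.
*/
static PetscErrorCode PCExampleApplyAB(PC pc,Vec x,Vec y)
{
  Vec work;
  VecDuplicate(x,&work);
  PCApplyBAorAB(pc,PC_RIGHT,x,y,work);   /* work = B x, then y = A work */
  VecDestroy(work);
  return(0);
}
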

600: /*@ 
601:    PCApplyBAorABTranspose - Applies the transpose of the preconditioner
602:    and operator to a vector. That is, applies tr(B) * tr(A) with left preconditioning,
603:    NOT tr(B*A) = tr(A)*tr(B).

605:    Collective on PC and Vec

607:    Input Parameters:
608: +  pc - the preconditioner context
609: .  side - indicates the preconditioner side, one of PC_LEFT, PC_RIGHT, or PC_SYMMETRIC
610: .  x - input vector
611: -  work - work vector

613:    Output Parameter:
614: .  y - output vector


 617:    Notes: This routine is used internally so that the same Krylov code can be used to solve A x = b and A' x = b, with the preconditioner
 618:       defined by B'. This is why it has the funny form of computing tr(B) * tr(A).
619:           
620:     Level: developer

622: .keywords: PC, apply, operator, transpose

624: .seealso: PCApply(), PCApplyTranspose(), PCApplyBAorAB()
625: @*/
626: PetscErrorCode  PCApplyBAorABTranspose(PC pc,PCSide side,Vec x,Vec y,Vec work)
627: {

635:   if (x == y) SETERRQ(PETSC_ERR_ARG_IDN,"x and y must be different vectors");
636:   if (pc->ops->applyBAtranspose) {
637:     (*pc->ops->applyBAtranspose)(pc,side,x,y,work);
638:     return(0);
639:   }
640:   if (side != PC_LEFT && side != PC_RIGHT) {
641:     SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Side must be right or left");
642:   }

644:   if (pc->setupcalled < 2) {
645:     PCSetUp(pc);
646:   }

648:   if (side == PC_RIGHT) {
649:     PCApplyTranspose(pc,x,work);
650:     MatMultTranspose(pc->mat,work,y);
651:   } else if (side == PC_LEFT) {
652:     MatMultTranspose(pc->mat,x,work);
653:     PCApplyTranspose(pc,work,y);
654:   }
655:   /* add support for PC_SYMMETRIC */
 656:   return(0);
657: }

659: /* -------------------------------------------------------------------------------*/

663: /*@
664:    PCApplyRichardsonExists - Determines whether a particular preconditioner has a 
665:    built-in fast application of Richardson's method.

667:    Not Collective

669:    Input Parameter:
670: .  pc - the preconditioner

672:    Output Parameter:
673: .  exists - PETSC_TRUE or PETSC_FALSE

675:    Level: developer

677: .keywords: PC, apply, Richardson, exists

679: .seealso: PCApplyRichardson()
680: @*/
681: PetscErrorCode  PCApplyRichardsonExists(PC pc,PetscTruth *exists)
682: {
686:   if (pc->ops->applyrichardson) *exists = PETSC_TRUE;
687:   else                          *exists = PETSC_FALSE;
688:   return(0);
689: }

693: /*@
694:    PCApplyRichardson - Applies several steps of Richardson iteration with 
695:    the particular preconditioner. This routine is usually used by the 
696:    Krylov solvers and not the application code directly.

698:    Collective on PC

700:    Input Parameters:
701: +  pc  - the preconditioner context
702: .  b   - the right hand side
703: .  w   - one work vector
704: .  rtol - relative decrease in residual norm convergence criteria
705: .  abstol - absolute residual norm convergence criteria
706: .  dtol - divergence residual norm increase criteria
707: .  its - the number of iterations to apply.
 708: -  guesszero - PETSC_TRUE if the initial guess in y is zero, PETSC_FALSE if y contains a nonzero initial guess

 710:    Output Parameters:
 711: +  outits - number of iterations actually used (for SOR this always equals its)
 712: .  reason - the reason the apply terminated
 713: -  y - the solution (also contains the initial guess if guesszero is PETSC_FALSE)

715:    Notes: 
716:    Most preconditioners do not support this function. Use the command
717:    PCApplyRichardsonExists() to determine if one does.

 719:    Except for the multigrid PC, this routine ignores the convergence tolerances
 720:    and always runs for the requested number of iterations.
721:  
722:    Level: developer

724: .keywords: PC, apply, Richardson

726: .seealso: PCApplyRichardsonExists()
727: @*/
728: PetscErrorCode  PCApplyRichardson(PC pc,Vec b,Vec y,Vec w,PetscReal rtol,PetscReal abstol, PetscReal dtol,PetscInt its,PetscTruth guesszero,PetscInt *outits,PCRichardsonConvergedReason *reason)
729: {

737:   if (b == y) SETERRQ(PETSC_ERR_ARG_IDN,"b and y must be different vectors");
738:   if (pc->setupcalled < 2) {
739:     PCSetUp(pc);
740:   }
741:   if (!pc->ops->applyrichardson) SETERRQ(PETSC_ERR_SUP,"PC does not have apply richardson");
742:   (*pc->ops->applyrichardson)(pc,b,y,w,rtol,abstol,dtol,its,guesszero,outits,reason);
743:   return(0);
744: }
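
/*
   Usage sketch (illustrative; hypothetical helper): running a fixed number of Richardson sweeps
   with a PC (for example PCSOR) that provides a built-in Richardson application.  The tolerances
   are passed through but, as noted above, most PCs ignore them; error checking is omitted.
*/
static PetscErrorCode PCExampleRichardson(PC pc,Vec b,Vec y,Vec w)
{
  PetscTruth                  exists;
  PetscInt                    outits;
  PCRichardsonConvergedReason reason;

  PCApplyRichardsonExists(pc,&exists);
  if (!exists) return(0);     /* this PC type has no built-in Richardson */
  VecSet(y,0.0);              /* start from a zero initial guess         */
  PCApplyRichardson(pc,b,y,w,1.e-8,1.e-50,1.e5,10,PETSC_TRUE,&outits,&reason);
  return(0);
}
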

746: /* 
747:       a setupcall of 0 indicates never setup, 
748:                      1 needs to be resetup,
749:                      2 does not need any changes.
750: */
753: /*@
754:    PCSetUp - Prepares for the use of a preconditioner.

756:    Collective on PC

758:    Input Parameter:
759: .  pc - the preconditioner context

761:    Level: developer

763: .keywords: PC, setup

765: .seealso: PCCreate(), PCApply(), PCDestroy()
766: @*/
767: PetscErrorCode  PCSetUp(PC pc)
768: {
770:   const char     *def;


775:   if (!pc->mat) {SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Matrix must be set first");}

777:   if (pc->setupcalled > 1) {
778:     PetscInfo(pc,"Setting PC with identical preconditioner\n");
779:     return(0);
780:   } else if (!pc->setupcalled) {
781:     PetscInfo(pc,"Setting up new PC\n");
782:   } else if (pc->flag == SAME_NONZERO_PATTERN) {
783:     PetscInfo(pc,"Setting up PC with same nonzero pattern\n");
784:   } else {
785:     PetscInfo(pc,"Setting up PC with different nonzero pattern\n");
786:   }

788:   if (!((PetscObject)pc)->type_name) {
789:     PCGetDefaultType_Private(pc,&def);
790:     PCSetType(pc,def);
791:   }

793:   PetscLogEventBegin(PC_SetUp,pc,0,0,0);
794:   if (pc->ops->setup) {
795:     (*pc->ops->setup)(pc);
796:   }
797:   pc->setupcalled = 2;
798:   PetscLogEventEnd(PC_SetUp,pc,0,0,0);
799:   return(0);
800: }

804: /*@
805:    PCSetUpOnBlocks - Sets up the preconditioner for each block in
806:    the block Jacobi, block Gauss-Seidel, and overlapping Schwarz 
807:    methods.

809:    Collective on PC

811:    Input Parameters:
812: .  pc - the preconditioner context

814:    Level: developer

816: .keywords: PC, setup, blocks

818: .seealso: PCCreate(), PCApply(), PCDestroy(), PCSetUp()
819: @*/
820: PetscErrorCode  PCSetUpOnBlocks(PC pc)
821: {

826:   if (!pc->ops->setuponblocks) return(0);
827:   PetscLogEventBegin(PC_SetUpOnBlocks,pc,0,0,0);
828:   (*pc->ops->setuponblocks)(pc);
829:   PetscLogEventEnd(PC_SetUpOnBlocks,pc,0,0,0);
830:   return(0);
831: }

835: /*@C
836:    PCSetModifySubMatrices - Sets a user-defined routine for modifying the
837:    submatrices that arise within certain subdomain-based preconditioners.
838:    The basic submatrices are extracted from the preconditioner matrix as
839:    usual; the user can then alter these (for example, to set different boundary
840:    conditions for each submatrix) before they are used for the local solves.

842:    Collective on PC

844:    Input Parameters:
845: +  pc - the preconditioner context
846: .  func - routine for modifying the submatrices
847: -  ctx - optional user-defined context (may be null)

849:    Calling sequence of func:
850: $     func (PC pc,PetscInt nsub,IS *row,IS *col,Mat *submat,void *ctx);

852: .  row - an array of index sets that contain the global row numbers
853:          that comprise each local submatrix
854: .  col - an array of index sets that contain the global column numbers
855:          that comprise each local submatrix
856: .  submat - array of local submatrices
857: -  ctx - optional user-defined context for private data for the 
858:          user-defined func routine (may be null)

860:    Notes:
861:    PCSetModifySubMatrices() MUST be called before KSPSetUp() and
862:    KSPSolve().

864:    A routine set by PCSetModifySubMatrices() is currently called within
865:    the block Jacobi (PCBJACOBI) and additive Schwarz (PCASM)
866:    preconditioners.  All other preconditioners ignore this routine.

868:    Level: advanced

870: .keywords: PC, set, modify, submatrices

872: .seealso: PCModifySubMatrices()
873: @*/
874: PetscErrorCode  PCSetModifySubMatrices(PC pc,PetscErrorCode (*func)(PC,PetscInt,const IS[],const IS[],Mat[],void*),void *ctx)
875: {
878:   pc->modifysubmatrices  = func;
879:   pc->modifysubmatricesP = ctx;
880:   return(0);
881: }
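
/*
   Usage sketch (illustrative; hypothetical callback): a routine with the calling sequence
   documented above, registered with PCSetModifySubMatrices().  Each local submatrix gets its
   first row zeroed with a unit diagonal, a crude stand-in for imposing a per-subdomain Dirichlet
   condition; the row index and the MatZeroRows(Mat,PetscInt,const PetscInt[],PetscScalar)
   calling sequence are assumptions of this sketch.  Error checking is omitted.
*/
static PetscErrorCode MyModifySubMatrices(PC pc,PetscInt nsub,const IS row[],const IS col[],Mat submat[],void *ctx)
{
  PetscInt i,zerorow = 0;
  for (i=0; i<nsub; i++) {
    MatZeroRows(submat[i],1,&zerorow,1.0);   /* zero one row of the local submatrix, 1.0 on its diagonal */
  }
  return(0);
}
/*
   Registration, done before KSPSetUp() and KSPSolve():

      PCSetModifySubMatrices(pc,MyModifySubMatrices,PETSC_NULL);
*/
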

885: /*@C
886:    PCModifySubMatrices - Calls an optional user-defined routine within 
 887:    certain preconditioners if one has been set with PCSetModifySubMatrices().

889:    Collective on PC

891:    Input Parameters:
892: +  pc - the preconditioner context
893: .  nsub - the number of local submatrices
894: .  row - an array of index sets that contain the global row numbers
895:          that comprise each local submatrix
896: .  col - an array of index sets that contain the global column numbers
897:          that comprise each local submatrix
898: .  submat - array of local submatrices
899: -  ctx - optional user-defined context for private data for the 
900:          user-defined routine (may be null)

902:    Output Parameter:
903: .  submat - array of local submatrices (the entries of which may
904:             have been modified)

906:    Notes:
907:    The user should NOT generally call this routine, as it will
908:    automatically be called within certain preconditioners (currently
909:    block Jacobi, additive Schwarz) if set.

911:    The basic submatrices are extracted from the preconditioner matrix
912:    as usual; the user can then alter these (for example, to set different
913:    boundary conditions for each submatrix) before they are used for the
914:    local solves.

916:    Level: developer

918: .keywords: PC, modify, submatrices

920: .seealso: PCSetModifySubMatrices()
921: @*/
922: PetscErrorCode  PCModifySubMatrices(PC pc,PetscInt nsub,const IS row[],const IS col[],Mat submat[],void *ctx)
923: {

928:   if (!pc->modifysubmatrices) return(0);
929:   PetscLogEventBegin(PC_ModifySubMatrices,pc,0,0,0);
930:   (*pc->modifysubmatrices)(pc,nsub,row,col,submat,ctx);
931:   PetscLogEventEnd(PC_ModifySubMatrices,pc,0,0,0);
932:   return(0);
933: }

937: /*@
938:    PCSetOperators - Sets the matrix associated with the linear system and 
939:    a (possibly) different one associated with the preconditioner.

941:    Collective on PC and Mat

943:    Input Parameters:
944: +  pc - the preconditioner context
945: .  Amat - the matrix associated with the linear system
946: .  Pmat - the matrix to be used in constructing the preconditioner, usually
947:           the same as Amat. 
948: -  flag - flag indicating information about the preconditioner matrix structure
949:    during successive linear solves.  This flag is ignored the first time a
950:    linear system is solved, and thus is irrelevant when solving just one linear
951:    system.

953:    Notes: 
954:    The flag can be used to eliminate unnecessary work in the preconditioner 
955:    during the repeated solution of linear systems of the same size.  The 
956:    available options are
957: +    SAME_PRECONDITIONER -
958:        Pmat is identical during successive linear solves.
959:        This option is intended for folks who are using
960:        different Amat and Pmat matrices and wish to reuse the
961:        same preconditioner matrix.  For example, this option
962:        saves work by not recomputing incomplete factorization
963:        for ILU/ICC preconditioners.
964: .     SAME_NONZERO_PATTERN -
965:        Pmat has the same nonzero structure during
966:        successive linear solves. 
967: -     DIFFERENT_NONZERO_PATTERN -
968:        Pmat does not have the same nonzero structure.

970:     Passing a PETSC_NULL for Amat or Pmat removes the matrix that is currently used.

 972:     If you wish to replace either Amat or Pmat but leave the other one untouched then
 973:     first call KSPGetOperators() to get the one you wish to keep, call PetscObjectReference()
 974:     on it and then pass it back in your call to KSPSetOperators().

976:    Caution:
977:    If you specify SAME_NONZERO_PATTERN, PETSc believes your assertion
978:    and does not check the structure of the matrix.  If you erroneously
979:    claim that the structure is the same when it actually is not, the new
980:    preconditioner will not function correctly.  Thus, use this optimization
981:    feature carefully!

983:    If in doubt about whether your preconditioner matrix has changed
984:    structure or not, use the flag DIFFERENT_NONZERO_PATTERN.

986:    More Notes about Repeated Solution of Linear Systems:
987:    PETSc does NOT reset the matrix entries of either Amat or Pmat
988:    to zero after a linear solve; the user is completely responsible for
989:    matrix assembly.  See the routine MatZeroEntries() if desiring to
990:    zero all elements of a matrix.

992:    Level: intermediate

994: .keywords: PC, set, operators, matrix, linear system

996: .seealso: PCGetOperators(), MatZeroEntries()
997:  @*/
998: PetscErrorCode  PCSetOperators(PC pc,Mat Amat,Mat Pmat,MatStructure flag)
999: {


1009:   /* reference first in case the matrices are the same */
1010:   if (Amat) {PetscObjectReference((PetscObject)Amat);}
1011:   if (pc->mat) {MatDestroy(pc->mat);}
1012:   if (Pmat) {PetscObjectReference((PetscObject)Pmat);}
1013:   if (pc->pmat) {MatDestroy(pc->pmat);}
1014:   pc->mat  = Amat;
1015:   pc->pmat = Pmat;

1017:   if (pc->setupcalled == 2 && flag != SAME_PRECONDITIONER) {
1018:     pc->setupcalled = 1;
1019:   }
1020:   pc->flag = flag;
1021:   return(0);
1022: }
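
/*
   Usage sketch (illustrative; hypothetical helper): reusing a PC across repeated solves.  After
   the matrix values (but not the nonzero pattern) change, resetting the operators with
   SAME_NONZERO_PATTERN lets PCSetUp() reuse symbolic work, while SAME_PRECONDITIONER skips the
   rebuild entirely.  Error checking is omitted.
*/
static PetscErrorCode PCExampleReuse(PC pc,Mat A,Vec b,Vec x)
{
  PCSetOperators(pc,A,A,DIFFERENT_NONZERO_PATTERN);  /* first solve: the flag is ignored      */
  PCSetUp(pc);
  PCApply(pc,b,x);

  /* ... the caller changes the entries of A, keeping the same nonzero pattern ... */

  PCSetOperators(pc,A,A,SAME_NONZERO_PATTERN);       /* only the values changed               */
  PCSetUp(pc);                                       /* may reuse symbolic factorization data */
  PCApply(pc,b,x);
  return(0);
}
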

1026: /*@C
1027:    PCGetOperators - Gets the matrix associated with the linear system and
1028:    possibly a different one associated with the preconditioner.

1030:    Not collective, though parallel Mats are returned if the PC is parallel

1032:    Input Parameter:
1033: .  pc - the preconditioner context

1035:    Output Parameters:
1036: +  mat - the matrix associated with the linear system
1037: .  pmat - matrix associated with the preconditioner, usually the same
1038:           as mat. 
1039: -  flag - flag indicating information about the preconditioner
1040:           matrix structure.  See PCSetOperators() for details.

1042:    Level: intermediate

1044:    Alternative usage: If the operators have NOT been set with KSP/PCSetOperators() then the operators
1045:       are created in PC and returned to the user. In this case, if both operators
1046:       mat and pmat are requested, two DIFFERENT operators will be returned. If
1047:       only one is requested both operators in the PC will be the same (i.e. as
1048:       if one had called KSP/PCSetOperators() with the same argument for both Mats).
1049:       The user must set the sizes of the returned matrices and their type etc just
1050:       as if the user created them with MatCreate(). For example,

1052: $         KSP/PCGetOperators(ksp/pc,&mat,PETSC_NULL,PETSC_NULL); is equivalent to
1053: $           set size, type, etc of mat

1055: $         MatCreate(comm,&mat);
1056: $         KSP/PCSetOperators(ksp/pc,mat,mat,SAME_NONZERO_PATTERN);
1057: $         PetscObjectDereference((PetscObject)mat);
1058: $           set size, type, etc of mat

1060:      and

1062: $         KSP/PCGetOperators(ksp/pc,&mat,&pmat,PETSC_NULL); is equivalent to
1063: $           set size, type, etc of mat and pmat

1065: $         MatCreate(comm,&mat);
1066: $         MatCreate(comm,&pmat);
1067: $         KSP/PCSetOperators(ksp/pc,mat,pmat,SAME_NONZERO_PATTERN);
1068: $         PetscObjectDereference((PetscObject)mat);
1069: $         PetscObjectDereference((PetscObject)pmat);
1070: $           set size, type, etc of mat and pmat

 1072:     The rationale for this support is so that when creating a TS, SNES, or KSP the hierarchy
 1073:     of underlying objects (i.e. SNES, KSP, PC, Mat) and their lifespans can be completely 
 1074:     managed by the top-most object (i.e. the TS, SNES, or KSP). Another way to look
 1075:     at this is that when you create a SNES you do not NEED to create a KSP and attach it to 
 1076:     the SNES object (the SNES object manages it for you). Similarly when you create a KSP
 1077:     you do not need to attach a PC to it (the KSP object manages the PC object for you).
 1078:     Thus, why should YOU have to create the Mat and attach it to the SNES/KSP/PC, when
 1079:     it can be created for you?
1080:      

1082: .keywords: PC, get, operators, matrix, linear system

1084: .seealso: PCSetOperators(), KSPGetOperators(), KSPSetOperators(), PCGetOperatorsSet()
1085: @*/
1086: PetscErrorCode  PCGetOperators(PC pc,Mat *mat,Mat *pmat,MatStructure *flag)
1087: {

1092:   if (mat) {
1093:     if (!pc->mat) {
1094:       MatCreate(((PetscObject)pc)->comm,&pc->mat);
1095:       if (!pc->pmat && !pmat) { /* user did NOT request pmat, so make same as mat */
1096:         pc->pmat = pc->mat;
1097:         PetscObjectReference((PetscObject)pc->pmat);
1098:       }
1099:     }
1100:     *mat  = pc->mat;
1101:   }
1102:   if (pmat) {
1103:     if (!pc->pmat) {
 1104:       MatCreate(((PetscObject)pc)->comm,&pc->pmat);
1105:       if (!pc->mat && !mat) { /* user did NOT request mat, so make same as pmat */
1106:         pc->mat = pc->pmat;
1107:         PetscObjectReference((PetscObject)pc->mat);
1108:       }
1109:     }
1110:     *pmat = pc->pmat;
1111:   }
1112:   if (flag) *flag = pc->flag;
1113:   return(0);
1114: }

1118: /*@C
1119:    PCGetOperatorsSet - Determines if the matrix associated with the linear system and
1120:    possibly a different one associated with the preconditioner have been set in the PC.

1122:    Not collective, though the results on all processes should be the same

1124:    Input Parameter:
1125: .  pc - the preconditioner context

1127:    Output Parameters:
1128: +  mat - the matrix associated with the linear system was set
 1129: -  pmat - matrix associated with the preconditioner was set, usually the same as mat

1131:    Level: intermediate

1133: .keywords: PC, get, operators, matrix, linear system

1135: .seealso: PCSetOperators(), KSPGetOperators(), KSPSetOperators(), PCGetOperators()
1136: @*/
1137: PetscErrorCode  PCGetOperatorsSet(PC pc,PetscTruth *mat,PetscTruth *pmat)
1138: {
1141:   if (mat)  *mat  = (pc->mat)  ? PETSC_TRUE : PETSC_FALSE;
1142:   if (pmat) *pmat = (pc->pmat) ? PETSC_TRUE : PETSC_FALSE;
1143:   return(0);
1144: }

1148: /*@
1149:    PCFactorGetMatrix - Gets the factored matrix from the
1150:    preconditioner context.  This routine is valid only for the LU, 
1151:    incomplete LU, Cholesky, and incomplete Cholesky methods.

1153:    Not Collective on PC though Mat is parallel if PC is parallel

1155:    Input Parameters:
1156: .  pc - the preconditioner context

1158:    Output parameters:
1159: .  mat - the factored matrix

1161:    Level: advanced

1163:    Notes: Does not increase the reference count for the matrix so DO NOT destroy it

1165: .keywords: PC, get, factored, matrix
1166: @*/
1167: PetscErrorCode  PCFactorGetMatrix(PC pc,Mat *mat)
1168: {

1174:   if (pc->ops->getfactoredmatrix) {
1175:     (*pc->ops->getfactoredmatrix)(pc,mat);
1176:   }
1177:   return(0);
1178: }

1182: /*@C
1183:    PCSetOptionsPrefix - Sets the prefix used for searching for all 
1184:    PC options in the database.

1186:    Collective on PC

1188:    Input Parameters:
1189: +  pc - the preconditioner context
1190: -  prefix - the prefix string to prepend to all PC option requests

1192:    Notes:
1193:    A hyphen (-) must NOT be given at the beginning of the prefix name.
1194:    The first character of all runtime options is AUTOMATICALLY the
1195:    hyphen.

1197:    Level: advanced

1199: .keywords: PC, set, options, prefix, database

1201: .seealso: PCAppendOptionsPrefix(), PCGetOptionsPrefix()
1202: @*/
1203: PetscErrorCode  PCSetOptionsPrefix(PC pc,const char prefix[])
1204: {

1209:   PetscObjectSetOptionsPrefix((PetscObject)pc,prefix);
1210:   return(0);
1211: }
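
/*
   Usage sketch (illustrative; hypothetical helper): giving a PC its own options prefix so it can
   be controlled separately at runtime, e.g. with -sub_pc_type ilu once PCSetFromOptions() is
   eventually called (typically by the enclosing KSP).  Note the prefix has no leading hyphen.
*/
static PetscErrorCode PCExamplePrefix(PC subpc)
{
  const char *prefix;
  PCSetOptionsPrefix(subpc,"sub_");    /* options for this PC now look like -sub_pc_type ... */
  PCGetOptionsPrefix(subpc,&prefix);   /* prefix now points to "sub_"                        */
  return(0);
}
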

1215: /*@C
1216:    PCAppendOptionsPrefix - Appends to the prefix used for searching for all 
1217:    PC options in the database.

1219:    Collective on PC

1221:    Input Parameters:
1222: +  pc - the preconditioner context
1223: -  prefix - the prefix string to prepend to all PC option requests

1225:    Notes:
1226:    A hyphen (-) must NOT be given at the beginning of the prefix name.
1227:    The first character of all runtime options is AUTOMATICALLY the
1228:    hyphen.

1230:    Level: advanced

1232: .keywords: PC, append, options, prefix, database

1234: .seealso: PCSetOptionsPrefix(), PCGetOptionsPrefix()
1235: @*/
1236: PetscErrorCode  PCAppendOptionsPrefix(PC pc,const char prefix[])
1237: {

1242:   PetscObjectAppendOptionsPrefix((PetscObject)pc,prefix);
1243:   return(0);
1244: }

1248: /*@C
1249:    PCGetOptionsPrefix - Gets the prefix used for searching for all 
1250:    PC options in the database.

1252:    Not Collective

1254:    Input Parameters:
1255: .  pc - the preconditioner context

1257:    Output Parameters:
1258: .  prefix - pointer to the prefix string used, is returned

 1260:    Notes: On the Fortran side, the user should pass in a string 'prefix' of
 1261:    sufficient length to hold the prefix.

1263:    Level: advanced

1265: .keywords: PC, get, options, prefix, database

1267: .seealso: PCSetOptionsPrefix(), PCAppendOptionsPrefix()
1268: @*/
1269: PetscErrorCode  PCGetOptionsPrefix(PC pc,const char *prefix[])
1270: {

1276:   PetscObjectGetOptionsPrefix((PetscObject)pc,prefix);
1277:   return(0);
1278: }

1282: /*@
1283:    PCPreSolve - Optional pre-solve phase, intended for any
1284:    preconditioner-specific actions that must be performed before 
1285:    the iterative solve itself.

1287:    Collective on PC

1289:    Input Parameters:
1290: +  pc - the preconditioner context
1291: -  ksp - the Krylov subspace context

1293:    Level: developer

1295:    Sample of Usage:
1296: .vb
1297:     PCPreSolve(pc,ksp);
1298:     KSPSolve(ksp,b,x);
1299:     PCPostSolve(pc,ksp);
1300: .ve

1302:    Notes:
1303:    The pre-solve phase is distinct from the PCSetUp() phase.

 1305:    KSPSolve() calls this routine directly, so it is rarely called by the user.

1307: .keywords: PC, pre-solve

1309: .seealso: PCPostSolve()
1310: @*/
1311: PetscErrorCode  PCPreSolve(PC pc,KSP ksp)
1312: {
1314:   Vec            x,rhs;
1315:   Mat            A,B;

1320:   KSPGetSolution(ksp,&x);
1321:   KSPGetRhs(ksp,&rhs);
1322:   /*
1323:       Scale the system and have the matrices use the scaled form
1324:     only if the two matrices are actually the same (and hence
 1325:     have the same scaling)
1326:   */
1327:   PCGetOperators(pc,&A,&B,PETSC_NULL);
1328:   if (A == B) {
1329:     MatScaleSystem(pc->mat,rhs,x);
1330:     MatUseScaledForm(pc->mat,PETSC_TRUE);
1331:   }

1333:   if (pc->ops->presolve) {
1334:     (*pc->ops->presolve)(pc,ksp,rhs,x);
1335:   }
1336:   return(0);
1337: }

1341: /*@
1342:    PCPostSolve - Optional post-solve phase, intended for any
1343:    preconditioner-specific actions that must be performed after
1344:    the iterative solve itself.

1346:    Collective on PC

1348:    Input Parameters:
1349: +  pc - the preconditioner context
1350: -  ksp - the Krylov subspace context

1352:    Sample of Usage:
1353: .vb
1354:     PCPreSolve(pc,ksp);
1355:     KSPSolve(ksp,b,x);
1356:     PCPostSolve(pc,ksp);
1357: .ve

1359:    Note:
1360:    KSPSolve() calls this routine directly, so it is rarely called by the user.

1362:    Level: developer

1364: .keywords: PC, post-solve

1366: .seealso: PCPreSolve(), KSPSolve()
1367: @*/
1368: PetscErrorCode  PCPostSolve(PC pc,KSP ksp)
1369: {
1371:   Vec            x,rhs;
1372:   Mat            A,B;

1377:   KSPGetSolution(ksp,&x);
1378:   KSPGetRhs(ksp,&rhs);
1379:   if (pc->ops->postsolve) {
1380:      (*pc->ops->postsolve)(pc,ksp,rhs,x);
1381:   }
1382:   /*
1383:       Scale the system and have the matrices use the scaled form
1384:     only if the two matrices are actually the same (and hence
 1385:     have the same scaling)
1386:   */
1387:   PCGetOperators(pc,&A,&B,PETSC_NULL);
1388:   if (A == B) {
1389:     MatUnScaleSystem(pc->mat,rhs,x);
1390:     MatUseScaledForm(pc->mat,PETSC_FALSE);
1391:   }
1392:   return(0);
1393: }

1397: /*@C
1398:    PCView - Prints the PC data structure.

1400:    Collective on PC

1402:    Input Parameters:
 1403: +  pc - the PC context
1404: -  viewer - optional visualization context

1406:    Note:
1407:    The available visualization contexts include
1408: +     PETSC_VIEWER_STDOUT_SELF - standard output (default)
1409: -     PETSC_VIEWER_STDOUT_WORLD - synchronized standard
1410:          output where only the first processor opens
1411:          the file.  All other processors send their 
1412:          data to the first processor to print. 

 1414:    The user can open an alternative visualization context with
 1415:    PetscViewerASCIIOpen() (output to a specified file).

1417:    Level: developer

1419: .keywords: PC, view

1421: .seealso: KSPView(), PetscViewerASCIIOpen()
1422: @*/
1423: PetscErrorCode  PCView(PC pc,PetscViewer viewer)
1424: {
1425:   const PCType      cstr;
1426:   PetscErrorCode    ierr;
1427:   PetscTruth        mat_exists,iascii,isstring;
1428:   PetscViewerFormat format;

1432:   if (!viewer) {
1433:     PetscViewerASCIIGetStdout(((PetscObject)pc)->comm,&viewer);
1434:   }

1438:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);
1439:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_STRING,&isstring);
1440:   if (iascii) {
1441:     PetscViewerGetFormat(viewer,&format);
1442:     if (((PetscObject)pc)->prefix) {
1443:       PetscViewerASCIIPrintf(viewer,"PC Object:(%s)\n",((PetscObject)pc)->prefix);
1444:     } else {
1445:       PetscViewerASCIIPrintf(viewer,"PC Object:\n");
1446:     }
1447:     PCGetType(pc,&cstr);
1448:     if (cstr) {
1449:       PetscViewerASCIIPrintf(viewer,"  type: %s\n",cstr);
1450:     } else {
1451:       PetscViewerASCIIPrintf(viewer,"  type: not yet set\n");
1452:     }
1453:     if (pc->ops->view) {
1454:       PetscViewerASCIIPushTab(viewer);
1455:       (*pc->ops->view)(pc,viewer);
1456:       PetscViewerASCIIPopTab(viewer);
1457:     }
1458:     PetscObjectExists((PetscObject)pc->mat,&mat_exists);
1459:     if (mat_exists) {
1460:       PetscViewerPushFormat(viewer,PETSC_VIEWER_ASCII_INFO);
1461:       if (pc->pmat == pc->mat) {
1462:         PetscViewerASCIIPrintf(viewer,"  linear system matrix = precond matrix:\n");
1463:         PetscViewerASCIIPushTab(viewer);
1464:         MatView(pc->mat,viewer);
1465:         PetscViewerASCIIPopTab(viewer);
1466:       } else {
1467:         PetscObjectExists((PetscObject)pc->pmat,&mat_exists);
1468:         if (mat_exists) {
1469:           PetscViewerASCIIPrintf(viewer,"  linear system matrix followed by preconditioner matrix:\n");
1470:         } else {
1471:           PetscViewerASCIIPrintf(viewer,"  linear system matrix:\n");
1472:         }
1473:         PetscViewerASCIIPushTab(viewer);
1474:         MatView(pc->mat,viewer);
1475:         if (mat_exists) {MatView(pc->pmat,viewer);}
1476:         PetscViewerASCIIPopTab(viewer);
1477:       }
1478:       PetscViewerPopFormat(viewer);
1479:     }
1480:   } else if (isstring) {
1481:     PCGetType(pc,&cstr);
1482:     PetscViewerStringSPrintf(viewer," %-7.7s",cstr);
1483:     if (pc->ops->view) {(*pc->ops->view)(pc,viewer);}
1484:   } else {
1485:     SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported by PC",((PetscObject)viewer)->type_name);
1486:   }
1487:   return(0);
1488: }


1493: /*@
1494:    PCSetInitialGuessNonzero - Tells the iterative solver that the 
1495:    initial guess is nonzero; otherwise PC assumes the initial guess
1496:    is to be zero (and thus zeros it out before solving).

1498:    Collective on PC

1500:    Input Parameters:
1501: +  pc - iterative context obtained from PCCreate()
1502: -  flg - PETSC_TRUE indicates the guess is non-zero, PETSC_FALSE indicates the guess is zero

1504:    Level: Developer

1506:    Notes:
 1507:     This is a weird function. Since a PC is a linear operator applied to the right hand side, it
 1508:     CANNOT itself use an initial guess. This function is for the "pass-through" preconditioners
 1509:     PCKSP, PCREDUNDANT and PCOPENMP and causes the inner KSP object to use the nonzero
 1510:     initial guess. It does not currently work for PCREDUNDANT, which has to be rewritten to use KSP.


1513: .keywords: PC, set, initial guess, nonzero

1515: .seealso: PCGetInitialGuessNonzero(), PCSetInitialGuessKnoll(), PCGetInitialGuessKnoll()
1516: @*/
1517: PetscErrorCode  PCSetInitialGuessNonzero(PC pc,PetscTruth flg)
1518: {
1520:   pc->nonzero_guess   = flg;
1521:   return(0);
1522: }

1526: /*@C
1527:   PCRegister - See PCRegisterDynamic()

1529:   Level: advanced
1530: @*/
1531: PetscErrorCode  PCRegister(const char sname[],const char path[],const char name[],PetscErrorCode (*function)(PC))
1532: {
1534:   char           fullname[PETSC_MAX_PATH_LEN];


1538:   PetscFListConcat(path,name,fullname);
1539:   PetscFListAdd(&PCList,sname,fullname,(void (*)(void))function);
1540:   return(0);
1541: }
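
/*
   Usage sketch (illustrative; the type name "mynone" and both routines below are invented):
   registering a trivial user-defined preconditioner with PCRegister().  The creation routine
   fills in the function table directly, which works here because private/pcimpl.h is included;
   a PETSC_NULL path assumes the routine is statically linked into the executable.  Error
   checking is omitted.
*/
static PetscErrorCode PCApply_MyNone(PC pc,Vec x,Vec y)
{
  VecCopy(x,y);                    /* behaves like PCNONE: B = I */
  return(0);
}
static PetscErrorCode PCCreate_MyNone(PC pc)
{
  pc->ops->apply = PCApply_MyNone;
  return(0);
}
/*
   Registration and use, typically done once after PetscInitialize():

      PCRegister("mynone",PETSC_NULL,"PCCreate_MyNone",PCCreate_MyNone);
      PCSetType(pc,"mynone");
*/
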

1545: /*@
1546:     PCComputeExplicitOperator - Computes the explicit preconditioned operator.  

1548:     Collective on PC

1550:     Input Parameter:
1551: .   pc - the preconditioner object

1553:     Output Parameter:
 1554: .   mat - the explicit preconditioned operator

1556:     Notes:
1557:     This computation is done by applying the operators to columns of the 
1558:     identity matrix.

1560:     Currently, this routine uses a dense matrix format when 1 processor
1561:     is used and a sparse format otherwise.  This routine is costly in general,
1562:     and is recommended for use only with relatively small systems.

1564:     Level: advanced
1565:    
1566: .keywords: PC, compute, explicit, operator

1568: .seealso: KSPComputeExplicitOperator()

1570: @*/
1571: PetscErrorCode  PCComputeExplicitOperator(PC pc,Mat *mat)
1572: {
1573:   Vec            in,out;
1575:   PetscInt       i,M,m,*rows,start,end;
1576:   PetscMPIInt    size;
1577:   MPI_Comm       comm;
1578:   PetscScalar    *array,one = 1.0;
1579: 

1584:   comm = ((PetscObject)pc)->comm;
1585:   MPI_Comm_size(comm,&size);

1587:   if (!pc->pmat) SETERRQ(PETSC_ERR_ORDER,"You must call KSPSetOperators() or PCSetOperators() before this call");
1588:   MatGetVecs(pc->pmat,&in,0);
1589:   VecDuplicate(in,&out);
1590:   VecGetOwnershipRange(in,&start,&end);
1591:   VecGetSize(in,&M);
1592:   VecGetLocalSize(in,&m);
1593:   PetscMalloc((m+1)*sizeof(PetscInt),&rows);
1594:   for (i=0; i<m; i++) {rows[i] = start + i;}

1596:   MatCreate(comm,mat);
1597:   MatSetSizes(*mat,m,m,M,M);
1598:   if (size == 1) {
1599:     MatSetType(*mat,MATSEQDENSE);
1600:     MatSeqDenseSetPreallocation(*mat,PETSC_NULL);
1601:   } else {
1602:     MatSetType(*mat,MATMPIAIJ);
1603:     MatMPIAIJSetPreallocation(*mat,0,PETSC_NULL,0,PETSC_NULL);
1604:   }

1606:   for (i=0; i<M; i++) {

1608:     VecSet(in,0.0);
1609:     VecSetValues(in,1,&i,&one,INSERT_VALUES);
1610:     VecAssemblyBegin(in);
1611:     VecAssemblyEnd(in);

1613:     /* should fix, allowing user to choose side */
1614:     PCApply(pc,in,out);
1615: 
1616:     VecGetArray(out,&array);
1617:     MatSetValues(*mat,m,rows,1,&i,array,INSERT_VALUES);
1618:     VecRestoreArray(out,&array);

1620:   }
1621:   PetscFree(rows);
 1622:   VecDestroy(in); VecDestroy(out);
1623:   MatAssemblyBegin(*mat,MAT_FINAL_ASSEMBLY);
1624:   MatAssemblyEnd(*mat,MAT_FINAL_ASSEMBLY);
1625:   return(0);
1626: }
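
/*
   Usage sketch (illustrative; hypothetical helper): forming the explicit preconditioned operator
   for a small problem and printing it, a convenient debugging check that the PC does what you
   expect.  Error checking is omitted.
*/
static PetscErrorCode PCExampleExplicitOperator(PC pc)
{
  Mat E;
  PCComputeExplicitOperator(pc,&E);       /* column i of E is B applied to the i-th unit vector */
  MatView(E,PETSC_VIEWER_STDOUT_WORLD);   /* small systems only; dense on one process           */
  MatDestroy(E);
  return(0);
}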