Actual source code: mpisbaijspooles.c
#define PETSCMAT_DLL
/*
   Provides an interface to the Spooles parallel sparse solver (MPI SPOOLES)
*/
#include "../src/mat/impls/aij/seq/spooles/spooles.h"
#include "../src/mat/impls/sbaij/mpi/mpisbaij.h"
#if !defined(PETSC_USE_COMPLEX)
/*
  input:
    F:                 numeric factor
  output:
    nneg, nzero, npos: global matrix inertia, returned on all processes
*/
PetscErrorCode MatGetInertia_MPISBAIJSpooles(Mat F,int *nneg,int *nzero,int *npos)
{
  Mat_Spooles *lu = (Mat_Spooles*)F->spptr;
  int         neg,zero,pos,sbuf[3],rbuf[3];

  /* local inertia of this process's fronts, then summed across the communicator */
  FrontMtx_inertia(lu->frontmtx,&neg,&zero,&pos);
  sbuf[0] = neg; sbuf[1] = zero; sbuf[2] = pos;
  MPI_Allreduce(sbuf,rbuf,3,MPI_INT,MPI_SUM,((PetscObject)F)->comm);
  *nneg = rbuf[0]; *nzero = rbuf[1]; *npos = rbuf[2];
  return(0);
}
#endif /* !defined(PETSC_USE_COMPLEX) */
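/*
   A minimal usage sketch (an illustration, not part of this file): with the PETSc 3.0-era
   API used here, the routine above is reached through the generic entry point
   MatGetInertia() once a Cholesky factorization has been computed. F is assumed to be the
   factored matrix produced by MatCholeskyFactorNumeric(); error checking is omitted.

     PetscInt nneg,nzero,npos;
     MatGetInertia(F,&nneg,&nzero,&npos);
     PetscPrintf(PETSC_COMM_WORLD,"inertia: %D negative, %D zero, %D positive\n",nneg,nzero,npos);
*/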
/* Note: the PETSc r permutation is ignored; Spooles computes its own ordering */
PetscErrorCode MatCholeskyFactorSymbolic_MPISBAIJSpooles(Mat B,Mat A,IS r,const MatFactorInfo *info)
{
  B->ops->choleskyfactornumeric = MatFactorNumeric_MPISpooles;
  return(0);
}
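/*
   A hedged sketch (not part of this file) of the factor-and-solve sequence that exercises
   the symbolic routine above, using the PETSc 3.0-era API. A, b, and x are assumed to be an
   assembled MPISBAIJ matrix and conforming vectors; error checking is omitted. As noted
   above, the ordering passed to MatCholeskyFactorSymbolic() is ignored; Spooles selects its
   own ordering (see -mat_spooles_ordering below).

     Mat           F;
     IS            rperm,cperm;
     MatFactorInfo info;

     MatFactorInfoInitialize(&info);
     MatGetOrdering(A,MATORDERING_NATURAL,&rperm,&cperm);
     MatGetFactor(A,MAT_SOLVER_SPOOLES,MAT_FACTOR_CHOLESKY,&F);
     MatCholeskyFactorSymbolic(F,A,rperm,&info);
     MatCholeskyFactorNumeric(F,A,&info);
     MatSolve(F,b,x);
*/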
PetscErrorCode MatDestroy_MPISBAIJSpooles(Mat A)
{
  Mat_Spooles *lu = (Mat_Spooles*)A->spptr;

  if (lu->CleanUpSpooles) {
    /* free the Spooles objects created during factorization */
    FrontMtx_free(lu->frontmtx);
    IV_free(lu->newToOldIV);
    IV_free(lu->oldToNewIV);
    IV_free(lu->vtxmapIV);
    InpMtx_free(lu->mtxA);
    ETree_free(lu->frontETree);
    IVL_free(lu->symbfacIVL);
    SubMtxManager_free(lu->mtxmanager);
    DenseMtx_free(lu->mtxX);
    DenseMtx_free(lu->mtxY);
    MPI_Comm_free(&(lu->comm_spooles));
    if (lu->scat) {
      VecDestroy(lu->vec_spooles);
      ISDestroy(lu->iden);
      ISDestroy(lu->is_petsc);
      VecScatterDestroy(lu->scat);
    }
  }
  MatDestroy_MPISBAIJ(A);
  return(0);
}
PetscErrorCode MatFactorGetSolverPackage_mpisbaij_spooles(Mat A,const MatSolverPackage *type)
{
  *type = MAT_SOLVER_SPOOLES;
  return(0);
}
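/*
   The composed function above is what makes the generic query below work on a Spooles
   factor matrix F (a sketch, not part of this file):

     const MatSolverPackage type;
     MatFactorGetSolverPackage(F,&type);    after this call, type is "spooles"
*/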
PetscErrorCode MatGetFactor_mpisbaij_spooles(Mat A,MatFactorType ftype,Mat *F)
{
  Mat_Spooles *lu;
  Mat         B;

  /* Create the factorization matrix F */
  MatCreate(((PetscObject)A)->comm,&B);
  MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);
  MatSetType(B,((PetscObject)A)->type_name);
  MatMPISBAIJSetPreallocation(B,1,0,PETSC_NULL,0,PETSC_NULL);

  PetscNewLog(B,Mat_Spooles,&lu);
  B->spptr          = lu;
  lu->flg           = DIFFERENT_NONZERO_PATTERN;
  lu->options.useQR = PETSC_FALSE;

  if (ftype == MAT_FACTOR_CHOLESKY) {
    B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_MPISBAIJSpooles;
    B->ops->view                   = MatView_Spooles;
    B->ops->destroy                = MatDestroy_MPISBAIJSpooles;
    PetscObjectComposeFunctionDynamic((PetscObject)B,"MatFactorGetSolverPackage_C","MatFactorGetSolverPackage_mpisbaij_spooles",MatFactorGetSolverPackage_mpisbaij_spooles);
    lu->options.symflag      = SPOOLES_SYMMETRIC;
    lu->options.pivotingflag = SPOOLES_NO_PIVOTING;
  } else SETERRQ(PETSC_ERR_SUP,"Only Cholesky factorization is supported for SBAIJ matrices; use AIJ for LU");

  B->factor = ftype;
  MPI_Comm_dup(((PetscObject)A)->comm,&(lu->comm_spooles));
  *F = B;
  return(0);
}
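/*
   In practice MatGetFactor_mpisbaij_spooles() is usually reached through a preconditioner
   rather than called directly. A hedged sketch (ksp is assumed to be a KSP whose operator
   is an assembled MPISBAIJ matrix; error checking omitted):

     PC pc;
     KSPGetPC(ksp,&pc);
     KSPSetType(ksp,KSPPREONLY);
     PCSetType(pc,PCCHOLESKY);
     PCFactorSetMatSolverPackage(pc,MAT_SOLVER_SPOOLES);
*/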
/*MC
  MAT_SOLVER_SPOOLES - "spooles" - a matrix type providing direct solvers (LU and Cholesky) for distributed symmetric
  and non-symmetric matrices via the external package Spooles.

  To use this solver, install Spooles with PETSc by running config/configure.py with the option --download-spooles.

  Options Database Keys:
+ -mat_spooles_tau <tau> - upper bound on the magnitude of the largest element in L or U
. -mat_spooles_seed <seed> - random number seed used for ordering
. -mat_spooles_msglvl <msglvl> - message output level
. -mat_spooles_ordering <BestOfNDandMS,MMD,MS,ND> - ordering used
. -mat_spooles_maxdomainsize <n> - maximum subgraph size used by Spooles orderings
. -mat_spooles_maxzeros <n> - maximum number of zeros inside a supernode
. -mat_spooles_maxsize <n> - maximum size of a supernode
. -mat_spooles_FrontMtxInfo <true,false> - print Spooles information about the computed factorization
. -mat_spooles_symmetryflag <0,1,2> - 0: SPOOLES_SYMMETRIC, 1: SPOOLES_HERMITIAN, 2: SPOOLES_NONSYMMETRIC
. -mat_spooles_patchAndGoFlag <0,1,2> - 0: no patch, 1: use PatchAndGo strategy 1, 2: use PatchAndGo strategy 2
. -mat_spooles_toosmall <dt> - drop tolerance for PatchAndGo strategy 1
. -mat_spooles_storeids <bool integer> - if nonzero, stores the row and column numbers where patches were applied in an IV object
. -mat_spooles_fudge <delta> - fudge factor for rescaling diagonals with PatchAndGo strategy 2
- -mat_spooles_storevalues <bool integer> - if nonzero and PatchAndGo strategy 2 is used, stores the change in the diagonal value in a DV object
  Level: beginner

.seealso: MAT_SOLVER_SUPERLU, MAT_SOLVER_MUMPS, MAT_SOLVER_SUPERLU_DIST, PCFactorSetMatSolverPackage(), MatSolverPackage
M*/
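/*
   Equivalently, the solver and the options documented above can be selected at runtime
   (a sketch; the executable name ./ex is a placeholder):

     mpiexec -n 4 ./ex -ksp_type preonly -pc_type cholesky \
       -pc_factor_mat_solver_package spooles -mat_spooles_ordering MMD -mat_spooles_msglvl 1
*/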